text-generation-inference
# Pinned flash-attention commit
flash_att_commit := 3a9bfd076f98746c73362328958dbc68d145fbec

flash-attention:
	# Clone flash attention
	pip install -U packaging ninja --no-cache-dir
	git clone https://github.com/HazyResearch/flash-attention.git
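
# build-flash-attention pins the clone to $(flash_att_commit) and compiles the
# main flash_attn extension plus the rotary and layer_norm CUDA kernels that
# live under csrc/, each via its own setup.py.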
build-flash-attention: flash-attention
	cd flash-attention && git fetch && git checkout $(flash_att_commit)
	cd flash-attention && python setup.py build
	cd flash-attention/csrc/rotary && python setup.py build
	cd flash-attention/csrc/layer_norm && python setup.py build
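
# install-flash-attention removes any previously installed copies of the three
# packages, then installs the freshly built flash_attn, dropout_layer_norm and
# rotary_emb extensions.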
install-flash-attention: build-flash-attention
	pip uninstall flash_attn rotary_emb dropout_layer_norm -y || true
	cd flash-attention && python setup.py install && cd csrc/layer_norm && python setup.py install && cd ../rotary && python setup.py install
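
# Usage sketch (an assumption, not part of the upstream file): running
#   make install-flash-attention
# clones, pins, builds and installs everything into the active Python
# environment. A quick check such as
#   python -c "import flash_attn, rotary_emb, dropout_layer_norm"
# should confirm that all three extensions import. Because build-flash-attention
# and install-flash-attention do not create files with those names, they could
# also be declared phony, e.g.:
#   .PHONY: build-flash-attention install-flash-attention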