Pitfalls I hit while installing flash-attention

Date: 2025-01-23 07:42:39
# First, install ninja
pip install ninja
# Verify that the installation succeeded
ninja --version
echo $?
# Continue only if the exit code is 0; otherwise reinstall ninja
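# Optional sanity check (a sketch, not strictly required): flash-attn compiles
# through PyTorch's cpp_extension machinery, so torch itself must also be able
# to find ninja. torch.utils.cpp_extension.is_ninja_available() reports exactly
# that:
python -c "from torch.utils.cpp_extension import is_ninja_available; print(is_ninja_available())"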
# --no-build-isolation makes pip reuse the torch and ninja already installed
# instead of building inside an isolated environment
pip install flash-attn==<version> --no-build-isolation
# As expected, this fails with an error:
# Guessing wheel URL:  https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.6/flash_attn-2.3.6+cu118torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
# error: Remote end closed connection without response
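# The wheel filename encodes the environment it was built for: cu118 (CUDA),
# torch2.1, cxx11abiFALSE (C++ ABI flag), and cp310 (Python). A sketch for
# reading these values off the local install so you can pick the matching
# wheel (note that torch._C._GLIBCXX_USE_CXX11_ABI is a private torch
# attribute and may change across releases):
python -c "import torch, platform; print(torch.__version__, torch.version.cuda, torch._C._GLIBCXX_USE_CXX11_ABI, platform.python_version())"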
# Workaround: install manually (download the .whl file first)
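# A sketch of the manual download, reusing the URL the installer guessed above
# (swap the version tags for the ones matching your environment):
wget https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.6/flash_attn-2.3.6+cu118torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl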
pip install flash_attn-2.3.6+cu118torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
# Done!
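# A quick import check to confirm the install actually works (recent flash_attn
# releases expose __version__, so this should print something like 2.3.6):
python -c "import flash_attn; print(flash_attn.__version__)"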