-accelerate==0.25.*
+accelerate==0.27.*
 colorama
 datasets
 einops
@@ -8,7 +8,7 @@ jinja2==3.1.2
 lm_eval==0.3.0
 markdown
 numpy==1.26.*
-optimum==1.16.*
+optimum==1.17.*
 pandas
 peft==0.8.*
 Pillow>=9.5.0
@@ -28,33 +28,33 @@ bitsandbytes==0.42.*; platform_system != "Windows"
 https://github.com/jllllll/bitsandbytes-windows-webui/releases/download/wheels/bitsandbytes-0.41.1-py3-none-win_amd64.whl; platform_system == "Windows"
 
 # llama-cpp-python (CPU only, AVX2)
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.44+cpuavx2-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.50+cpuavx2-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.50+cpuavx2-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.50+cpuavx2-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/cpu/llama_cpp_python-0.2.50+cpuavx2-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
 
 # llama-cpp-python (CUDA, no tensor cores)
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.44+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.44+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.44+cu121-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.44+cu121-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.50+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.50+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.50+cu121-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda-0.2.50+cu121-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
 
 # llama-cpp-python (CUDA, tensor cores)
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.44+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.44+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.44+cu121-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.44+cu121-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.50+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.50+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.50+cu121-cp311-cp311-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/oobabooga/llama-cpp-python-cuBLAS-wheels/releases/download/textgen-webui/llama_cpp_python_cuda_tensorcores-0.2.50+cu121-cp310-cp310-manylinux_2_31_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
 
 # CUDA wheels
 https://github.com/jllllll/AutoGPTQ/releases/download/v0.6.0/auto_gptq-0.6.0+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
 https://github.com/jllllll/AutoGPTQ/releases/download/v0.6.0/auto_gptq-0.6.0+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
 https://github.com/jllllll/AutoGPTQ/releases/download/v0.6.0/auto_gptq-0.6.0+cu121-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
 https://github.com/jllllll/AutoGPTQ/releases/download/v0.6.0/auto_gptq-0.6.0+cu121-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
-https://github.com/oobabooga/exllamav2/releases/download/v0.0.13.2/exllamav2-0.0.13.2+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/oobabooga/exllamav2/releases/download/v0.0.13.2/exllamav2-0.0.13.2+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
-https://github.com/oobabooga/exllamav2/releases/download/v0.0.13.2/exllamav2-0.0.13.2+cu121-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/oobabooga/exllamav2/releases/download/v0.0.13.2/exllamav2-0.0.13.2+cu121-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
-https://github.com/oobabooga/exllamav2/releases/download/v0.0.13.2/exllamav2-0.0.13.2-py3-none-any.whl; platform_system == "Linux" and platform_machine != "x86_64"
+https://github.com/oobabooga/exllamav2/releases/download/v0.0.14/exllamav2-0.0.14+cu121-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/exllamav2/releases/download/v0.0.14/exllamav2-0.0.14+cu121-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/oobabooga/exllamav2/releases/download/v0.0.14/exllamav2-0.0.14+cu121-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/oobabooga/exllamav2/releases/download/v0.0.14/exllamav2-0.0.14+cu121-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/exllamav2/releases/download/v0.0.14/exllamav2-0.0.14-py3-none-any.whl; platform_system == "Linux" and platform_machine != "x86_64"
 https://github.com/jllllll/flash-attention/releases/download/v2.3.4/flash_attn-2.3.4+cu121torch2.1cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
 https://github.com/jllllll/flash-attention/releases/download/v2.3.4/flash_attn-2.3.4+cu121torch2.1cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
 https://github.com/Dao-AILab/flash-attention/releases/download/v2.3.4/flash_attn-2.3.4+cu122torch2.1cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"