docker/transformers-pytorch-amd-gpu (1 file changed: +5, -4 lines)

@@ -1,4 +1,4 @@
-FROM rocm/pytorch:rocm6.4.1_ubuntu24.04_py3.12_pytorch_release_2.7.1
+FROM rocm/pytorch:rocm7.0.2_ubuntu24.04_py3.12_pytorch_release_2.7.1
 LABEL maintainer="Hugging Face"

 ARG DEBIAN_FRONTEND=noninteractive
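The base image bump moves from ROCm 6.4.1 to ROCm 7.0.2 while keeping the same Ubuntu 24.04 / Python 3.12 / PyTorch 2.7.1 release line. A quick way to sanity-check the new tag before rebuilding the CI image (a sketch, assuming the tag is published on Docker Hub exactly as written in the diff):

    # Pull the new base image and print the torch build plus its HIP (ROCm) version.
    docker run --rm rocm/pytorch:rocm7.0.2_ubuntu24.04_py3.12_pytorch_release_2.7.1 \
        python3 -c "import torch; print(torch.__version__, torch.version.hip)"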
@@ -10,8 +10,8 @@ RUN apt update && \

 RUN git lfs install

-RUN python3 -m pip install --no-cache-dir --upgrade pip numpy
-RUN python3 -m pip install --no-cache-dir --upgrade importlib-metadata setuptools ninja git+https://github.com/facebookresearch/detectron2.git pytesseract "itsdangerous<2.1.0"
+RUN python3 -m pip install --no-cache-dir --upgrade pip numpy importlib-metadata setuptools wheel ninja pytesseract "itsdangerous<2.1.0"
+RUN python3 -m pip install --no-cache-dir --no-build-isolation git+https://github.com/facebookresearch/detectron2.git

 ARG REF=main
 WORKDIR /
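This hunk splits the installs: build prerequisites (now including wheel) go into one layer, and detectron2 is compiled from source with --no-build-isolation so pip builds it against the ROCm torch already in the image rather than pulling a separate torch into an isolated build environment. A minimal check that the build used the right torch (a sketch; run inside the built image):

    # If detectron2 imports and torch reports a HIP version, the ROCm build was used.
    python3 -c "import torch, detectron2; print(detectron2.__version__, torch.version.hip)"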
@@ -39,6 +39,7 @@ RUN python3 -m pip install --no-cache-dir "torchcodec==0.5"
 # Install flash attention from source. Tested with commit 6387433156558135a998d5568a9d74c1778666d8
 RUN git clone https://github.com/ROCm/flash-attention/ -b tridao && \
     cd flash-attention && \
-    GPU_ARCHS="gfx942" python setup.py install
+    GPU_ARCHS="gfx942;gfx950" python setup.py install
+# GPU_ARCHS builds for MI300, MI325 and MI355

 RUN python3 -m pip install --no-cache-dir einops
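GPU_ARCHS is a semicolon-separated list of AMD GPU targets for the flash-attention source build; adding gfx950 alongside gfx942 lets the single install cover both the MI300-series and the newer MI350-series accelerators named in the comment. To check which architecture a given host actually reports (a sketch; both commands assume a machine with ROCm and an AMD GPU present):

    # Architecture as seen by the ROCm runtime, e.g. "gfx942" or "gfx950".
    rocminfo | grep -m1 gfx
    # The same string as reported through PyTorch's device properties.
    python3 -c "import torch; print(torch.cuda.get_device_properties(0).gcnArchName)"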