Skip to content

Commit ff04520

Browse files
authored
Bump AMD docker (#41792)
1 parent 01f5ac7 commit ff04520

File tree

1 file changed

+5
-4
lines changed

1 file changed

+5
-4
lines changed

docker/transformers-pytorch-amd-gpu/Dockerfile

Lines changed: 5 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,4 @@
1-
FROM rocm/pytorch:rocm6.4.1_ubuntu24.04_py3.12_pytorch_release_2.7.1
1+
FROM rocm/pytorch:rocm7.0.2_ubuntu24.04_py3.12_pytorch_release_2.7.1
22
LABEL maintainer="Hugging Face"
33

44
ARG DEBIAN_FRONTEND=noninteractive
@@ -10,8 +10,8 @@ RUN apt update && \
1010

1111
RUN git lfs install
1212

13-
RUN python3 -m pip install --no-cache-dir --upgrade pip numpy
14-
RUN python3 -m pip install --no-cache-dir --upgrade importlib-metadata setuptools ninja git+https://github.com/facebookresearch/detectron2.git pytesseract "itsdangerous<2.1.0"
13+
RUN python3 -m pip install --no-cache-dir --upgrade pip numpy importlib-metadata setuptools wheel ninja pytesseract "itsdangerous<2.1.0"
14+
RUN python3 -m pip install --no-cache-dir --no-build-isolation git+https://github.com/facebookresearch/detectron2.git
1515

1616
ARG REF=main
1717
WORKDIR /
@@ -39,6 +39,7 @@ RUN python3 -m pip install --no-cache-dir "torchcodec==0.5"
3939
# Install flash attention from source. Tested with commit 6387433156558135a998d5568a9d74c1778666d8
4040
RUN git clone https://github.com/ROCm/flash-attention/ -b tridao && \
4141
cd flash-attention && \
42-
GPU_ARCHS="gfx942" python setup.py install
42+
GPU_ARCHS="gfx942;gfx950" python setup.py install
43+
# GPU_ARCHS builds for MI300, MI325 and MI355
4344

4445
RUN python3 -m pip install --no-cache-dir einops

0 commit comments

Comments (0)