diff --git a/.github/workflows/docker/compose/llms-compose.yaml b/.github/workflows/docker/compose/llms-compose.yaml
index 1e90cd0b30..6cbfef5634 100644
--- a/.github/workflows/docker/compose/llms-compose.yaml
+++ b/.github/workflows/docker/compose/llms-compose.yaml
@@ -7,6 +7,10 @@ services:
     build:
       dockerfile: comps/llms/src/text-generation/Dockerfile
     image: ${REGISTRY:-opea}/llm-textgen:${TAG:-latest}
+  llm-textgen-openeuler:
+    build:
+      dockerfile: comps/llms/src/text-generation/Dockerfile.openEuler
+    image: ${REGISTRY:-opea}/llm-textgen:${TAG:-latest}-openeuler
   llm-textgen-gaudi:
     build:
       dockerfile: comps/llms/src/text-generation/Dockerfile.intel_hpu
diff --git a/comps/llms/src/text-generation/Dockerfile.openEuler b/comps/llms/src/text-generation/Dockerfile.openEuler
new file mode 100644
index 0000000000..d0f33d6afe
--- /dev/null
+++ b/comps/llms/src/text-generation/Dockerfile.openEuler
@@ -0,0 +1,29 @@
+# Copyright (C) 2025 Huawei Technologies Co., Ltd.
+# SPDX-License-Identifier: Apache-2.0
+
+FROM openeuler/python:3.11.13-oe2403lts
+
+RUN yum install -y \
+    shadow \
+    jemalloc-devel && \
+    yum clean all && \
+    rm -rf /var/cache/yum
+
+RUN useradd -m -s /bin/bash user && \
+    mkdir -p /home/user && \
+    chown -R user /home/user/
+
+COPY comps/llms/src/text-generation/requirements.txt /home/user/comps/llms/src/text-generation/requirements.txt
+
+ARG uvpip='uv pip install --system --no-cache-dir'
+RUN pip install --no-cache-dir --upgrade pip setuptools uv && \
+    $uvpip -r /home/user/comps/llms/src/text-generation/requirements.txt
+
+COPY comps /home/user/comps
+ENV PYTHONPATH=$PYTHONPATH:/home/user
+
+USER user
+
+WORKDIR /home/user/comps/llms/src/text-generation
+
+ENTRYPOINT ["bash", "entrypoint.sh"]
diff --git a/tests/llms/test_llms_text-generation_service_tgi.sh b/tests/llms/test_llms_text-generation_service_tgi.sh
index 63a2e3e2ff..b490cf3772 100644
--- a/tests/llms/test_llms_text-generation_service_tgi.sh
+++ b/tests/llms/test_llms_text-generation_service_tgi.sh
@@ -18,7 +18,8 @@ service_name="textgen-service-tgi"
 
 function build_docker_images() {
     cd $WORKPATH
-    docker build --no-cache -t ${REGISTRY:-opea}/llm-textgen:${TAG:-latest} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/src/text-generation/Dockerfile .
+    dockerfile_name="comps/llms/src/text-generation/$1"
+    docker build --no-cache -t ${REGISTRY:-opea}/llm-textgen:${TAG:-latest} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f "${dockerfile_name}" .
     if [ $? -ne 0 ]; then
         echo "opea/llm-textgen built fail"
         exit 1
@@ -112,25 +113,32 @@ function validate_microservice_with_openai() {
     fi
 }
 
-function stop_docker() {
+function stop_service() {
     cd $WORKPATH/comps/llms/deployment/docker_compose
     docker compose -f compose_text-generation.yaml down --remove-orphans
 }
 
 function main() {
 
-    stop_docker
-
-    build_docker_images
+    build_docker_images "Dockerfile"
+    trap stop_service EXIT
 
+    echo "Test normal env ..."
     pip install --no-cache-dir openai pydantic
     start_service
     validate_microservices
     validate_microservice_with_openai
+    stop_service
 
-    stop_docker
-    echo y | docker system prune
+    echo "Test with openEuler OS ..."
+    build_docker_images "Dockerfile.openEuler"
+    start_service
+    validate_microservices
+    validate_microservice_with_openai
+    stop_service
+
+    docker system prune -f
 
 }
 
 main