Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/docker/compose/llms-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@ services:
build:
dockerfile: comps/llms/src/text-generation/Dockerfile
image: ${REGISTRY:-opea}/llm-textgen:${TAG:-latest}
llm-textgen-openeuler:
build:
dockerfile: comps/llms/src/text-generation/Dockerfile.openEuler
image: ${REGISTRY:-opea}/llm-textgen:${TAG:-latest}-openeuler
llm-textgen-gaudi:
build:
dockerfile: comps/llms/src/text-generation/Dockerfile.intel_hpu
Expand Down
30 changes: 30 additions & 0 deletions comps/llms/src/text-generation/Dockerfile.openEuler
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# Copyright (C) 2025 Huawei Technologies Co., Ltd.
# SPDX-License-Identifier: Apache-2.0

FROM openeuler/python:3.11.13-oe2403lts

# shadow provides useradd; jemalloc-devel is needed by the LLM runtime.
# No blanket `yum update -y` (DL3005): pick up CVE fixes by bumping the
# pinned base-image tag instead. Clean the package cache in the same layer.
RUN yum install -y \
        jemalloc-devel \
        shadow && \
    yum clean all && \
    rm -rf /var/cache/yum

# Non-root runtime user; -m already creates /home/user owned by the user,
# so no extra mkdir/chown layer is needed.
RUN useradd -m -s /bin/bash user

# Copy only the requirements manifest first so the dependency-install layer
# is cached independently of application-source changes.
COPY --chown=user comps/llms/src/text-generation/requirements.txt /home/user/comps/llms/src/text-generation/requirements.txt

ARG uvpip='uv pip install --system --no-cache-dir'
RUN pip install --no-cache-dir --upgrade pip setuptools uv && \
    $uvpip -r /home/user/comps/llms/src/text-generation/requirements.txt

# --chown so the non-root user owns the application tree (plain COPY would
# leave it root-owned under USER user).
COPY --chown=user comps /home/user/comps
ENV PYTHONPATH=$PYTHONPATH:/home/user

USER user

WORKDIR /home/user/comps/llms/src/text-generation

ENTRYPOINT ["bash", "entrypoint.sh"]
22 changes: 15 additions & 7 deletions tests/llms/test_llms_text-generation_service_tgi.sh
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@ service_name="textgen-service-tgi"

function build_docker_images() {
cd $WORKPATH
docker build --no-cache -t ${REGISTRY:-opea}/llm-textgen:${TAG:-latest} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/src/text-generation/Dockerfile .
dockerfile_name="comps/llms/src/text-generation/$1"
docker build --no-cache -t ${REGISTRY:-opea}/llm-textgen:${TAG:-latest} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f "${dockerfile_name}" .
if [ $? -ne 0 ]; then
echo "opea/llm-textgen built fail"
exit 1
Expand Down Expand Up @@ -112,25 +113,32 @@ function validate_microservice_with_openai() {
fi
}

# Tear down the text-generation compose deployment, including any orphan
# containers left over from a previous run. Safe to call repeatedly (also
# used as an EXIT trap by main).
function stop_service() {
    # Guard the cd: if WORKPATH is unset/wrong, don't run compose elsewhere.
    cd "$WORKPATH/comps/llms/deployment/docker_compose" || return
    docker compose -f compose_text-generation.yaml down --remove-orphans
}

# Test driver: builds the textgen image from a given Dockerfile, starts the
# TGI compose stack, validates the microservice, and repeats for the
# openEuler-based image. stop_service is registered as an EXIT trap so the
# stack is torn down even if a validation step fails mid-run.
function main() {

    echo "Test normal env ..."
    build_docker_images "Dockerfile"
    trap stop_service EXIT
    pip install --no-cache-dir openai pydantic
    start_service

    validate_microservices
    validate_microservice_with_openai

    stop_service

    echo "Test with openEuler OS ..."
    build_docker_images "Dockerfile.openEuler"
    start_service
    validate_microservices
    validate_microservice_with_openai
    stop_service

    # -f skips the interactive confirmation (replaces the old `echo y |` hack).
    docker system prune -f
}

# Entry point: run the full TGI text-generation test suite.
main
Loading