Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .github/workflows/docker/compose/third_parties-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -100,5 +100,8 @@ services:
image: ${REGISTRY:-opea}/lvm-llama-vision-guard:${TAG:-latest}
ipex-llm:
build:
args:
COMPILE: ON
PORT_SSH: 2345
dockerfile: comps/third_parties/ipex/src/Dockerfile
image: ${REGISTRY:-opea}/ipex-llm:${TAG:-latest}
3 changes: 2 additions & 1 deletion comps/third_parties/ipex/src/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ RUN apt-get update && \
g++-12 \
gcc-12 \
git \
make
make \
numactl \
wget

Expand Down Expand Up @@ -47,6 +47,7 @@ RUN apt-get update && \
openssh-server && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* && \
if [ -f /etc/apt/apt.conf.d/proxy.conf ]; then rm /etc/apt/apt.conf.d/proxy.conf; fi

COPY --from=dev /root/intel-extension-for-pytorch/examples/cpu/llm ./llm
COPY --from=dev /root/intel-extension-for-pytorch/tools/get_libstdcpp_lib.sh ./llm/tools
Expand Down
78 changes: 78 additions & 0 deletions tests/third_parties/test_third_parties_ipex.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

# CI smoke test for the opea/ipex-llm third-party image:
# build the image, bring it up via docker compose, send a single
# chat-completion request, then tear everything down.

# Trace every command for CI log debugging.
set -x

# Repository root — this script is expected to run from a direct
# subdirectory of the repo (e.g. tests/).
WORKPATH=$(dirname "$PWD")
# First IPv4 address of this host; exported later as host_ip for compose.
ip_address=$(hostname -I | awk '{print $1}')
# Model cache directory; 'model_cache' is presumably injected by the CI
# environment — DATA_PATH ends up empty when it is unset. TODO confirm
# the compose file tolerates an empty DATA_PATH.
export DATA_PATH=${model_cache}

# Build the opea/ipex-llm:comps image with the same build args the
# compose file uses (COMPILE=ON, PORT_SSH=2345). Exits 1 on failure.
function build_docker_images() {
    echo "Start building docker images for microservice"
    # Abort early if the repo root is missing — building from the wrong
    # directory would produce a confusing docker error instead.
    cd "$WORKPATH" || { echo "cannot cd to $WORKPATH"; exit 1; }
    # Test the command directly instead of inspecting $? afterwards.
    if ! docker build --no-cache -t opea/ipex-llm:comps \
        --build-arg https_proxy="$https_proxy" \
        --build-arg http_proxy="$http_proxy" \
        --build-arg COMPILE=ON \
        --build-arg PORT_SSH=2345 \
        -f comps/third_parties/ipex/src/Dockerfile .; then
        echo "opea/ipex-llm built fail"
        exit 1
    else
        echo "opea/ipex-llm built successful"
    fi
}

# Start the ipex-llm service with docker compose.
# Exports: host_ip, MODEL_ID, TAG (consumed by compose.yaml).
# Exits 1 if the compose directory is missing or compose fails to start.
function start_service() {
    echo "Starting microservice"
    export host_ip=${ip_address}
    export MODEL_ID="microsoft/phi-4"
    export TAG=comps
    # Quote the path and fail loudly instead of running compose from the
    # wrong directory when cd fails.
    cd "$WORKPATH/comps/third_parties/ipex/deployment/docker_compose" \
        || { echo "cannot cd to compose directory"; exit 1; }
    if ! docker compose -f compose.yaml up -d; then
        echo "docker compose up failed"
        exit 1
    fi
    echo "Microservice started"
    # Give the server time to load the model and warm up before probing.
    sleep 120
}

# Probe the OpenAI-compatible endpoint with one chat-completion request
# and check the reply echoes the prompt topic. Dumps container logs and
# exits 1 on failure.
function validate_microservice() {
    echo "Validate microservice started"
    # The model name in the request must match the MODEL_ID the service
    # was launched with (start_service exports MODEL_ID="microsoft/phi-4");
    # the previously hard-coded "microsoft/Phi-4-mini-instruct" did not
    # match and servers typically reject unknown model names.
    # http_proxy is cleared so the localhost request is not proxied.
    result=$(http_proxy="" curl http://localhost:8688/v1/chat/completions \
        -X POST \
        -H "Content-Type: application/json" \
        -d '{
              "model": "'"${MODEL_ID:-microsoft/phi-4}"'",
              "messages": [
                  {"role": "user", "content": "What is Deep Learning?"}
              ],
              "max_tokens": 32
            }')
    # A real completion for this prompt is expected to mention "Deep".
    if [[ $result == *"Deep"* ]]; then
        echo "Result correct."
    else
        echo "Result wrong."
        docker logs ipex-llm-server
        exit 1
    fi
}

# Stop and remove any leftover ipex-llm-server container from a
# previous run; a short sleep lets docker settle after removal.
function stop_docker() {
    local container_ids
    container_ids=$(docker ps -aq --filter "name=ipex-llm-server")
    echo "Shutdown legacy containers" $container_ids
    if [[ -n "$container_ids" ]]; then
        docker stop $container_ids && docker rm $container_ids && sleep 1s
    fi
}

# Full pipeline: clean slate -> build -> start -> validate -> teardown.
function main() {

    stop_docker

    build_docker_images
    start_service

    validate_microservice

    stop_docker
    echo "cleanup container images and volumes"
    # Fixed redirection order: the original '2>&1 > /dev/null' pointed
    # stderr at the terminal and silenced only stdout; the intent is to
    # suppress all prune output.
    echo y | docker system prune > /dev/null 2>&1

}

# Entry point: run the whole build/start/validate/cleanup sequence.
main
Loading