Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion .github/workflows/_comps-workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,10 @@ jobs:
cd ./vllm-openvino && git checkout v0.6.1 && git rev-parse HEAD && cd ../
fi
if [[ $(grep -c "vllm-gaudi:" "${docker_compose_yml}") != 0 ]]; then
  # Build from the most recently created tag of HabanaAI/vllm-fork.
  # NOTE(review): `git rev-list --tags --max-count=1` picks the newest tag by
  # commit date, which may be a pre-release — confirm this is intended.
  git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
  VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
  echo "Check out vLLM tag ${VLLM_VER}"
  # Do NOT redirect checkout output to /dev/null: a failed checkout would
  # previously go unnoticed and the image would build from the default branch.
  git checkout "${VLLM_VER}" && cd ../
fi
- name: Get build list
id: get-build-list
Expand Down
5 changes: 4 additions & 1 deletion .github/workflows/push-image-build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,10 @@ jobs:
cd ./vllm-openvino && git checkout v0.6.1 && git rev-parse HEAD && cd ../
fi
if [[ $(grep -c "vllm-gaudi:" "${docker_compose_path}") != 0 ]]; then
  # Build from the most recently created tag of HabanaAI/vllm-fork.
  # NOTE(review): `git rev-list --tags --max-count=1` picks the newest tag by
  # commit date, which may be a pre-release — confirm this is intended.
  git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
  VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
  echo "Check out vLLM tag ${VLLM_VER}"
  # Do NOT redirect checkout output to /dev/null: a failed checkout would
  # previously go unnoticed and the image would build from the default branch.
  git checkout "${VLLM_VER}" && cd ../
fi

- name: Build Image
Expand Down
9 changes: 7 additions & 2 deletions .github/workflows/scripts/get_test_matrix.sh
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ cd $WORKSPACE
changed_files_full=$changed_files_full
run_matrix="{\"include\":["

# add test services when comps code change

function find_test_1() {
local pre_service_path=$1
local n=$2
Expand Down Expand Up @@ -97,7 +97,7 @@ function _fill_in_matrix() {
sleep 1s
}

# add test case when test scripts code change

function find_test_2() {
test_files=$(printf '%s\n' "${changed_files[@]}" | grep -E "\.sh") || true
for test_file in ${test_files}; do
Expand All @@ -108,6 +108,7 @@ function find_test_2() {
done
}


function find_test_3() {
yaml_files=${changed_files}
for yaml_file in ${yaml_files}; do
Expand All @@ -127,8 +128,10 @@ function find_test_3() {
done
}


function main() {

# add test services when comps code change
changed_files=$(printf '%s\n' "${changed_files_full[@]}" | grep 'comps/' | grep -vE '\.md|comps/cores|comps/third_parties|deployment|\.yaml') || true
echo "===========start find_test_1============"
echo "changed_files=${changed_files}"
Expand All @@ -137,6 +140,7 @@ function main() {
echo "run_matrix=${run_matrix}"
echo "===========finish find_test_1============"

# add test case when test scripts code change
changed_files=$(printf '%s\n' "${changed_files_full[@]}" | grep 'tests/' | grep -vE '\.md|\.txt|tests/cores') || true
echo "===========start find_test_2============"
echo "changed_files=${changed_files}"
Expand All @@ -145,6 +149,7 @@ function main() {
echo "run_matrix=${run_matrix}"
echo "===========finish find_test_2============"

# add test case when docker-compose code change
changed_files=$(printf '%s\n' "${changed_files_full[@]}" | grep 'deployment/docker_compose/compose' | grep '.yaml') || true
echo "===========start find_test_3============"
echo "changed_files=${changed_files}"
Expand Down
3 changes: 2 additions & 1 deletion comps/agent/src/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,8 @@ export vllm_volume=${YOUR_LOCAL_DIR_FOR_MODELS}
# build vLLM image
git clone https://github.com/HabanaAI/vllm-fork.git
cd ./vllm-fork
git checkout v0.6.4.post2+Gaudi-1.19.0
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
git checkout ${VLLM_VER} &> /dev/null
docker build -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy

# vllm serving on 4 Gaudi2 cards
Expand Down
7 changes: 4 additions & 3 deletions comps/third_parties/vllm/src/build_docker_vllm.sh
Original file line number Diff line number Diff line change
Expand Up @@ -35,9 +35,10 @@ fi

# Build the docker image for vLLM based on the hardware mode
if [ "$hw_mode" = "hpu" ]; then
git clone https://github.com/HabanaAI/vllm-fork.git
cd ./vllm-fork/
git checkout v0.6.4.post2+Gaudi-1.19.0
git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
echo "Check out vLLM tag ${VLLM_VER}"
git checkout ${VLLM_VER} &> /dev/null
docker build -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
cd ..
rm -rf vllm-fork
Expand Down
6 changes: 3 additions & 3 deletions tests/agent/build_vllm_gaudi.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,9 @@ function build_vllm_docker_images() {
git clone https://github.com/HabanaAI/vllm-fork.git
fi
cd ./vllm-fork
# git fetch --all
# git checkout v0.6.4.post2+Gaudi-1.19.0
# sed -i 's/triton/triton==3.1.0/g' requirements-hpu.txt
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
echo "Check out vLLM tag ${VLLM_VER}"
git checkout ${VLLM_VER} &> /dev/null
docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:comps --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
if [ $? -ne 0 ]; then
echo "opea/vllm-gaudi:comps failed"
Expand Down
3 changes: 3 additions & 0 deletions tests/agent/sql_agent_test/test_sql_agent.sh
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,9 @@ function build_vllm_docker_images() {
git clone https://github.com/HabanaAI/vllm-fork.git
fi
cd ./vllm-fork
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
echo "Check out vLLM tag ${VLLM_VER}"
git checkout ${VLLM_VER} &> /dev/null
docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:comps --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
if [ $? -ne 0 ]; then
echo "opea/vllm-gaudi:comps failed"
Expand Down
4 changes: 3 additions & 1 deletion tests/agent/test_agent_langchain_on_intel_hpu.sh
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,9 @@ function build_vllm_docker_images() {
git clone https://github.com/HabanaAI/vllm-fork.git
fi
cd ./vllm-fork
git checkout v0.6.4.post2+Gaudi-1.19.0
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
echo "Check out vLLM tag ${VLLM_VER}"
git checkout ${VLLM_VER} &> /dev/null
docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:comps --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
if [ $? -ne 0 ]; then
echo "opea/vllm-gaudi:comps failed"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,9 @@ function build_docker_images() {
cd $WORKPATH
git clone https://github.com/HabanaAI/vllm-fork.git
cd vllm-fork/
git checkout v0.6.4.post2+Gaudi-1.19.0
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
echo "Check out vLLM tag ${VLLM_VER}"
git checkout ${VLLM_VER} &> /dev/null
docker build --no-cache --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile.hpu -t opea/vllm-gaudi:comps --shm-size=128g .
if [ $? -ne 0 ]; then
echo "opea/vllm-gaudi built fail"
Expand Down Expand Up @@ -44,8 +46,16 @@ function start_service() {
cd $WORKPATH
cd comps/guardrails/deployment/docker_compose/
docker compose up ${service_name} -d
if [ $? -ne 0 ]; then
  # Report the failure and dump per-container logs before bailing out.
  echo "Microservice failed to start!" >&2
  # $service_name is intentionally unquoted: it may hold several
  # space-separated service names to iterate over.
  for service in $service_name; do
    echo "Logs for $service..."
    # Quote the name so a service containing unusual characters cannot
    # be word-split into extra docker arguments.
    docker logs "$service"
  done
  exit 1
fi
echo "Microservice started"
sleep 15
sleep 1m
}

function validate_microservice() {
Expand Down
8 changes: 6 additions & 2 deletions tests/llms/test_llms_doc-summarization_vllm_on_intel_hpu.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,9 @@ function build_docker_images() {
cd $WORKPATH
git clone https://github.com/HabanaAI/vllm-fork.git
cd vllm-fork/
git checkout v0.6.4.post2+Gaudi-1.19.0
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
echo "Check out vLLM tag ${VLLM_VER}"
git checkout ${VLLM_VER} &> /dev/null
docker build --no-cache -f Dockerfile.hpu -t ${REGISTRY:-opea}/vllm-gaudi:${TAG:-latest} --shm-size=128g .
if [ $? -ne 0 ]; then
echo "opea/vllm-gaudi built fail"
Expand Down Expand Up @@ -54,10 +56,11 @@ function start_service() {
cd $WORKPATH/comps/llms/deployment/docker_compose
docker compose -f compose_doc-summarization.yaml up ${service_name} -d > ${LOG_PATH}/start_services_with_compose.log

sleep 30s
sleep 1m
}

function validate_services() {
date
local URL="$1"
local EXPECTED_RESULT="$2"
local SERVICE_NAME="$3"
Expand Down Expand Up @@ -87,6 +90,7 @@ function validate_services() {
docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log
exit 1
fi
date
sleep 1s
}

Expand Down
12 changes: 10 additions & 2 deletions tests/llms/test_llms_faq-generation_tgi_on_intel_hpu.sh
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,16 @@ function start_service() {

cd $WORKPATH/comps/llms/deployment/docker_compose
docker compose -f compose_faq-generation.yaml up ${service_name} -d > ${LOG_PATH}/start_services_with_compose.log

sleep 30s
if [ $? -ne 0 ]; then
  # Report the failure and dump per-container logs before bailing out.
  echo "Microservice failed to start!" >&2
  # $service_name is intentionally unquoted: it may hold several
  # space-separated service names to iterate over.
  for service in $service_name; do
    echo "Logs for $service..."
    # Quote the name so a service containing unusual characters cannot
    # be word-split into extra docker arguments.
    docker logs "$service"
  done
  exit 1
fi
echo "Microservice started"
sleep 1m
}

function validate_services() {
Expand Down
4 changes: 3 additions & 1 deletion tests/llms/test_llms_faq-generation_vllm_on_intel_hpu.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,9 @@ function build_docker_images() {
cd $WORKPATH
git clone https://github.com/HabanaAI/vllm-fork.git
cd vllm-fork/
git checkout v0.6.4.post2+Gaudi-1.19.0
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
echo "Check out vLLM tag ${VLLM_VER}"
git checkout ${VLLM_VER} &> /dev/null
docker build --no-cache -f Dockerfile.hpu -t ${REGISTRY:-opea}/vllm-gaudi:${TAG:-latest} --shm-size=128g .
if [ $? -ne 0 ]; then
echo "opea/vllm-gaudi built fail"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,9 @@ function build_docker_images() {
cd $WORKPATH
git clone https://github.com/HabanaAI/vllm-fork.git
cd vllm-fork/
git checkout v0.6.4.post2+Gaudi-1.19.0
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
echo "Check out vLLM tag ${VLLM_VER}"
git checkout ${VLLM_VER} &> /dev/null
docker build --no-cache -f Dockerfile.hpu -t ${REGISTRY:-opea}/vllm-gaudi:${TAG:-latest} --shm-size=128g .
if [ $? -ne 0 ]; then
echo "opea/vllm-gaudi built fail"
Expand Down