-
Notifications
You must be signed in to change notification settings - Fork 216
Expand file tree
/
Copy path: test_sql_agent.sh
More file actions
197 lines (164 loc) · 6.02 KB
/
test_sql_agent.sh
File metadata and controls
197 lines (164 loc) · 6.02 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
#!/bin/bash
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#set -xe
# this script should be run from tests directory
# bash agent/sql_agent_test/test_sql_agent.sh

# WORKPATH = repo root (parent of the tests/ dir we are run from).
WORKPATH=$(dirname "$PWD")
echo "$WORKPATH"
LOG_PATH="$WORKPATH/tests"
# WORKDIR is one level up from GenAIComps.
# Split declaration from export so the command substitution's exit status is not masked.
WORKDIR=$(dirname "$WORKPATH")
export WORKDIR
echo "$WORKDIR"
export agent_image="opea/agent:comps"
export agent_container_name="test-comps-agent-endpoint"
# First IP reported by hostname -I; used to reach the vLLM endpoint from containers.
ip_address=$(hostname -I | awk '{print $1}')
export ip_address
vllm_port=8086
vllm_volume=${HF_CACHE_DIR}
export model=meta-llama/Llama-3.3-70B-Instruct #meta-llama/Meta-Llama-3.1-70B-Instruct
export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN}
export LLM_MODEL_ID="meta-llama/Llama-3.3-70B-Instruct" #"meta-llama/Meta-Llama-3.1-70B-Instruct"
export LLM_ENDPOINT_URL="http://${ip_address}:${vllm_port}"
export temperature=0.01
export max_new_tokens=4096
# Tools directory mounted into the agent container.
export TOOLSET_PATH=$WORKPATH/comps/agent/src/tools/ # $WORKPATH/tests/agent/sql_agent_test/
echo "TOOLSET_PATH=${TOOLSET_PATH}"
export recursion_limit=15
export db_name=Chinook
export db_path="sqlite:////home/user/chinook-db/Chinook_Sqlite.sqlite"
# for using Google search API
export GOOGLE_CSE_ID=${GOOGLE_CSE_ID}
export GOOGLE_API_KEY=${GOOGLE_API_KEY}
# download the test data
# Download the TAG-Bench dataset, fetch its databases, and split the queries
# for the benchmark. Aborts if any directory change fails so later steps never
# run in the wrong location.
function prepare_data() {
    cd "$WORKDIR" || exit 1
    echo "Downloading data..."
    git clone https://github.com/TAG-Research/TAG-Bench.git
    cd TAG-Bench/setup || exit 1
    chmod +x get_dbs.sh
    ./get_dbs.sh
    echo "Split data..."
    cd "$WORKPATH/tests/agent/sql_agent_test" || exit 1
    bash run_data_split.sh
    echo "Data preparation done!"
}
# Clone the Chinook sample database and copy the SQLite file into tests/agent/.
# Skips the clone when the repo is already present so reruns do not fail on
# "destination path already exists".
function download_chinook_data(){
    echo "Downloading chinook data..."
    cd "$WORKDIR" || exit 1
    if [ ! -d "chinook-database" ]; then
        git clone https://github.com/lerocha/chinook-database.git
    fi
    cp chinook-database/ChinookDatabase/DataSources/Chinook_Sqlite.sqlite "$WORKDIR/GenAIComps/tests/agent/"
}
# Delete the cloned chinook-database repo from $WORKDIR, if it exists.
function remove_chinook_data(){
    echo "Removing chinook data..."
    cd "$WORKDIR"
    # Only remove when the clone is actually present.
    [ -d "chinook-database" ] && rm -rf chinook-database
    echo "Chinook data removed!"
}
# Drop the downloaded TAG-Bench dataset directory from $WORKDIR.
function remove_data() {
    echo "Removing data..."
    cd "$WORKDIR"
    rm -rf -- TAG-Bench
    echo "Data removed!"
}
# Produce the hints file consumed by the SQL agent benchmark.
function generate_hints_for_benchmark() {
    echo "Generating hints for benchmark..."
    cd "$WORKPATH/tests/agent/sql_agent_test"
    python3 generate_hints_file.py
}
# Build the agent docker image from comps/agent/src/Dockerfile.
# Exits the script on build failure. Uses `if ! cmd` instead of checking $?
# afterwards, which is both idiomatic and immune to intervening commands.
function build_docker_images() {
    echo "Building the docker images"
    cd "$WORKPATH" || exit 1
    echo "$WORKPATH"
    if ! docker build --no-cache -t "$agent_image" --build-arg http_proxy="$http_proxy" --build-arg https_proxy="$https_proxy" -f comps/agent/src/Dockerfile .; then
        echo "opea/agent built fail"
        exit 1
    else
        echo "opea/agent built successful"
    fi
}
# Build the vLLM Gaudi (HPU) docker image from the latest tagged release of
# the HabanaAI fork.
function build_vllm_docker_images() {
    echo "Building the vllm docker images"
    cd "$WORKPATH" || exit 1
    echo "$WORKPATH"
    # BUG FIX: the clone target is ./vllm-fork, but the old guard checked
    # ./vllm, so every rerun attempted (and failed) a fresh clone.
    if [ ! -d "./vllm-fork" ]; then
        git clone https://github.com/HabanaAI/vllm-fork.git
    fi
    cd ./vllm-fork || exit 1
    # Most recent tag in the fork.
    VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
    echo "Check out vLLM tag ${VLLM_VER}"
    git checkout "${VLLM_VER}" &> /dev/null
    if ! docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:comps --shm-size=128g . --build-arg https_proxy="$https_proxy" --build-arg http_proxy="$http_proxy"; then
        echo "opea/vllm-gaudi:comps failed"
        exit 1
    else
        echo "opea/vllm-gaudi:comps successful"
    fi
}
# Launch the vLLM Gaudi serving container and poll its logs (up to 100 * 5s)
# for the Uvicorn readiness banner.
function start_vllm_service() {
    echo "token is ${HF_TOKEN}"
    #single card
    echo "start vllm gaudi service"
    echo "**************model is $model**************"
    docker run -d --runtime=habana --rm --name "test-comps-vllm-gaudi-service" -e HABANA_VISIBLE_DEVICES=0,1,2,3 -p $vllm_port:80 -v $vllm_volume:/data -e HF_TOKEN=$HF_TOKEN -e HF_HOME=/data -e OMPI_MCA_btl_vader_single_copy_mechanism=none -e PT_HPU_ENABLE_LAZY_COLLECTIVES=true -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e no_proxy=$no_proxy -e VLLM_SKIP_WARMUP=true --cap-add=sys_nice --ipc=host opea/vllm-gaudi:comps --model ${model} --host 0.0.0.0 --port 80 --block-size 128 --max-seq-len-to-capture 16384 --tensor-parallel-size 4
    sleep 5s
    echo "Waiting vllm gaudi ready"
    local n=0
    local ready=false
    # BUG FIX: $ready was referenced in the old loop condition but never
    # assigned; it is now set when the readiness banner appears.
    until [[ "$n" -ge 100 ]]; do
        docker logs test-comps-vllm-gaudi-service &> ${LOG_PATH}/vllm-gaudi-service.log
        n=$((n+1))
        if grep -q "Uvicorn running on" ${LOG_PATH}/vllm-gaudi-service.log; then
            ready=true
            break
        fi
        if grep -q "No such container" ${LOG_PATH}/vllm-gaudi-service.log; then
            echo "container test-comps-vllm-gaudi-service not found"
            exit 1
        fi
        sleep 5s
    done
    sleep 5s
    # BUG FIX: previously the function reported success even when the poll
    # loop timed out; now a timeout fails fast with the container logs.
    if [[ "$ready" != true ]]; then
        echo "vllm gaudi service failed to become ready in time"
        docker logs test-comps-vllm-gaudi-service
        exit 1
    fi
    echo "Service started successfully"
}
# launch the agent
# Bring up the llama-backed SQL agent with docker compose, give it time to
# initialize, then dump its logs for the CI record.
function start_sql_agent_llama_service() {
    local compose_file="$WORKPATH/tests/agent/sql_agent_llama.yaml"
    echo "Starting sql_agent_llama agent microservice"
    docker compose -f "$compose_file" up -d
    sleep 3m
    docker logs test-comps-agent-endpoint
    echo "Service started successfully"
}
# Bring up the OpenAI-backed SQL agent with docker compose, give it time to
# initialize, then dump its logs for the CI record.
function start_sql_agent_openai_service() {
    local compose_file="$WORKPATH/tests/agent/sql_agent_openai.yaml"
    export OPENAI_API_KEY=${OPENAI_API_KEY}
    echo "Starting sql_agent_openai agent microservice"
    docker compose -f "$compose_file" up -d
    sleep 3m
    docker logs test-comps-agent-endpoint
    echo "Service started successfully"
}
# run the test
# Execute the SQL-agent endpoint test against the running agent service.
function run_test() {
    echo "Running test..."
    cd "$WORKPATH/tests/agent/"
    python3 test.py --test-sql-agent
}
# Run the TAG-Bench california_schools query set through the SQL agent and
# write results to $WORKDIR/sql_agent_output. Paths are quoted and kept
# function-local so they no longer leak into the global scope.
function run_benchmark() {
    echo "Running benchmark..."
    cd "$WORKPATH/tests/agent/sql_agent_test" || exit 1
    local query_file="${WORKDIR}/TAG-Bench/query_by_db/query_california_schools.csv"
    local outdir="$WORKDIR/sql_agent_output"
    local outfile=california_school_agent_test_result.csv
    python3 test_tag_bench.py --query_file "$query_file" --output_dir "$outdir" --output_file "$outfile"
}
# --- main flow: fetch the Chinook test DB, start the agent, run the test, clean up ---
echo "Preparing data...."
download_chinook_data
echo "launching sql_agent_llama service...."
start_sql_agent_llama_service
# The OpenAI-backed variant is available but disabled in CI.
# echo "launching sql_agent_openai service...."
# start_sql_agent_openai_service
echo "Running test...."
run_test
echo "Removing data...."
remove_chinook_data