Skip to content

Commit 995a62c

Browse files
Add new test cases for VisualQnA (#712)
* Add new test cases for VisualQnA. Signed-off-by: lvliang-intel <liang1.lv@intel.com>
1 parent 9cf1d88 commit 995a62c

File tree

7 files changed

+376
-75
lines changed

7 files changed

+376
-75
lines changed
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

# Build definitions for the VisualQnA images, consumed by
# `docker compose -f docker_build_compose.yaml build <service...>` in the tests.
# REGISTRY/TAG default to opea/latest when unset.
services:
  # MegaService (gateway) image; proxy args are forwarded into the build.
  visualqna:
    build:
      args:
        http_proxy: ${http_proxy}
        https_proxy: ${https_proxy}
        no_proxy: ${no_proxy}
      dockerfile: ./Dockerfile
    image: ${REGISTRY:-opea}/visualqna:${TAG:-latest}
  # Svelte UI image; inherits the proxy build args from visualqna via extends.
  visualqna-ui:
    build:
      context: ui
      dockerfile: ./docker/Dockerfile
    extends: visualqna
    image: ${REGISTRY:-opea}/visualqna-ui:${TAG:-latest}
  # LVM wrapper microservice built from the GenAIComps checkout (cloned by the test).
  llm-visualqna-tgi:
    build:
      context: GenAIComps
      dockerfile: comps/lvms/Dockerfile_tgi
    extends: visualqna
    image: ${REGISTRY:-opea}/lvm-tgi:${TAG:-latest}

VisualQnA/docker/gaudi/README.md

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -19,15 +19,10 @@ cd GenAIComps
1919
docker build --no-cache -t opea/lvm-tgi:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/Dockerfile_tgi .
2020
```
2121

22-
### 3. Build TGI Gaudi Image
23-
24-
Since TGI Gaudi has not supported llava-next in main branch, we'll need to build it from a PR branch for now.
22+
### 3. Pull TGI Gaudi Image
2523

2624
```bash
27-
git clone https://github.com/huggingface/tgi-gaudi.git
28-
cd tgi-gaudi/
29-
docker build -t opea/llava-tgi:latest .
30-
cd ../
25+
docker pull ghcr.io/huggingface/tgi-gaudi:2.0.4
3126
```
3227

3328
### 4. Build MegaService Docker Image

VisualQnA/docker/gaudi/compose.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33

44
services:
55
llava-tgi-service:
6-
image: ${REGISTRY:-opea}/llava-tgi:${TAG:-latest}
6+
image: ghcr.io/huggingface/tgi-gaudi:2.0.4
77
container_name: tgi-llava-gaudi-server
88
ports:
99
- "8399:80"

VisualQnA/docker/xeon/compose.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ services:
1919
command: --model-id ${LVM_MODEL_ID} --max-input-length 4096 --max-total-tokens 8192 --cuda-graphs 0
2020
lvm-tgi:
2121
image: ${REGISTRY:-opea}/lvm-tgi:${TAG:-latest}
22-
container_name: lvm-tgi-server
22+
container_name: lvm-tgi-xeon-server
2323
depends_on:
2424
- llava-tgi-service
2525
ports:

VisualQnA/tests/test_basic_inference.sh

Lines changed: 0 additions & 66 deletions
This file was deleted.
Lines changed: 174 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,174 @@
1+
#!/bin/bash
2+
# Copyright (C) 2024 Intel Corporation
3+
# SPDX-License-Identifier: Apache-2.0
4+
5+
set -x
6+
IMAGE_REPO=${IMAGE_REPO:-"opea"}
7+
IMAGE_TAG=${IMAGE_TAG:-"latest"}
8+
echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}"
9+
echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
10+
export REGISTRY=${IMAGE_REPO}
11+
export TAG=${IMAGE_TAG}
12+
13+
WORKPATH=$(dirname "$PWD")
14+
LOG_PATH="$WORKPATH/tests"
15+
ip_address=$(hostname -I | awk '{print $1}')
16+
17+
function build_docker_images() {
    # Build all VisualQnA images with --no-cache and pull the TGI Gaudi image.
    # Build output goes to ${LOG_PATH}/docker_image_build.log.
    cd "$WORKPATH/docker" || exit 1
    # git clone fails if a previous run left the checkout behind; skip it then.
    if [ ! -d GenAIComps ]; then
        git clone https://github.com/opea-project/GenAIComps.git || exit 1
    fi

    echo "Build all the images with --no-cache, check docker_image_build.log for details..."
    service_list="visualqna visualqna-ui llm-visualqna-tgi"
    # ${service_list} is intentionally unquoted: each word is a compose service.
    docker compose -f docker_build_compose.yaml build ${service_list} --no-cache > "${LOG_PATH}/docker_image_build.log"

    docker pull ghcr.io/huggingface/tgi-gaudi:2.0.4
    docker images && sleep 1s
}
function start_services() {
    # Export the deployment configuration and bring the gaudi compose stack up,
    # then wait for the lvm wrapper to report a connection to TGI.
    cd "$WORKPATH/docker/gaudi" || exit 1

    export LVM_MODEL_ID="llava-hf/llava-v1.6-mistral-7b-hf"
    export LVM_ENDPOINT="http://${ip_address}:8399"
    export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
    export LVM_SERVICE_PORT=9399
    export MEGA_SERVICE_HOST_IP=${ip_address}
    export LVM_SERVICE_HOST_IP=${ip_address}
    export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:8888/v1/visualqna"

    # Point the UI at this host instead of the placeholder backend_address.
    sed -i "s/backend_address/$ip_address/g" "$WORKPATH/docker/ui/svelte/.env"

    # Start Docker Containers
    docker compose up -d > "${LOG_PATH}/start_services_with_compose.log"

    # Poll container logs for "Connected" (up to 100 * 5s). 2>&1 is needed
    # because `docker logs` emits the container's stderr stream to stderr,
    # which would otherwise never reach the grepped file.
    n=0
    until [[ "$n" -ge 100 ]]; do
        # NOTE(review): container name lvm-tgi-gaudi-server is assumed to match
        # the gaudi compose file — confirm against compose.yaml.
        docker logs lvm-tgi-gaudi-server > "${LOG_PATH}/lvm_tgi_service_start.log" 2>&1
        if grep -q Connected "${LOG_PATH}/lvm_tgi_service_start.log"; then
            break
        fi
        sleep 5s
        n=$((n+1))
    done
}
56+
function validate_services() {
    # Generic POST validator: send INPUT_DATA to URL, require HTTP 200 and
    # EXPECTED_RESULT in the response body; on mismatch, dump the container's
    # logs into ${LOG_PATH}/<service>.log and abort the whole test run.
    local URL="$1"
    local EXPECTED_RESULT="$2"
    local SERVICE_NAME="$3"
    local DOCKER_NAME="$4"
    local INPUT_DATA="$5"

    # Issue the request ONCE; -w appends the status code on a final line.
    # (The original curled twice, so the status check and the content check
    # could observe two different responses from a non-deterministic model.)
    local RESPONSE HTTP_STATUS CONTENT
    RESPONSE=$(curl -s -w "\n%{http_code}" -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL")
    HTTP_STATUS=$(printf '%s' "$RESPONSE" | tail -n 1)
    CONTENT=$(printf '%s' "$RESPONSE" | sed '$d' | tee "${LOG_PATH}/${SERVICE_NAME}.log")

    if [ "$HTTP_STATUS" -eq 200 ]; then
        echo "[ $SERVICE_NAME ] HTTP status is 200. Checking content..."

        if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then
            echo "[ $SERVICE_NAME ] Content is as expected."
        else
            echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT"
            docker logs "${DOCKER_NAME}" >> "${LOG_PATH}/${SERVICE_NAME}.log"
            exit 1
        fi
    else
        echo "[ $SERVICE_NAME ] HTTP status is not 200. Received status was $HTTP_STATUS"
        docker logs "${DOCKER_NAME}" >> "${LOG_PATH}/${SERVICE_NAME}.log"
        exit 1
    fi
    sleep 1s
}
84+
function validate_microservices() {
    # Smoke-test the standalone lvm microservice on port 9399 with a tiny
    # base64 PNG; the reply is expected to contain "The image".
    local lvm_payload='{"image": "iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAFUlEQVR42mP8/5+hnoEIwDiqkL4KAcT9GO0U4BxoAAAAAElFTkSuQmCC", "prompt":"What is this?"}'

    validate_services \
        "${ip_address}:9399/v1/lvm" \
        "The image" \
        "lvm-tgi" \
        "lvm-tgi-gaudi-server" \
        "$lvm_payload"
}
96+
function validate_megaservice() {
97+
# Curl the Mega Service
98+
validate_services \
99+
"${ip_address}:8888/v1/visualqna" \
100+
"The image" \
101+
"visualqna-gaudi-backend-server" \
102+
"visualqna-gaudi-backend-server" \
103+
'{
104+
"messages": [
105+
{
106+
"role": "user",
107+
"content": [
108+
{
109+
"type": "text",
110+
"text": "What'\''s in this image?"
111+
},
112+
{
113+
"type": "image_url",
114+
"image_url": {
115+
"url": "https://www.ilankelman.org/stopsigns/australia.jpg"
116+
}
117+
}
118+
]
119+
}
120+
],
121+
"max_tokens": 300
122+
}'
123+
}
124+
125+
function validate_frontend() {
    # Run the Svelte UI's playwright end-to-end suite inside a dedicated conda
    # environment (created on first use). Exits non-zero if any UI test fails.
    cd "$WORKPATH/docker/ui/svelte" || exit 1
    local conda_env_name="OPEA_e2e"
    export PATH=${HOME}/miniforge3/bin/:$PATH
    if conda info --envs | grep -q "$conda_env_name"; then
        echo "$conda_env_name exist!"
    else
        conda create -n "${conda_env_name}" python=3.12 -y
    fi
    source activate ${conda_env_name}

    # Point playwright at this host instead of localhost.
    sed -i "s/localhost/$ip_address/g" playwright.config.ts

    conda install -c conda-forge nodejs -y
    npm install && npm ci && npx playwright install --with-deps
    node -v && npm -v && pip list

    # Capture playwright's exit status without tripping a premature exit.
    exit_status=0
    npx playwright test || exit_status=$?

    if [ $exit_status -ne 0 ]; then
        echo "[TEST INFO]: ---------frontend test failed---------"
        exit $exit_status
    else
        echo "[TEST INFO]: ---------frontend test passed---------"
    fi
}
153+
function stop_docker() {
    # Tear down the gaudi compose stack; rm -f removes the stopped containers
    # so the next run starts clean. Guarding cd prevents running compose
    # against whatever directory we happen to be in when the path is missing.
    cd "$WORKPATH/docker/gaudi" || exit 1
    docker compose stop && docker compose rm -f
}
158+
function main() {
159+
160+
stop_docker
161+
162+
if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi
163+
start_services
164+
165+
validate_microservices
166+
validate_megaservice
167+
#validate_frontend
168+
169+
stop_docker
170+
echo y | docker system prune
171+
172+
}
173+
174+
main

0 commit comments

Comments
 (0)