
Commit 38abaab

Fix lvms video-llama code issue (#654)
Signed-off-by: letonghan <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 90a3f4b commit 38abaab

2 files changed (+10 / -11 lines)


comps/lvms/video-llama/dependency/server.py

Lines changed: 1 addition & 1 deletion
@@ -37,7 +37,7 @@
 streamer = None
 chat = None
 
-VIDEO_DIR = "/home/user/comps/lvms/video-llama/server/data"
+VIDEO_DIR = "/home/user/comps/lvms/video-llama/dependency/data"
 
 CFG_PATH = "video_llama_config/video_llama_eval_only_vl.yaml"
 MODEL_TYPE = "llama_v2"
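
The one-line change in server.py points VIDEO_DIR at the dependency/ path, presumably matching the directory layout built by comps/lvms/video-llama/dependency/Dockerfile (the same Dockerfile used in the test below). Not part of the commit, but a quick way to confirm the new path exists once the dependency container started by the test is running:

# Hypothetical sanity check; assumes the dependency container from the test script is up.
docker exec test-comps-lvm-video-llama-dependency \
    ls /home/user/comps/lvms/video-llama/dependency/data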

tests/lvms/test_lvms_video-llama.sh

Lines changed: 9 additions & 10 deletions
@@ -11,14 +11,14 @@ ip_address=$(hostname -I | awk '{print $1}')
 function build_docker_images() {
     cd $WORKPATH
     echo $(pwd)
-    docker build --no-cache -t opea/video-llama-lvm-server:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/video-llama/dependency/Dockerfile .
+    docker build --no-cache -t opea/video-llama-lvm-server:comps --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/video-llama/dependency/Dockerfile .
     if $? ; then
         echo "opea/video-llama-lvm-server built fail"
         exit 1
     else
         echo "opea/video-llama-lvm-server built successful"
     fi
-    docker build --no-cache -t opea/lvm-video-llama:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/video-llama/Dockerfile .
+    docker build --no-cache -t opea/lvm-video-llama:comps --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/video-llama/Dockerfile .
     if $? ; then
         echo "opea/lvm-video-llama built fail"
         exit 1
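
A side note, not touched by this commit: the surrounding `if $? ; then` check runs the previous exit status as a command, so a failed docker build does not reliably reach the "built fail" branch. A minimal sketch of the conventional status check, reusing the image tag introduced above, purely for illustration:

# Sketch only, not part of the commit: conventional exit-status check after a build.
docker build --no-cache -t opea/video-llama-lvm-server:comps \
    -f comps/lvms/video-llama/dependency/Dockerfile .
if [ $? -ne 0 ]; then
    echo "opea/video-llama-lvm-server built fail"
    exit 1
else
    echo "opea/video-llama-lvm-server built successful"
fi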
@@ -41,23 +41,22 @@ function start_service() {
         -e https_proxy=$https_proxy \
         -e no_proxy=$no_proxy \
         -e llm_download="True" \
-        -v "/home/$USER/.cache:/home/user/.cache" \
-        -v video-llama-model:/home/user/model \
-        opea/video-llama-lvm-server:latest
+        opea/video-llama-lvm-server:comps
 
     docker run -d --name="test-comps-lvm-video-llama" -p $server_port:9000 \
         --ipc=host \
         -e http_proxy=$http_proxy \
         -e https_proxy=$https_proxy \
         -e no_proxy=$no_proxy \
         -e LVM_ENDPOINT=$LVM_ENDPOINT \
-        opea/lvm-video-llama:latest
+        opea/lvm-video-llama:comps
 
     echo "Waiting for the LVM service to start"
+
     # check whether lvm dependency is fully ready
     n=0
     until [[ "$n" -ge 100 ]] || [[ $ready == true ]]; do
-        docker logs test-comps-lvm-video-llama-dependency >> ${LOG_PATH}/lvm-video-llama-dependency.log
+        docker logs test-comps-lvm-video-llama-dependency &> ${LOG_PATH}/lvm-video-llama-dependency.log
         n=$((n+1))
         if grep -q "Uvicorn running on" ${LOG_PATH}/lvm-video-llama-dependency.log; then
             break
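
The redirection change is the functional part of this hunk: `docker logs` replays the container's stderr stream on the host's stderr, and uvicorn normally prints its "Uvicorn running on" banner to stderr, so appending only stdout with `>>` can leave the grep below with nothing to match. A minimal sketch of the difference, with a placeholder container name and log file:

# Sketch only: what each redirection captures (names are placeholders).
docker logs some-container >> poll.log    # stdout only, appended on every poll
docker logs some-container &> poll.log    # stdout and stderr, file rewritten each poll
grep -q "Uvicorn running on" poll.log && echo "service is up"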
@@ -69,7 +68,7 @@ function start_service() {
     # check whether lvm service is fully ready
     n=0
     until [[ "$n" -ge 100 ]] || [[ $ready == true ]]; do
-        docker logs test-comps-lvm-video-llama >> ${LOG_PATH}/lvm-video-llama.log
+        docker logs test-comps-lvm-video-llama &> ${LOG_PATH}/lvm-video-llama.log
         n=$((n+1))
         if grep -q "Uvicorn running on" ${LOG_PATH}/lvm-video-llama.log; then
             break
@@ -88,8 +87,8 @@ function validate_microservice() {
         echo "Result correct."
     else
         echo "Result wrong."
-        docker logs test-comps-lvm-video-llama-dependency >> ${LOG_PATH}/video-llama-dependency.log
-        docker logs test-comps-lvm-video-llama >> ${LOG_PATH}/video-llama.log
+        docker logs test-comps-lvm-video-llama-dependency &> ${LOG_PATH}/lvm-video-llama-dependency.log
+        docker logs test-comps-lvm-video-llama &> ${LOG_PATH}/lvm-video-llama.log
         exit 1
     fi
 }
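
The last hunk also renames the failure-path log files so they match the ones polled during startup, so each container's output lands in a single, predictable file. Not part of the commit, but the obvious place to look when the test prints "Result wrong.":

# Sketch only: inspect the captured container logs after a failed validation.
cat ${LOG_PATH}/lvm-video-llama-dependency.log
cat ${LOG_PATH}/lvm-video-llama.log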
