Commit 7714b05

Upgrade Torch and its dependencies to v2.5.x for video-llama (opea-project#1551)
Signed-off-by: Abolfazl Shahbazi <[email protected]>
1 parent 6e30a85 commit 7714b05

File tree

  comps/third_parties/video-llama/src/Dockerfile
  comps/third_parties/video-llama/src/README.md
  comps/third_parties/video-llama/src/docker_compose_vllama.yaml
  comps/third_parties/video-llama/src/requirements.txt
  tests/lvms/test_lvms_video_llama.sh

5 files changed: +18 -13 lines changed

comps/third_parties/video-llama/src/Dockerfile

Lines changed: 5 additions & 2 deletions

@@ -1,7 +1,7 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0

-FROM python:3.9-slim
+FROM python:3.11-slim

 ENV LANG=C.UTF-8

@@ -30,9 +30,12 @@ RUN git clone ${VIDEO_LLAMA_REPO} Video-LLaMA && \
     mv video_llama ../ && \
     cd ../ && rm -rf Video-LLaMA

+# Patch pytorchvideo's augmentations.py to import rgb_to_grayscale from torchvision.transforms.functional
+RUN sed -i 's/from torchvision.transforms.functional_tensor import rgb_to_grayscale/from torchvision.transforms.functional import rgb_to_grayscale/' /usr/local/lib/python3.11/site-packages/pytorchvideo/transforms/augmentations.py && \
+    sed -i 's/torchvision.transforms.functional_tensor/torchvision.transforms.v2.functional/' /usr/local/lib/python3.11/site-packages/pytorchvideo/transforms/augmentations.py
+
 USER user

 ENV PYTHONPATH=/home/user

-
 ENTRYPOINT ["bash", "start.sh"]
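
Note: torchvision deprecated the private torchvision.transforms.functional_tensor module in v0.15 and later removed it, so pytorchvideo's stale import has to be rewritten before it can run against torchvision 0.20.1. A quick sanity check that the patched import resolves inside the built image (a minimal sketch; it assumes the opea/lvm-video-llama:comps tag used by the test script below, and overrides the start.sh entrypoint):

    docker run --rm --entrypoint python opea/lvm-video-llama:comps \
        -c "from torchvision.transforms.functional import rgb_to_grayscale; print('patched import ok')"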

comps/third_parties/video-llama/src/README.md

Lines changed: 1 addition & 1 deletion

@@ -24,7 +24,7 @@ export ip_address=$(hostname -I | awk '{print $1}')
 export no_proxy=$no_proxy,${ip_address}
 export LVM_ENDPOINT=http://${ip_address}:9009
 # Start service
-docker compose -f comps/third_parties/llama-vision/deployment/docker_compose/docker_compose.yaml up -d
+docker compose -f comps/third_parties/video-llama/src/docker_compose_vllama.yaml up -d
 # it should take about 1.5 hours for the model to download in the video-llama server, assuming a maximum download speed of 100 Mbps
 until docker logs lvm-video-llama 2>&1 | grep -q "Uvicorn running on"; do
   sleep 5m
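
Instead of polling the container logs, readiness can also be checked against the /v1/health_check route wired into the compose healthcheck below (a sketch, assuming port 9009 is published on the host, as LVM_ENDPOINT implies):

    until curl -sf http://${ip_address}:9009/v1/health_check > /dev/null; do
        sleep 30
    done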

comps/third_parties/video-llama/src/docker_compose_vllama.yaml

Lines changed: 5 additions & 0 deletions

@@ -14,6 +14,11 @@ services:
       https_proxy: ${https_proxy}
       no_proxy: ${no_proxy}
       llm_download: "True"
+    healthcheck:
+      test: ["CMD-SHELL", "curl -f http://localhost:9009/v1/health_check || exit 1"]
+      interval: 10s
+      timeout: 5s
+      retries: 10
     volumes:
       - "/home/$USER/.cache:/home/user/.cache" # RECOMMENDED: use cache to avoid download
       - video-llama-model:/home/user/model
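
With this healthcheck in place, Docker itself tracks the container's health state, which other services can then wait on via a depends_on condition of service_healthy. To watch the state change by hand (assuming the container is named lvm-video-llama, as in the README and test script):

    docker inspect --format '{{.State.Health.Status}}' lvm-video-llama
    # prints "starting" until /v1/health_check answers, then "healthy"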

comps/third_parties/video-llama/src/requirements.txt

Lines changed: 5 additions & 6 deletions

@@ -12,7 +12,7 @@ iopath
 langchain
 langchain-community
 langchain-core
-numpy==1.26.4
+numpy
 omegaconf
 opencv-python-headless
 opentelemetry-api
@@ -26,12 +26,11 @@ sentence-transformers
 sentencepiece
 shortuuid
 timm
-torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu
-torchaudio==0.13.1 --index-url https://download.pytorch.org/whl/cpu
-torchvision==0.14.1 --index-url https://download.pytorch.org/whl/cpu
-transformers==4.47.1
+torch==2.5.1 --index-url https://download.pytorch.org/whl/cpu
+torchaudio~=2.5.1 --index-url https://download.pytorch.org/whl/cpu
+torchvision==0.20.1 --index-url https://download.pytorch.org/whl/cpu
+transformers
 uvicorn
 validators
 webdataset
 werkzeug
-
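
The ~= compatible-release pin on torchaudio allows any 2.5.x patch release (>=2.5.1, ==2.5.*), while torch and torchvision stay on exact versions; torchvision 0.20.1 is the release paired with torch 2.5.1. To reproduce the resolution locally against the CPU wheel index (a sketch, assuming Python 3.11 to match the new base image):

    pip install --index-url https://download.pytorch.org/whl/cpu \
        "torch==2.5.1" "torchaudio~=2.5.1" "torchvision==0.20.1"
    python -c "import torch, torchvision; print(torch.__version__, torchvision.__version__)"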

tests/lvms/test_lvms_video_llama.sh

Lines changed: 2 additions & 4 deletions

@@ -15,20 +15,19 @@ function build_docker_images() {
     cd $WORKPATH
     echo $(pwd)
     docker build --no-cache --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -t opea/lvm-video-llama:comps --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/video-llama/src/Dockerfile .
-    if $? ; then
+    if [ $? -ne 0 ]; then
         echo "opea/lvm-video-llama built fail"
         exit 1
     else
         echo "opea/lvm-video-llama built successful"
     fi
     docker build --no-cache --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -t opea/lvm:comps --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/lvms/src/Dockerfile .
-    if $? ; then
+    if [ $? -ne 0 ]; then
         echo "opea/lvm built fail"
         exit 1
     else
         echo "opea/lvm built successful"
     fi
-
 }

 function start_service() {
@@ -73,7 +72,6 @@ function main() {

     stop_docker
     echo y | docker system prune
-
 }

 main
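
The old if $? ; then tried to execute the previous exit status as a command; since no program named 0 or 1 exists on PATH, the condition was always false and the script reported success even for broken builds. The fixed form tests the status numerically, as in this minimal sketch (some_command is a placeholder for the docker build steps above):

    some_command            # placeholder for the real build step
    if [ $? -ne 0 ]; then   # a non-zero exit status means failure
        echo "some_command failed"
        exit 1
    fi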
