
Commit 13c0e4d

Limit vllm and vllm-fork tags (opea-project#1529)
Signed-off-by: ZePan110 <[email protected]>
1 parent 9e5e9be commit 13c0e4d

15 files changed (+16, -16 lines)
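
Every touched file makes the same change: instead of resolving whichever vLLM tag happens to be newest at build time, each script now checks out a fixed tag (v0.6.6.post1+Gaudi-1.20.0 for HabanaAI/vllm-fork, v0.8.2 for upstream vLLM in the CPU path), presumably so image builds stay reproducible and do not break when a new upstream tag is published. A minimal sketch of the before/after shell pattern, using the same commands that appear in the hunks below:

    # Before: float to whichever tag is newest when the job runs
    VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")

    # After: pin to a known tag
    VLLM_VER=v0.6.6.post1+Gaudi-1.20.0

    echo "Check out vLLM tag ${VLLM_VER}"
    git checkout ${VLLM_VER}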

.github/workflows/_comps-workflow.yml

Lines changed: 1 addition & 1 deletion
@@ -76,7 +76,7 @@ jobs:
 fi
 if [[ $(grep -c "vllm-gaudi:" ${docker_compose_yml}) != 0 ]]; then
 git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
-VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+VLLM_VER=v0.6.6.post1+Gaudi-1.20.0
 echo "Check out vLLM tag ${VLLM_VER}"
 git checkout ${VLLM_VER} &> /dev/null && cd ../
 fi

.github/workflows/push-image-build.yml

Lines changed: 1 addition & 1 deletion
@@ -92,7 +92,7 @@ jobs:
 fi
 if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then
 git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
-VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+VLLM_VER=v0.6.6.post1+Gaudi-1.20.0
 echo "Check out vLLM tag ${VLLM_VER}"
 git checkout ${VLLM_VER} &> /dev/null && cd ../
 fi

comps/agent/src/README.md

Lines changed: 1 addition & 1 deletion
@@ -131,7 +131,7 @@ export vllm_volume=${YOUR_LOCAL_DIR_FOR_MODELS}
 # build vLLM image
 git clone https://github.com/HabanaAI/vllm-fork.git
 cd ./vllm-fork
-VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+VLLM_VER=v0.6.6.post1+Gaudi-1.20.0
 git checkout ${VLLM_VER} &> /dev/null
 docker build -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy

comps/dataprep/src/README_finance.md

Lines changed: 1 addition & 1 deletion
@@ -49,7 +49,7 @@ First build vllm-gaudi docker image.
 cd $WORKDIR
 git clone https://github.com/HabanaAI/vllm-fork.git
 # get the latest release tag of vllm gaudi
-VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+VLLM_VER=v0.6.6.post1+Gaudi-1.20.0
 echo "Check out vLLM tag ${VLLM_VER}"
 git checkout ${VLLM_VER}
 docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy

comps/third_parties/vllm/src/build_docker_vllm.sh

Lines changed: 2 additions & 2 deletions
@@ -36,7 +36,7 @@ fi
 # Build the docker image for vLLM based on the hardware mode
 if [ "$hw_mode" = "hpu" ]; then
 git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
-VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+VLLM_VER=v0.6.6.post1+Gaudi-1.20.0
 echo "Check out vLLM tag ${VLLM_VER}"
 git checkout ${VLLM_VER} &> /dev/null
 docker build -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
@@ -45,7 +45,7 @@ if [ "$hw_mode" = "hpu" ]; then
 else
 git clone https://github.com/vllm-project/vllm.git
 cd ./vllm/
-VLLM_VER="$(git describe --tags "$(git rev-list --tags --max-count=1)" )"
+VLLM_VER="v0.8.2"
 echo "Check out vLLM tag ${VLLM_VER}"
 git checkout ${VLLM_VER} &> /dev/null
 docker build -f Dockerfile.cpu -t opea/vllm-cpu:latest --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy
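
The two hunks above pin different tags for different targets: the HPU branch builds opea/vllm-gaudi:latest from HabanaAI/vllm-fork at v0.6.6.post1+Gaudi-1.20.0, while the else branch builds opea/vllm-cpu:latest from upstream vllm-project/vllm at v0.8.2. A hedged usage sketch, assuming the script takes the hardware mode ("hpu" or "cpu") as a positional argument; the argument parsing that sets $hw_mode is not part of this hunk:

    # Gaudi (HPU) image from HabanaAI/vllm-fork at the pinned tag (assumed invocation)
    bash comps/third_parties/vllm/src/build_docker_vllm.sh hpu

    # CPU image from upstream vLLM at v0.8.2 (assumed invocation)
    bash comps/third_parties/vllm/src/build_docker_vllm.sh cpu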

tests/agent/build_vllm_gaudi.sh

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@ function build_vllm_docker_images() {
 git clone https://github.com/HabanaAI/vllm-fork.git
 fi
 cd ./vllm-fork
-VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+VLLM_VER=v0.6.6.post1+Gaudi-1.20.0
 echo "Check out vLLM tag ${VLLM_VER}"
 git checkout ${VLLM_VER} &> /dev/null
 docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:comps --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy

tests/agent/sql_agent_test/test_sql_agent.sh

Lines changed: 1 addition & 1 deletion
@@ -108,7 +108,7 @@ function build_vllm_docker_images() {
 git clone https://github.com/HabanaAI/vllm-fork.git
 fi
 cd ./vllm-fork
-VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+VLLM_VER=v0.6.6.post1+Gaudi-1.20.0
 echo "Check out vLLM tag ${VLLM_VER}"
 git checkout ${VLLM_VER} &> /dev/null
 docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:comps --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy

tests/agent/test_agent_langchain_on_intel_hpu.sh

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ function build_vllm_docker_images() {
 git clone https://github.com/HabanaAI/vllm-fork.git
 fi
 cd ./vllm-fork
-VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+VLLM_VER=v0.6.6.post1+Gaudi-1.20.0
 echo "Check out vLLM tag ${VLLM_VER}"
 git checkout ${VLLM_VER} &> /dev/null
 docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:comps --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy

tests/dataprep/test_dataprep_redis_finance_on_intel_hpu.sh

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ function build_vllm_docker_images() {
 git clone https://github.com/HabanaAI/vllm-fork.git
 fi
 cd ./vllm-fork
-VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+VLLM_VER=v0.6.6.post1+Gaudi-1.20.0
 echo "Check out vLLM tag ${VLLM_VER}"
 git checkout ${VLLM_VER} &> /dev/null

tests/guardrails/test_guardrails_hallucination_detection_on_intel_hpu.sh

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@ function build_docker_images() {
 cd $WORKPATH
 git clone https://github.com/HabanaAI/vllm-fork.git
 cd vllm-fork/
-VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
+VLLM_VER=v0.6.6.post1+Gaudi-1.20.0
 echo "Check out vLLM tag ${VLLM_VER}"
 git checkout ${VLLM_VER} &> /dev/null
 docker build --no-cache --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile.hpu -t opea/vllm-gaudi:comps --shm-size=128g .
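
Whichever of these scripts runs, the checkout can be sanity-checked against the pinned tag before the docker build step. A small verification sketch, not part of this commit:

    cd vllm-fork
    git checkout v0.6.6.post1+Gaudi-1.20.0 &> /dev/null
    git describe --tags   # on an exact tag checkout this prints the pinned tag name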
