Commit 4c0f527

Fix intent detection code issue (#651)
* update lvm tgi llama requirements
  Signed-off-by: letonghan <[email protected]>

* fix intent detection issue
  Signed-off-by: letonghan <[email protected]>

* [pre-commit.ci] auto fixes from pre-commit.com hooks
  for more information, see https://pre-commit.ci

---------

Signed-off-by: letonghan <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 17bfb30 commit 4c0f527

5 files changed: +28 -16 lines changed


comps/intent_detection/langchain/intent_detection.py

Lines changed: 2 additions & 1 deletion
@@ -31,7 +31,8 @@ def llm_generate(input: LLMParamsDoc):
         timeout=600,
     )
 
-    prompt = PromptTemplate(template=IntentTemplate.generate_intent_template, input_variables=["query"])
+    prompt_template = 'Please identify the intent of the user query. You may only respond with "chitchat" or "QA" without explanations or engaging in conversation.### User Query: {query}, ### Response: '
+    prompt = PromptTemplate(template=prompt_template, input_variables=["query"])
 
     llm_chain = LLMChain(prompt=prompt, llm=llm)
 
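For context, a minimal sketch of how the inlined template above drives the chain. This is not part of the commit: the endpoint URL and max_new_tokens value below are placeholder assumptions, and the imports assume langchain plus langchain-community are installed.

from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_community.llms import HuggingFaceEndpoint

# Same template the commit inlines into llm_generate().
prompt_template = (
    'Please identify the intent of the user query. You may only respond with '
    '"chitchat" or "QA" without explanations or engaging in conversation.'
    '### User Query: {query}, ### Response: '
)

# Placeholder endpoint and generation settings; the real microservice reads
# TGI_LLM_ENDPOINT and the request body instead of hard-coding these.
llm = HuggingFaceEndpoint(
    endpoint_url="http://localhost:8008",  # assumed local TGI endpoint
    max_new_tokens=10,
    timeout=600,
)
prompt = PromptTemplate(template=prompt_template, input_variables=["query"])
llm_chain = LLMChain(prompt=prompt, llm=llm)

result = llm_chain.invoke({"query": "What is Deep Learning?"})
print(result["text"])  # expected to contain "QA"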

comps/intent_detection/langchain/requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -7,3 +7,4 @@ opentelemetry-exporter-otlp
 opentelemetry-sdk
 prometheus-fastapi-instrumentator
 shortuuid
+uvicorn

comps/intent_detection/langchain/template.py

Lines changed: 0 additions & 8 deletions
This file was deleted.

comps/lvms/tgi-llava/requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -2,6 +2,7 @@ datasets
 docarray[full]
 fastapi
 huggingface_hub
+langchain-core
 opentelemetry-api
 opentelemetry-exporter-otlp
 opentelemetry-sdk

tests/intent_detection/test_intent_detection_langchain.sh

Lines changed: 24 additions & 7 deletions
@@ -7,9 +7,10 @@ set -xe
 WORKPATH=$(dirname "$PWD")
 LOG_PATH="$WORKPATH/tests"
 ip_address=$(hostname -I | awk '{print $1}')
+
 function build_docker_images() {
     cd $WORKPATH
-    docker build --no-cache -t opea/llm-tgi:latest -f comps/intent_detection/langchain/Dockerfile .
+    docker build --no-cache -t opea/intent-detection:comps -f comps/intent_detection/langchain/Dockerfile .
 }
 
 function start_service() {
@@ -22,8 +23,19 @@ function start_service() {
     export TGI_LLM_ENDPOINT="http://${ip_address}:${tgi_endpoint}"
     intent_port=5043
     unset http_proxy
-    docker run -d --name="test-comps-intent-server" -p ${intent_port}:9000 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e TGI_LLM_ENDPOINT=$TGI_LLM_ENDPOINT -e HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN opea/llm-tgi:latest
-    sleep 5m
+    docker run -d --name="test-comps-intent-server" -p ${intent_port}:9000 --ipc=host -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e TGI_LLM_ENDPOINT=$TGI_LLM_ENDPOINT -e HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN opea/intent-detection:comps
+
+    # check whether tgi is fully ready
+    n=0
+    until [[ "$n" -ge 100 ]] || [[ $ready == true ]]; do
+        docker logs test-comps-intent-tgi-endpoint > ${LOG_PATH}/tgi.log
+        n=$((n+1))
+        if grep -q Connected ${LOG_PATH}/tgi.log; then
+            break
+        fi
+        sleep 5s
+    done
+    sleep 5s
 }
 
 function validate_microservice() {
@@ -33,11 +45,16 @@ function validate_microservice() {
         -d '{"query":"What is Deep Learning?","max_new_tokens":10,"top_k":1,"temperature":0.001,"streaming":false}' \
         -H 'Content-Type: application/json')
 
-    echo "==============="
-    echo $result
+    if [[ $result == *"QA"* ]]; then
+        echo $result
+        echo "Result correct."
+    else
+        echo "Result wrong. Received was $result"
+        docker logs test-comps-intent-server > ${LOG_PATH}/intent_detection.log
+        docker logs test-comps-intent-tgi-endpoint > ${LOG_PATH}/tgi.log
+        exit 1
+    fi
 
-    docker logs test-comps-intent-server >> ${LOG_PATH}/intent_detection.log
-    docker logs test-comps-intent-tgi-endpoint >> ${LOG_PATH}/tgi-endpoint.log
 }
 
 function stop_docker() {
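A rough Python equivalent of the validation step above, for readers who prefer requests over curl. This is a sketch only: the route path below is a placeholder (the actual URL sits outside this hunk), while the port mirrors intent_port=5043 from the test.

import sys

import requests

# Placeholder URL: the test maps intent_port=5043 to the container's port 9000;
# the exact route is not shown in this hunk, so adjust the path as needed.
url = "http://localhost:5043/v1/chat/intent"  # hypothetical route
payload = {
    "query": "What is Deep Learning?",
    "max_new_tokens": 10,
    "top_k": 1,
    "temperature": 0.001,
    "streaming": False,
}

resp = requests.post(url, json=payload, timeout=120)
if "QA" in resp.text:
    print("Result correct:", resp.text)
else:
    print("Result wrong:", resp.text)
    sys.exit(1)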
