File tree Expand file tree Collapse file tree 5 files changed +9
-9
lines changed Expand file tree Collapse file tree 5 files changed +9
-9
lines changed Original file line number Diff line number Diff line change @@ -69,7 +69,7 @@ export TEI_EMBEDDING_MODEL_NAME="BAAI/bge-large-en-v1.5"
69
69
export LANGCHAIN_TRACING_V2=true
70
70
export LANGCHAIN_API_KEY=${your_langchain_api_key}
71
71
export LANGCHAIN_PROJECT="opea/gen-ai-comps:embeddings"
72
- python embedding_tei_gaudi.py
72
+ python embedding_tei.py
73
73
```
74
74
75
75
### Start Embedding Service with Local Model
Original file line number Diff line number Diff line change @@ -24,5 +24,5 @@ ENV PYTHONPATH=$PYTHONPATH:/home/user
24
24
25
25
WORKDIR /home/user/comps/embeddings/langchain
26
26
27
- ENTRYPOINT ["python", "embedding_tei_gaudi.py"]
27
+ ENTRYPOINT ["python", "embedding_tei.py"]
28
28
Original file line number Diff line number Diff line change 19
19
20
20
21
21
@register_microservice (
22
- name = "opea_service@embedding_tgi_gaudi",
22
+ name = "opea_service@embedding_tei_langchain",
23
23
service_type = ServiceType .EMBEDDING ,
24
24
endpoint = "/v1/embeddings" ,
25
25
host = "0.0.0.0" ,
28
28
output_datatype = EmbedDoc768 ,
29
29
)
30
30
@traceable (run_type = "embedding" )
31
- @register_statistics(names=["opea_service@embedding_tgi_gaudi"])
31
+ @register_statistics(names=["opea_service@embedding_tei_langchain"])
32
32
def embedding (input : TextDoc ) -> EmbedDoc768 :
33
33
start = time .time ()
34
34
embed_vector = embeddings .embed_query (input .text )
35
35
embed_vector = embed_vector [:768 ] # Keep only the first 768 elements
36
36
res = EmbedDoc768 (text = input .text , embedding = embed_vector )
37
- statistics_dict["opea_service@embedding_tgi_gaudi"].append_latency(time.time() - start, None)
37
+ statistics_dict["opea_service@embedding_tei_langchain"].append_latency(time.time() - start, None)
38
38
return res
39
39
40
40
41
41
if __name__ == "__main__" :
42
42
tei_embedding_endpoint = os .getenv ("TEI_EMBEDDING_ENDPOINT" , "http://localhost:8080" )
43
43
embeddings = HuggingFaceHubEmbeddings (model = tei_embedding_endpoint )
44
44
print ("TEI Gaudi Embedding initialized." )
45
- opea_microservices["opea_service@embedding_tgi_gaudi"].start()
45
+ opea_microservices["opea_service@embedding_tei_langchain"].start()
Original file line number Diff line number Diff line change @@ -26,5 +26,5 @@ ENV PYTHONPATH=$PYTHONPATH:/home/user
26
26
27
27
WORKDIR /home/user/comps/embeddings/llama_index
28
28
29
- ENTRYPOINT ["python3", "embedding_tei_gaudi.py"]
29
+ ENTRYPOINT ["python3", "embedding_tei.py"]
30
30
Original file line number Diff line number Diff line change 10
10
11
11
12
12
@register_microservice (
13
- name = "opea_service@embedding_tgi_gaudi",
13
+ name = "opea_service@embedding_tei_llamaindex",
14
14
service_type = ServiceType .EMBEDDING ,
15
15
endpoint = "/v1/embeddings" ,
16
16
host = "0.0.0.0" ,
@@ -31,4 +31,4 @@ def embedding(input: TextDoc) -> EmbedDoc768:
31
31
tei_embedding_endpoint = os .getenv ("TEI_EMBEDDING_ENDPOINT" , "http://localhost:8090" )
32
32
embeddings = TextEmbeddingsInference (model_name = tei_embedding_model_name , base_url = tei_embedding_endpoint )
33
33
print ("TEI Gaudi Embedding initialized." )
34
- opea_microservices["opea_service@embedding_tgi_gaudi"].start()
34
+ opea_microservices["opea_service@embedding_tei_llamaindex"].start()
You can’t perform that action at this time.
0 commit comments