# Copyright (C) 2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

# ChatQnA mega-service deployment for Intel Gaudi (habana runtime).
# Pipeline: dataprep -> embedding (TEI) -> retriever (Redis) -> reranking (TEI) -> LLM (Ray Serve).
version: "3.8"

services:
  # Redis Stack provides the vector store (6379) and RedisInsight UI (8001).
  redis-vector-db:
    image: redis/redis-stack:7.2.0-v9
    container_name: redis-vector-db
    ports:
      - "6379:6379"
      - "8001:8001"
  # Ingests documents into the Redis vector index.
  dataprep-redis-service:
    image: opea/dataprep-redis:latest
    container_name: dataprep-redis-server
    depends_on:
      - redis-vector-db
    ports:
      - "6007:6007"
      - "6008:6008"
      - "6009:6009"
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      REDIS_URL: ${REDIS_URL}
      INDEX_NAME: ${INDEX_NAME}
  # Text Embeddings Inference server running on Gaudi HPUs.
  tei-embedding-service:
    image: opea/tei-gaudi:latest
    container_name: tei-embedding-gaudi-server
    ports:
      - "8090:80"
    volumes:
      - "./data:/data"
    runtime: habana
    cap_add:
      - SYS_NICE
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      HABANA_VISIBLE_DEVICES: all
      OMPI_MCA_btl_vader_single_copy_mechanism: none
      MAX_WARMUP_SEQUENCE_LENGTH: 512
    command: --model-id ${EMBEDDING_MODEL_ID}
  # Embedding microservice wrapping the TEI endpoint.
  embedding:
    image: opea/embedding-tei:latest
    container_name: embedding-tei-server
    depends_on:
      - tei-embedding-service
    ports:
      - "6000:6000"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-embedding-service"
    restart: unless-stopped
  # Retriever microservice querying the Redis vector index.
  retriever:
    image: opea/retriever-redis:latest
    container_name: retriever-redis-server
    depends_on:
      - redis-vector-db
    ports:
      - "7000:7000"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      REDIS_URL: ${REDIS_URL}
      INDEX_NAME: ${INDEX_NAME}
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-retriever-service"
    restart: unless-stopped
  # TEI reranker; runs on CPU (note the cpu image tag), not on Gaudi.
  tei-reranking-service:
    image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.2
    container_name: tei-reranking-gaudi-server
    ports:
      - "8808:80"
    volumes:
      - "./data:/data"
    shm_size: 1g
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      HF_HUB_DISABLE_PROGRESS_BARS: 1
      HF_HUB_ENABLE_HF_TRANSFER: 0
    command: --model-id ${RERANK_MODEL_ID} --auto-truncate
  # Reranking microservice wrapping the TEI reranker endpoint.
  reranking:
    image: opea/reranking-tei:latest
    container_name: reranking-tei-gaudi-server
    depends_on:
      - tei-reranking-service
    ports:
      - "8000:8000"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT}
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      HF_HUB_DISABLE_PROGRESS_BARS: 1
      HF_HUB_ENABLE_HF_TRANSFER: 0
      LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
      LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
      LANGCHAIN_PROJECT: "opea-reranking-service"
    restart: unless-stopped
  # Ray Serve LLM server on Gaudi HPUs, exposing an OpenAI-compatible API on port 80.
  ray-service:
    image: ray_serve:habana
    container_name: ray-gaudi-server
    ports:
      - "8008:80"
    volumes:
      - "./data:/data"
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      HABANA_VISIBLE_DEVICES: all
      OMPI_MCA_btl_vader_single_copy_mechanism: none
      LLM_MODEL: ${LLM_MODEL_ID}
      # Quoted so Compose passes a string; a bare YAML boolean is rejected by
      # some compose schema validators for `environment` values.
      TRUST_REMOTE_CODE: "True"
    runtime: habana
    cap_add:
      - SYS_NICE
    ipc: host
    # $$LLM_MODEL is escaped so the shell *inside the container* expands the
    # LLM_MODEL env var set above. A single $ would be interpolated by Compose
    # at parse time from the host environment (which sets LLM_MODEL_ID, not
    # LLM_MODEL) and would silently resolve to an empty string.
    command: /bin/bash -c "ray start --head && python api_server_openai.py --port_number 80 --model_id_or_path $$LLM_MODEL --chat_processor ChatModelLlama --num_cpus_per_worker 8 --num_hpus_per_worker 1"
  # LLM microservice forwarding requests to the Ray Serve endpoint.
  llm:
    image: opea/llm-ray:latest
    container_name: llm-ray-gaudi-server
    depends_on:
      - ray-service
    ports:
      - "9000:9000"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      RAY_Serve_ENDPOINT: ${RAY_Serve_LLM_ENDPOINT}
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      LLM_MODEL: ${LLM_MODEL_ID}
    restart: unless-stopped
  # Mega-service gateway orchestrating the full ChatQnA pipeline.
  # NOTE(review): service key renamed from the typo "chaqna-..." to match its
  # container_name; update any external scripts that targeted the old key.
  chatqna-gaudi-backend-server:
    image: opea/chatqna:latest
    container_name: chatqna-gaudi-backend-server
    depends_on:
      - redis-vector-db
      - tei-embedding-service
      - embedding
      - retriever
      - tei-reranking-service
      - reranking
      - ray-service
      - llm
    ports:
      - "8888:8888"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
      - EMBEDDING_SERVICE_HOST_IP=${EMBEDDING_SERVICE_HOST_IP}
      - RETRIEVER_SERVICE_HOST_IP=${RETRIEVER_SERVICE_HOST_IP}
      - RERANK_SERVICE_HOST_IP=${RERANK_SERVICE_HOST_IP}
      - LLM_SERVICE_HOST_IP=${LLM_SERVICE_HOST_IP}
      - LLM_SERVICE_PORT=${LLM_SERVICE_PORT}
    ipc: host
    restart: always
  # Web UI for the ChatQnA backend.
  chatqna-gaudi-ui-server:
    image: opea/chatqna-ui:latest
    container_name: chatqna-gaudi-ui-server
    depends_on:
      - chatqna-gaudi-backend-server
    ports:
      - "5173:5173"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - CHAT_BASE_URL=${BACKEND_SERVICE_ENDPOINT}
      - UPLOAD_FILE_BASE_URL=${DATAPREP_SERVICE_ENDPOINT}
      - GET_FILE=${DATAPREP_GET_FILE_ENDPOINT}
      - DELETE_FILE=${DATAPREP_DELETE_FILE_ENDPOINT}
    ipc: host
    restart: always

networks:
  default:
    driver: bridge