Skip to content
This repository was archived by the owner on Jun 5, 2025. It is now read-only.

Commit 0561342

Browse files
committed
feat: allow parameterizing URLs in docker image
Right now the only URL that can be parameterized is VLLM_URL. Expose the other URLs as well so they can be customized. Closes: #243
1 parent a81aa0f commit 0561342

File tree

3 files changed

+42
-16
lines changed

3 files changed

+42
-16
lines changed

Dockerfile

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -99,8 +99,11 @@ EXPOSE 80
9999
ENV PYTHONPATH=/app/src
100100

101101
# Define environment variables for the inference engine URLs (VLLM has a default value)
102-
ENV VLLM_URL=https://inference.codegate.ai
102+
ENV CODEGATE_VLLM_URL=https://inference.codegate.ai
103+
ENV CODEGATE_OPENAI_URL=
104+
ENV CODEGATE_ANTHROPIC_URL=
105+
ENV CODEGATE_OLLAMA_URL=
103106

104107
# Set the container's default entrypoint
105108
EXPOSE 8989
106-
ENTRYPOINT ["/app/scripts/entrypoint.sh", "/tmp/weaviate_backup", "backup"]
109+
ENTRYPOINT ["/app/scripts/entrypoint.sh"]

README.md

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -150,11 +150,25 @@ make image-build
150150

151151
### Run the Container
152152
```bash
153-
# Basic usage
153+
# Basic usage with local image
154154
docker run -p 8989:8989 codegate:latest
155155

156+
# With pre-built pulled image
157+
docker pull ghcr.io/stacklok/codegate/codegate:latest
158+
docker run -p 8989:8989 ghcr.io/stacklok/codegate/codegate:latest
159+
156160
# With persistent data
157-
docker run -p 8989:8989 -v /path/to/volume:/app/weaviate_data codegate:latest
161+
docker run -p 8989:8989 -v /path/to/volume:/app/weaviate_data ghcr.io/stacklok/codegate/codegate:latest
162+
```
163+
164+
### Exposed parameters
165+
- CODEGATE_VLLM_URL: URL for the inference engine (defaults to [https://inference.codegate.ai](https://inference.codegate.ai))
166+
- CODEGATE_OPENAI_URL: URL for OpenAI inference engine (defaults to [https://api.openai.com/v1](https://api.openai.com/v1))
167+
- CODEGATE_ANTHROPIC_URL: URL for Anthropic inference engine (defaults to [https://api.anthropic.com/v1](https://api.anthropic.com/v1))
168+
- CODEGATE_OLLAMA_URL: URL for Ollama inference engine (defaults to [http://localhost:11434/api](http://localhost:11434/api))
169+
170+
```bash
171+
docker run -p 8989:8989 -e CODEGATE_OLLAMA_URL=http://1.2.3.4:11434/api ghcr.io/stacklok/codegate/codegate:latest
158172
```
159173

160174
## 🤝 Contributing

scripts/entrypoint.sh

Lines changed: 21 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,20 @@
11
#!/bin/bash
2-
DEFAULT_VLLM_URL="https://inference.codegate.ai"
3-
VLLM_URL=${VLLM_URL:-$DEFAULT_VLLM_URL}
2+
DEFAULT_CODEGATE_VLLM_URL="https://inference.codegate.ai"
3+
CODEGATE_VLLM_URL=${CODEGATE_VLLM_URL:-$DEFAULT_CODEGATE_VLLM_URL}
44

5-
# Parse arguments
6-
BACKUP_PATH=$1
7-
BACKUP_MODE=$2
5+
# those are hardcoded on the image, will not change
6+
BACKUP_PATH="/tmp/weaviate_backup"
7+
BACKUP_NAME="backup"
8+
MODEL_BASE_PATH="/app/models"
89

910
# Function to restore backup if paths are provided
1011
restore_backup() {
11-
if [ -n "$BACKUP_PATH" ] && [ -n "$BACKUP_MODE" ]; then
12-
if [ -d "$BACKUP_PATH" ] && [ -d "$BACKUP_PATH/$BACKUP_MODE" ]; then
13-
echo "Restoring backup from $BACKUP_PATH/$BACKUP_MODE..."
14-
python -m src.codegate.cli restore-backup --backup-path "$BACKUP_PATH" --backup-name "$BACKUP_MODE"
12+
if [ -n "$BACKUP_PATH" ] && [ -n "$BACKUP_NAME" ]; then
13+
if [ -d "$BACKUP_PATH" ] && [ -d "$BACKUP_PATH/$BACKUP_NAME" ]; then
14+
echo "Restoring backup from $BACKUP_PATH/$BACKUP_NAME..."
15+
python -m src.codegate.cli restore-backup --backup-path "$BACKUP_PATH" --backup-name "$BACKUP_NAME"
1516
else
16-
echo "No backup found at $BACKUP_PATH/$BACKUP_MODE. Skipping restore."
17+
echo "No backup found at $BACKUP_PATH/$BACKUP_NAME. Skipping restore."
1718
fi
1819
else
1920
echo "Backup path or mode not provided. Skipping restore."
@@ -28,13 +29,21 @@ start_dashboard() {
2829

2930
# Function to start the main application
3031
start_application() {
31-
echo "Starting the application with VLLM URL: $VLLM_URL"
32-
exec python -m src.codegate.cli serve --port 8989 --host 0.0.0.0 --vllm-url "$VLLM_URL" --model-base-path /app/models
32+
echo "Starting the application with VLLM URL: $CODEGATE_VLLM_URL"
33+
CMD_ARGS="--port 8989 --host 0.0.0.0 --vllm-url \"$CODEGATE_VLLM_URL\" --model-base-path \"$MODEL_BASE_PATH\""
34+
35+
# Check and append additional URLs if they are set
36+
[ -n "$CODEGATE_OPENAI_URL" ] && CMD_ARGS+=" --openai-url \"$CODEGATE_OPENAI_URL\""
37+
[ -n "$CODEGATE_ANTHROPIC_URL" ] && CMD_ARGS+=" --anthropic-url \"$CODEGATE_ANTHROPIC_URL\""
38+
[ -n "$CODEGATE_OLLAMA_URL" ] && CMD_ARGS+=" --ollama-url \"$CODEGATE_OLLAMA_URL\""
39+
40+
exec python -m src.codegate.cli serve $CMD_ARGS
3341
}
3442

3543
# Main execution flow
3644
echo "Initializing entrypoint script..."
3745

46+
3847
# Step 1: Restore backup if applicable
3948
restore_backup
4049

0 commit comments

Comments
 (0)