Skip to content
This repository was archived by the owner on Jun 5, 2025. It is now read-only.

feat: allow to parameterize urls in docker image #245

Merged
merged 3 commits into from
Dec 10, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 8 additions & 3 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -98,9 +98,14 @@ EXPOSE 80
# Set the PYTHONPATH environment variable so the app package resolves
ENV PYTHONPATH=/app/src

# Expose additional env vars so deployments can point codegate at
# alternative inference endpoints and tune logging without rebuilding
# the image. Empty values mean "use the CLI's built-in default";
# entrypoint.sh only forwards variables that are non-empty.
ENV CODEGATE_VLLM_URL=https://inference.codegate.ai
ENV CODEGATE_OPENAI_URL=
ENV CODEGATE_ANTHROPIC_URL=
ENV CODEGATE_OLLAMA_URL=
ENV CODEGATE_APP_LOG_LEVEL=WARNING
ENV CODEGATE_LOG_FORMAT=TEXT

# Set the container's default entrypoint; backup path/name are now
# hardcoded inside entrypoint.sh, so no arguments are passed here.
EXPOSE 8989
ENTRYPOINT ["/app/scripts/entrypoint.sh"]
20 changes: 18 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -150,11 +150,27 @@ make image-build

### Run the Container
```bash
# Basic usage with local image
docker run -p 8989:8989 codegate:latest

# With pre-built pulled image
docker pull ghcr.io/stacklok/codegate/codegate:latest
docker run -p 8989:8989 ghcr.io/stacklok/codegate/codegate:latest

# With persistent data
docker run -p 8989:8989 -v /path/to/volume:/app/weaviate_data ghcr.io/stacklok/codegate/codegate:latest
```

### Exposed parameters
- CODEGATE_VLLM_URL: URL for the inference engine (defaults to [https://inference.codegate.ai](https://inference.codegate.ai))
- CODEGATE_OPENAI_URL: URL for OpenAI inference engine (defaults to [https://api.openai.com/v1](https://api.openai.com/v1))
- CODEGATE_ANTHROPIC_URL: URL for Anthropic inference engine (defaults to [https://api.anthropic.com/v1](https://api.anthropic.com/v1))
- CODEGATE_OLLAMA_URL: URL for Ollama inference engine (defaults to [http://localhost:11434/api](http://localhost:11434/api))
- CODEGATE_APP_LOG_LEVEL: Level of debug desired when running the codegate server (defaults to WARNING, can be ERROR/WARNING/INFO/DEBUG)
- CODEGATE_LOG_FORMAT: Type of log formatting desired when running the codegate server (defaults to TEXT, can be JSON/TEXT)

```bash
docker run -p 8989:8989 -e CODEGATE_OLLAMA_URL=http://1.2.3.4:11434/api ghcr.io/stacklok/codegate/codegate:latest
```

## 🤝 Contributing
Expand Down
37 changes: 25 additions & 12 deletions scripts/entrypoint.sh
Original file line number Diff line number Diff line change
@@ -1,19 +1,20 @@
#!/bin/bash
# Entrypoint for the codegate container image: restores a bundled
# weaviate backup (if present) and launches the codegate server.

# Inference URL default; override by setting CODEGATE_VLLM_URL in the
# container environment (the Dockerfile seeds it with the same value).
DEFAULT_CODEGATE_VLLM_URL="https://inference.codegate.ai"
CODEGATE_VLLM_URL=${CODEGATE_VLLM_URL:-$DEFAULT_CODEGATE_VLLM_URL}

# These paths are baked into the image and will not change.
BACKUP_PATH="/tmp/weaviate_backup"
BACKUP_NAME="backup"
MODEL_BASE_PATH="/app/models"

# Function to restore backup if paths are provided
restore_backup() {
if [ -n "$BACKUP_PATH" ] && [ -n "$BACKUP_MODE" ]; then
if [ -d "$BACKUP_PATH" ] && [ -d "$BACKUP_PATH/$BACKUP_MODE" ]; then
echo "Restoring backup from $BACKUP_PATH/$BACKUP_MODE..."
python -m src.codegate.cli restore-backup --backup-path "$BACKUP_PATH" --backup-name "$BACKUP_MODE"
if [ -n "$BACKUP_PATH" ] && [ -n "$BACKUP_NAME" ]; then
if [ -d "$BACKUP_PATH" ] && [ -d "$BACKUP_PATH/$BACKUP_NAME" ]; then
echo "Restoring backup from $BACKUP_PATH/$BACKUP_NAME..."
python -m src.codegate.cli restore-backup --backup-path "$BACKUP_PATH" --backup-name "$BACKUP_NAME"
else
echo "No backup found at $BACKUP_PATH/$BACKUP_MODE. Skipping restore."
echo "No backup found at $BACKUP_PATH/$BACKUP_NAME. Skipping restore."
fi
else
echo "Backup path or mode not provided. Skipping restore."
Expand All @@ -28,13 +29,25 @@ start_dashboard() {

# Function to start the main application
# Function to start the main application.
# Builds the `serve` command line from the CODEGATE_* environment
# variables and replaces this shell with the Python server process.
# Globals read: CODEGATE_VLLM_URL, MODEL_BASE_PATH, CODEGATE_OPENAI_URL,
#   CODEGATE_ANTHROPIC_URL, CODEGATE_OLLAMA_URL, CODEGATE_APP_LOG_LEVEL,
#   CODEGATE_LOG_FORMAT
start_application() {
  # Use an array rather than a flat string: embedding escaped quotes in
  # a string and expanding it unquoted passes literal '"' characters to
  # the CLI and word-splits any value containing spaces.
  local cmd_args=(
    --port 8989
    --host 0.0.0.0
    --vllm-url "$CODEGATE_VLLM_URL"
    --model-base-path "$MODEL_BASE_PATH"
  )

  # Optional provider URLs: only forward a flag when the variable is
  # non-empty, so the CLI's own defaults apply otherwise.
  [ -n "${CODEGATE_OPENAI_URL:-}" ] && cmd_args+=(--openai-url "$CODEGATE_OPENAI_URL")
  [ -n "${CODEGATE_ANTHROPIC_URL:-}" ] && cmd_args+=(--anthropic-url "$CODEGATE_ANTHROPIC_URL")
  [ -n "${CODEGATE_OLLAMA_URL:-}" ] && cmd_args+=(--ollama-url "$CODEGATE_OLLAMA_URL")

  # Optional logging configuration.
  [ -n "${CODEGATE_APP_LOG_LEVEL:-}" ] && cmd_args+=(--log-level "$CODEGATE_APP_LOG_LEVEL")
  [ -n "${CODEGATE_LOG_FORMAT:-}" ] && cmd_args+=(--log-format "$CODEGATE_LOG_FORMAT")

  echo "Starting the application with args: ${cmd_args[*]}"

  exec python -m src.codegate.cli serve "${cmd_args[@]}"
}

# Main execution flow
echo "Initializing entrypoint script..."


# Step 1: Restore backup if applicable
restore_backup

Expand Down
Loading