Skip to content
This repository was archived by the owner on Jun 5, 2025. It is now read-only.

Commit 702585c

Browse files
committed
allow parameterizing the log level
1 parent d0de22f commit 702585c

File tree

3 files changed

+7
-2
lines changed

3 files changed

+7
-2
lines changed

Dockerfile

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,11 +98,12 @@ EXPOSE 80
9898
# Set the PYTHONPATH environment variable
9999
ENV PYTHONPATH=/app/src
100100

101-
# Define an argument for vlm_url with a default value
101+
# Define additional environment variables with default values
102102
ENV CODEGATE_VLLM_URL=https://inference.codegate.ai
103103
ENV CODEGATE_OPENAI_URL=
104104
ENV CODEGATE_ANTHROPIC_URL=
105105
ENV CODEGATE_OLLAMA_URL=
106+
ENV CODEGATE_APP_LOG_LEVEL=WARNING
106107

107108
# Set the container's default entrypoint
108109
EXPOSE 8989

README.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -166,6 +166,7 @@ docker run -p 8989:8989 -v /path/to/volume:/app/weaviate_data ghcr.io/stacklok/c
166166
- CODEGATE_OPENAI_URL: URL for OpenAI inference engine (defaults to [https://api.openai.com/v1](https://api.openai.com/v1))
167167
- CODEGATE_ANTHROPIC_URL: URL for Anthropic inference engine (defaults to [https://api.anthropic.com/v1](https://api.anthropic.com/v1))
168168
- CODEGATE_OLLAMA_URL: URL for OLlama inference engine (defaults to [http://localhost:11434/api](http://localhost:11434/api))
169+
- CODEGATE_APP_LOG_LEVEL: Logging level for the codegate server (defaults to WARNING; one of ERROR, WARNING, INFO, DEBUG)
169170

170171
```bash
171172
docker run -p 8989:8989 -e CODEGATE_OLLAMA_URL=http://1.2.3.4:11434/api ghcr.io/stacklok/codegate/codegate:latest

scripts/entrypoint.sh

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,14 +29,17 @@ start_dashboard() {
2929

3030
# Function to start the main application
3131
# Start the codegate server, replacing the shell process.
#
# Globals (read):
#   CODEGATE_VLLM_URL, MODEL_BASE_PATH          - always passed to the server
#   CODEGATE_OPENAI_URL, CODEGATE_ANTHROPIC_URL,
#   CODEGATE_OLLAMA_URL                         - passed only when non-empty
#   CODEGATE_APP_LOG_LEVEL                      - passed only when non-empty
#                                                 (ERROR/WARNING/INFO/DEBUG)
# Outputs: logs the assembled argument list to stdout before exec'ing.
start_application() {
  # Accumulate arguments in an array so values containing spaces survive
  # intact. The previous flat-string approach (CMD_ARGS="... \"$VAR\"")
  # combined with an unquoted expansion at exec time word-split the values
  # and passed the escaped quote characters literally to the program.
  local cmd_args=(--port 8989 --host 0.0.0.0
    --vllm-url "$CODEGATE_VLLM_URL"
    --model-base-path "$MODEL_BASE_PATH")

  # Append optional inference-engine URLs only when configured.
  [ -n "$CODEGATE_OPENAI_URL" ] && cmd_args+=(--openai-url "$CODEGATE_OPENAI_URL")
  [ -n "$CODEGATE_ANTHROPIC_URL" ] && cmd_args+=(--anthropic-url "$CODEGATE_ANTHROPIC_URL")
  [ -n "$CODEGATE_OLLAMA_URL" ] && cmd_args+=(--ollama-url "$CODEGATE_OLLAMA_URL")

  # Append the log level when set (quoted, consistent with the URLs above).
  [ -n "$CODEGATE_APP_LOG_LEVEL" ] && cmd_args+=(--log-level "$CODEGATE_APP_LOG_LEVEL")

  echo "Starting the application with args: ${cmd_args[*]}"

  # exec replaces this shell so the server receives container signals directly.
  exec python -m src.codegate.cli serve "${cmd_args[@]}"
}
4245

0 commit comments

Comments
 (0)