
# LiteLLM and WebUI


## LLM Proxy

LiteLLM acts as an OpenAI-compatible proxy in front of several upstream providers, and Open WebUI talks to it through the standard OpenAI API. Two files are needed: a LiteLLM config that declares the models, and a Compose file that wires the two services together.

### litellm_config.yaml

Each entry maps a public `model_name` (the alias clients request) to a provider-prefixed `model` in `litellm_params`:

```yaml
model_list:
  - model_name: gpt-4o                  # alias exposed to clients
    litellm_params:
      model: openai/gpt-4o              # provider/model it routes to
  - model_name: claude-3-5-sonnet
    litellm_params:
      model: anthropic/claude-3-5-sonnet-20240620
  - model_name: gemini-1.5-pro-latest
    litellm_params:
      model: gemini/gemini-1.5-pro-latest
  - model_name: "ollama3.xxxx"
    litellm_params:
      model: "ollama/llama3.1"
      api_base: "http://159.223.53.44:11434"   # remote Ollama server
```
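With the stack running (see Run below), a quick way to confirm that an alias routes to the right provider is a direct chat-completion call against the proxy. A minimal sketch, assuming the port mapping and the `dummy` master key from the `docker-compose.yml` below:

```bash
# Ask the proxy for a completion from the gpt-4o alias.
# The Bearer token must match LITELLM_MASTER_KEY in docker-compose.yml.
curl http://127.0.0.1:33372/v1/chat/completions \
  -H "Authorization: Bearer dummy" \
  -H "Content-Type: application/json" \
  -d '{
        "model": "gpt-4o",
        "messages": [{"role": "user", "content": "Say hello"}]
      }'
```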

### docker-compose.yml

Open WebUI is pointed at the LiteLLM service as its OpenAI backend, and the config file above is mounted into the `litellm` container:

```yaml
services:
  webui:
    image: ghcr.io/open-webui/open-webui:main
    restart: unless-stopped
    ports:
      - '127.0.0.1:33371:8080'          # WebUI at http://127.0.0.1:33371
    environment:
      - OPENAI_API_KEY=dummy
      - OPENAI_API_BASE_URL=http://litellm:4000/v1   # point WebUI at the proxy
    volumes:
      - open-webui:/app/backend/data
  litellm:
    image: ghcr.io/berriai/litellm:main-latest
    restart: unless-stopped
    command:
      - '--config=/litellm_config.yaml'
      - '--detailed_debug'
    ports:
      - '127.0.0.1:33372:4000'          # proxy at http://127.0.0.1:33372
    environment:
      - LITELLM_MASTER_KEY=dummy
      - OPENAI_API_KEY                  # passed through from the host shell
      - GEMINI_API_KEY
      - ANTHROPIC_API_KEY
    volumes:
      # --config above expects this file inside the container,
      # so the mount must be active.
      - ./litellm_config.yaml:/litellm_config.yaml
volumes:
  open-webui:
```
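The bare entries under `environment` (`OPENAI_API_KEY`, `GEMINI_API_KEY`, `ANTHROPIC_API_KEY`) are pass-throughs: Compose forwards whatever values are set in the shell that runs `docker compose`. A minimal sketch with placeholder values:

```bash
# Export the provider keys before starting the stack; Compose passes them
# into the litellm container verbatim. The values here are placeholders.
export OPENAI_API_KEY=sk-...
export GEMINI_API_KEY=...
export ANTHROPIC_API_KEY=...
```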

## Run

```bash
docker compose up -d
docker compose ps
```
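Once both containers are up, Open WebUI is reachable at http://127.0.0.1:33371, and the proxy can be checked directly, for example by listing the model aliases it loaded from `litellm_config.yaml` (again assuming the `dummy` master key):

```bash
# List the model aliases the proxy currently serves.
curl http://127.0.0.1:33372/v1/models \
  -H "Authorization: Bearer dummy"
```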