Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions nanobot/cli/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -624,11 +624,13 @@ def status():
has_openrouter = bool(config.providers.openrouter.api_key)
has_anthropic = bool(config.providers.anthropic.api_key)
has_openai = bool(config.providers.openai.api_key)
has_gemini = bool(config.providers.gemini.api_key)
has_vllm = bool(config.providers.vllm.api_base)

console.print(f"OpenRouter API: {'[green]✓[/green]' if has_openrouter else '[dim]not set[/dim]'}")
console.print(f"Anthropic API: {'[green]✓[/green]' if has_anthropic else '[dim]not set[/dim]'}")
console.print(f"OpenAI API: {'[green]✓[/green]' if has_openai else '[dim]not set[/dim]'}")
console.print(f"Gemini API: {'[green]✓[/green]' if has_gemini else '[dim]not set[/dim]'}")
vllm_status = f"[green]✓ {config.providers.vllm.api_base}[/green]" if has_vllm else "[dim]not set[/dim]"
console.print(f"vLLM/Local: {vllm_status}")

Expand Down
4 changes: 3 additions & 1 deletion nanobot/config/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ class ProvidersConfig(BaseModel):
openai: ProviderConfig = Field(default_factory=ProviderConfig)
openrouter: ProviderConfig = Field(default_factory=ProviderConfig)
vllm: ProviderConfig = Field(default_factory=ProviderConfig)
gemini: ProviderConfig = Field(default_factory=ProviderConfig)


class GatewayConfig(BaseModel):
Expand Down Expand Up @@ -89,11 +90,12 @@ def workspace_path(self) -> Path:
return Path(self.agents.defaults.workspace).expanduser()

def get_api_key(self) -> str | None:
    """Return the first configured API key.

    Providers are consulted in priority order:
    OpenRouter > Anthropic > OpenAI > Gemini > vLLM.

    Returns:
        The first non-empty API key found, or ``None`` when no provider
        has a key configured.  The trailing ``or None`` normalizes any
        falsy value (e.g. an empty string) to ``None`` for callers that
        test ``is None``.
    """
    # NOTE: the old single-line docstring (pre-Gemini) was a stale diff
    # artifact left alongside the updated one; only the current contract
    # is documented here.
    return (
        self.providers.openrouter.api_key or
        self.providers.anthropic.api_key or
        self.providers.openai.api_key or
        self.providers.gemini.api_key or
        self.providers.vllm.api_key or
        None
    )
Expand Down
8 changes: 7 additions & 1 deletion nanobot/providers/litellm_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ class LiteLLMProvider(LLMProvider):
"""
LLM provider using LiteLLM for multi-provider support.

Supports OpenRouter, Anthropic, OpenAI, and many other providers through
Supports OpenRouter, Anthropic, OpenAI, Gemini, and many other providers through
a unified interface.
"""

Expand Down Expand Up @@ -47,6 +47,8 @@ def __init__(
os.environ.setdefault("ANTHROPIC_API_KEY", api_key)
elif "openai" in default_model or "gpt" in default_model:
os.environ.setdefault("OPENAI_API_KEY", api_key)
elif "gemini" in default_model.lower():
os.environ.setdefault("GEMINI_API_KEY", api_key)

if api_base:
litellm.api_base = api_base
Expand Down Expand Up @@ -86,6 +88,10 @@ async def chat(
if self.is_vllm:
model = f"hosted_vllm/{model}"

# For Gemini, ensure gemini/ prefix if not already present
if "gemini" in model.lower() and not model.startswith("gemini/"):
model = f"gemini/{model}"

kwargs: dict[str, Any] = {
"model": model,
"messages": messages,
Expand Down