Skip to content

Commit 91e694c

Browse files
committed
add tests for edge cases
1 parent 2184a3d commit 91e694c

File tree

1 file changed

+34
-0
lines changed

1 file changed

+34
-0
lines changed

tests/litellm/llms/test_openai_get_models_url.py

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -51,3 +51,37 @@ def test_localhost_with_port_and_v1(self):
5151
def test_localhost_without_v1(self):
    """A localhost base URL with a port but no /v1 suffix gets /v1/models appended."""
    resolved = self._get_url_for_api_base("http://localhost:11434")
    assert resolved == "http://localhost:11434/v1/models"
54+
55+
def test_default_api_base(self):
    """If api_base is None, it should default to OpenAI and append /v1/models."""
    fake_response = MagicMock()
    fake_response.status_code = 200
    fake_response.json.return_value = {"data": []}

    # Intercept the module-level HTTP client so no real request is made.
    with patch("litellm.module_level_client.get", return_value=fake_response) as mock_get:
        OpenAIGPTConfig().get_models(api_key="fake-key", api_base=None)

    requested_url = mock_get.call_args.kwargs["url"]
    assert requested_url == "https://api.openai.com/v1/models"
64+
65+
@patch("litellm.llms.openai.chat.gpt_transformation.get_secret_str", return_value="default-key")
def test_default_api_key(self, mock_get_secret):
    """If api_key is None, it should fetch from secrets."""
    fake_response = MagicMock()
    fake_response.status_code = 200
    fake_response.json.return_value = {"data": []}

    # Intercept the module-level HTTP client so no real request is made.
    with patch("litellm.module_level_client.get", return_value=fake_response) as mock_get:
        OpenAIGPTConfig().get_models(api_key=None, api_base="https://example.com")

    call_kwargs = mock_get.call_args.kwargs
    assert call_kwargs["url"] == "https://example.com/v1/models"
    # The secret returned by the patched get_secret_str must appear as the bearer token.
    assert call_kwargs["headers"]["Authorization"] == "Bearer default-key"
76+
77+
def test_get_models_error(self):
    """If the API returns an error status, get_models should raise an exception
    whose message includes the response body text."""
    # Hoisted from mid-function: imports belong at the top of their scope.
    # Kept function-scoped because the file-level import block is outside this chunk.
    import pytest

    mock_response = MagicMock()
    mock_response.status_code = 400
    mock_response.text = "Error message"

    # Intercept the module-level HTTP client so no real request is made.
    with patch("litellm.module_level_client.get", return_value=mock_response):
        with pytest.raises(Exception) as exc:
            OpenAIGPTConfig().get_models(api_key="fake-key", api_base="https://example.com")

    # The raised error should surface the upstream response body.
    assert "Failed to get models: Error message" in str(exc.value)

0 commit comments

Comments
 (0)