We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 9765206 commit d43d663 — Copy full SHA for d43d663
2 files changed
rdagent/oai/backend/base.py
@@ -360,7 +360,7 @@ def _try_create_chat_completion_or_embedding( # type: ignore[no-untyped-def]
360
)
361
):
362
timeout_count += 1
363
- if timeout_count >= 3:
+ if timeout_count >= LLM_SETTINGS.timeout_fail_limit:
364
logger.warning("Timeout error, please check your network connection.")
365
raise e
366
rdagent/oai/llm_conf.py
@@ -37,6 +37,7 @@ class LLMSettings(ExtendedBaseSettings):
37
use_embedding_cache: bool = False
38
prompt_cache_path: str = str(Path.cwd() / "prompt_cache.db")
39
max_past_message_include: int = 10
40
+ timeout_fail_limit: int = 10
41
42
# Behavior of returning answers to the same question when caching is enabled
43
use_auto_chat_cache_seed_gen: bool = False
0 commit comments