
Commit 6fc3ea5

feat: move deepseek to new agent loop [LET-4081] (#4266)
* feat: move deepseek to new agent loop
* fixes
1 parent 7fe7a64 commit 6fc3ea5

File tree

2 files changed: +19 -4 lines

letta/llm_api/deepseek_client.py
letta/server/rest_api/routers/v1/agents.py


letta/llm_api/deepseek_client.py

Lines changed: 11 additions & 3 deletions
@@ -1,3 +1,4 @@
+import json
 import os
 from typing import List, Optional
 
@@ -10,6 +11,7 @@
 from letta.otel.tracing import trace_method
 from letta.schemas.llm_config import LLMConfig
 from letta.schemas.message import Message as PydanticMessage
+from letta.schemas.openai.chat_completion_request import ChatMessage, cast_message_to_subtype
 from letta.schemas.openai.chat_completion_response import ChatCompletionResponse
 from letta.settings import model_settings
 
@@ -36,15 +38,19 @@ def build_request_data(
         data = super().build_request_data(messages, llm_config, tools, force_tool_call)
 
         def add_functions_to_system_message(system_message: ChatMessage):
-            system_message.content += f"<available functions> {''.join(json.dumps(f) for f in functions)} </available functions>"
+            system_message.content += f"<available functions> {''.join(json.dumps(f) for f in tools)} </available functions>"
             system_message.content += 'Select best function to call simply respond with a single json block with the fields "name" and "arguments". Use double quotes around the arguments.'
 
+        openai_message_list = [cast_message_to_subtype(m.to_openai_dict(put_inner_thoughts_in_kwargs=False)) for m in messages]
+
         if llm_config.model == "deepseek-reasoner":  # R1 currently doesn't support function calling natively
             add_functions_to_system_message(
-                data["messages"][0]
+                openai_message_list[0]
             )  # Inject additional instructions to the system prompt with the available functions
 
-        data["messages"] = map_messages_to_deepseek_format(data["messages"])
+        openai_message_list = map_messages_to_deepseek_format(openai_message_list)
+
+        data["messages"] = [m.dict() for m in openai_message_list]
 
         return data
 
@@ -94,4 +100,6 @@ def convert_response_to_chat_completion(
         Handles potential extraction of inner thoughts if they were added via kwargs.
         """
         response = ChatCompletionResponse(**response_data)
+        if response.choices[0].message.tool_calls:
+            return super().convert_response_to_chat_completion(response_data, input_messages, llm_config)
         return convert_deepseek_response_to_chatcompletion(response)
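
For context, the build_request_data change above stops mutating data["messages"] in place: it first converts the Letta messages to OpenAI-style ChatMessage objects, injects the tool schemas into the system prompt when the model is deepseek-reasoner (since R1 has no native function calling), maps them to DeepSeek's format, and only then serializes them back into plain dicts for the payload; the response side gains a fallback to the parent class whenever native tool_calls are present. The snippet below is a simplified, standalone sketch of just the prompt-injection step, not the Letta implementation: the function name inject_tools_into_system_message and the toy get_weather schema are illustrative assumptions.

import json
from typing import Any, Dict, List


def inject_tools_into_system_message(messages: List[Dict[str, Any]], tools: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    # Append the serialized tool schemas plus calling instructions to the first
    # (system) message, mirroring add_functions_to_system_message in the diff.
    system = dict(messages[0])
    system["content"] += f"<available functions> {''.join(json.dumps(t) for t in tools)} </available functions>"
    system["content"] += (
        'Select best function to call simply respond with a single json block '
        'with the fields "name" and "arguments". Use double quotes around the arguments.'
    )
    return [system, *messages[1:]]


# Toy usage; the tool schema here is illustrative only.
messages = [
    {"role": "system", "content": "You are a helpful agent. "},
    {"role": "user", "content": "What is the weather in Paris?"},
]
tools = [{"name": "get_weather", "parameters": {"type": "object", "properties": {"city": {"type": "string"}}}}]

payload_messages = inject_tools_into_system_message(messages, tools)
print(payload_messages[0]["content"])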

letta/server/rest_api/routers/v1/agents.py

Lines changed: 8 additions & 1 deletion
@@ -1078,6 +1078,7 @@ async def send_message(
         "azure",
         "xai",
         "groq",
+        "deepseek",
     ]
 
     # Create a new run for execution tracking
@@ -1223,8 +1224,9 @@ async def send_message_streaming(
         "azure",
         "xai",
         "groq",
+        "deepseek",
     ]
-    model_compatible_token_streaming = agent.llm_config.model_endpoint_type in ["anthropic", "openai", "bedrock"]
+    model_compatible_token_streaming = agent.llm_config.model_endpoint_type in ["anthropic", "openai", "bedrock", "deepseek"]
 
     # Create a new job for execution tracking
     if settings.track_agent_run:
@@ -1480,7 +1482,10 @@ async def _process_message_background(
         "google_vertex",
         "bedrock",
         "ollama",
+        "azure",
+        "xai",
         "groq",
+        "deepseek",
     ]
     if agent_eligible and model_compatible:
         if agent.enable_sleeptime and agent.agent_type != AgentType.voice_convo_agent:
@@ -1671,6 +1676,7 @@ async def preview_raw_payload(
         "azure",
         "xai",
         "groq",
+        "deepseek",
     ]
 
     if agent_eligible and model_compatible:
@@ -1742,6 +1748,7 @@ async def summarize_agent_conversation(
         "azure",
         "xai",
         "groq",
+        "deepseek",
     ]
 
     if agent_eligible and model_compatible:
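
The agents.py changes are pure compatibility gating: each router handler keeps a list of model endpoint types that may use the new agent loop, and this commit adds "deepseek" to those lists, plus to the token-streaming list in send_message_streaming. Below is a minimal, hypothetical sketch of that gate, not the router code itself; the constant names are invented, and the lists contain only the endpoint types visible in this diff's context, so they are not exhaustive.

# Hypothetical standalone sketch of the compatibility checks the routers apply.
NEW_AGENT_LOOP_ENDPOINT_TYPES = [
    "google_vertex",
    "bedrock",
    "ollama",
    "azure",
    "xai",
    "groq",
    "deepseek",  # added by this commit
]

TOKEN_STREAMING_ENDPOINT_TYPES = ["anthropic", "openai", "bedrock", "deepseek"]  # "deepseek" added by this commit


def use_new_agent_loop(endpoint_type: str, agent_eligible: bool) -> bool:
    # Mirrors `if agent_eligible and model_compatible:` in the router handlers.
    return agent_eligible and endpoint_type in NEW_AGENT_LOOP_ENDPOINT_TYPES


def supports_token_streaming(endpoint_type: str) -> bool:
    return endpoint_type in TOKEN_STREAMING_ENDPOINT_TYPES


# Example: a DeepSeek-backed agent now passes both checks.
assert use_new_agent_loop("deepseek", agent_eligible=True)
assert supports_token_streaming("deepseek")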

0 commit comments