Skip to content

Commit 42ead7f

Browse files
committed
refactor(assistants): remove deprecated assistant-related files and update prompt type references in responder
1 parent 51a985e commit 42ead7f

File tree

10 files changed

+90
-57
lines changed

10 files changed

+90
-57
lines changed
File renamed without changes.

agentle/assistants/assistant.py renamed to agentle/autonomous_systems/agent.py

Lines changed: 24 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
1-
from collections.abc import Callable, Sequence
21
import time
32
import uuid
3+
from collections.abc import Callable, Sequence
44
from typing import Any, Literal, Optional
55

66
from rsb.models.base_model import BaseModel
77
from rsb.models.field import Field
88

9-
from agentle.assistants.assistant_input_type import AssistantInputType
9+
from agentle.autonomous_systems.agent_input_type import AgentInputType
1010
from agentle.prompts.models.prompt import Prompt
1111
from agentle.responses.definitions.conversation_param import ConversationParam
1212
from agentle.responses.definitions.include_enum import IncludeEnum
@@ -21,13 +21,12 @@
2121
from agentle.responses.definitions.tool_choice_function import ToolChoiceFunction
2222
from agentle.responses.definitions.tool_choice_mcp import ToolChoiceMCP
2323
from agentle.responses.definitions.tool_choice_options import ToolChoiceOptions
24-
2524
from agentle.responses.definitions.tool_choice_types import ToolChoiceTypes
2625
from agentle.responses.definitions.truncation import Truncation
2726
from agentle.responses.responder import Responder
2827

2928

30-
class Assistant[ResponseSchema = None](BaseModel):
29+
class Agent[ResponseSchema = None](BaseModel):
3130
created_at: int = Field(
3231
default=int(time.time()),
3332
description="""The time the assistant was created.""",
@@ -91,21 +90,34 @@ class Assistant[ResponseSchema = None](BaseModel):
9190
)
9291

9392
include: Optional[list[IncludeEnum]] = None
93+
9494
parallel_tool_calls: Optional[bool] = None
95+
9596
store: Optional[bool] = None
97+
9698
instructions: Optional[str | Prompt] = None
9799

98100
stream_options: Optional[ResponseStreamOptions] = None
101+
99102
conversation: Optional[str | ConversationParam] = None
103+
100104
text_format: type[ResponseSchema] | None = None
105+
101106
# ResponseProperties parameters
102107
previous_response_id: Optional[str] = None
108+
103109
reasoning: Optional[Reasoning] = None
110+
104111
background: Optional[bool] = None
112+
105113
max_output_tokens: Optional[int] = None
114+
106115
max_tool_calls: Optional[int] = None
116+
107117
text: Optional[Text] = None
118+
108119
tools: Optional[Sequence[Tool | Callable[..., Any]]] = None
120+
109121
tool_choice: Optional[
110122
ToolChoiceOptions
111123
| ToolChoiceAllowed
@@ -114,17 +126,24 @@ class Assistant[ResponseSchema = None](BaseModel):
114126
| ToolChoiceMCP
115127
| ToolChoiceCustom
116128
] = None
129+
117130
prompt: Optional[Prompt] = None
131+
118132
truncation: Optional[Truncation] = None
133+
119134
# ModelResponseProperties parameters
120135
top_logprobs: Optional[int] = None
136+
121137
user: Optional[str] = None
138+
122139
safety_identifier: Optional[str] = None
140+
123141
prompt_cache_key: Optional[str] = None
142+
124143
service_tier: Optional[ServiceTier] = None
125144

126145
async def execute_async(
127146
self,
128-
input: AssistantInputType,
147+
input: AgentInputType,
129148
stream: Optional[Literal[False] | Literal[True]] = None,
130149
) -> Response[ResponseSchema]: ...

agentle/assistants/assistant_input_type.py renamed to agentle/autonomous_systems/agent_input_type.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,4 +4,4 @@
44
from agentle.responses.definitions.input_item import InputItem
55

66

7-
AssistantInputType = Optional[str | list[InputItem] | Prompt]
7+
# Input accepted by an agent: raw text, a list of input items, a Prompt
# object, or None (no explicit input).
AgentInputType = Optional[str | list[InputItem] | Prompt]

agentle/responses/definitions/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@
7373

7474
# from .drag_point import DragPoint
7575

76-
# from .easy_input_message import EasyInputMessage
76+
# from .message import Message
7777

7878
# from .file_citation_body import FileCitationBody
7979

@@ -774,7 +774,7 @@
774774
# 'DoubleClickAction',
775775
# 'Drag',
776776
# 'DragPoint',
777-
# 'EasyInputMessage',
777+
# 'Message',
778778
# 'FileCitationBody',
779779
# 'FilePath',
780780
# 'FileSearchTool',

agentle/responses/definitions/easy_input_message.py

Lines changed: 0 additions & 30 deletions
This file was deleted.

agentle/responses/definitions/input_item.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212

1313

1414
# Model dependencies
15-
from .easy_input_message import EasyInputMessage
15+
from .message import Message
1616
from .item import Item
1717
from .item_reference_param import ItemReferenceParam
1818

@@ -22,6 +22,6 @@
2222
# Using Item without generic parameter since it's an Annotated type alias
2323
# The generic type handling is done at the Item level
2424
InputItem = Annotated[
25-
Union[EasyInputMessage, Item, ItemReferenceParam],
25+
Union[Message, Item, ItemReferenceParam],
2626
Field(discriminator="type"),
2727
]
Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
# Auto-generated from responses_api.py
2+
# Model: Message
3+
4+
# generated by datamodel-codegen:
5+
# filename: filtered_openapi.yaml
6+
# timestamp: 2025-10-18T15:02:20+00:00
7+
8+
from __future__ import annotations
9+
10+
from typing import Literal
11+
12+
from pydantic import BaseModel, Field
13+
14+
from agentle.responses.definitions.message_content_type import MessageContentType
15+
16+
from .role import Role
17+
18+
19+
class Message(BaseModel):
20+
role: Role = Field(
21+
...,
22+
description="The role of the message input. One of `user`, `assistant`, `system`, or\n`developer`.\n",
23+
)
24+
content: MessageContentType = Field(
25+
...,
26+
description="Text, image, or audio input to the model, used to generate a response.\nCan also contain previous assistant responses.\n",
27+
)
28+
type: Literal["input_message"] = Field(
29+
default="input_message",
30+
description="The type of the message input. Always `message`.\n",
31+
)
32+
33+
@classmethod
34+
def user(cls, content: MessageContentType) -> Message:
35+
return Message(role="user", content=content)
36+
37+
@classmethod
38+
def assistant(cls, content: MessageContentType) -> Message:
39+
return Message(role="assistant", content=content)
40+
41+
@classmethod
42+
def system(cls, content: MessageContentType) -> Message:
43+
return Message(role="system", content=content)
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
from typing import Union

from .input_message_content_list import InputMessageContentList

# Content accepted by a Message: a plain string, or a structured list of
# input content parts.
MessageContentType = Union[str, InputMessageContentList]

agentle/responses/definitions/role.py

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,7 @@
66
# timestamp: 2025-10-18T15:02:20+00:00
77

88

9-
from enum import Enum
9+
from typing import Literal
1010

1111

12-
class Role(Enum):
13-
user = "user"
14-
assistant = "assistant"
15-
system = "system"
16-
developer = "developer"
12+
# Roles accepted for an input message. `developer` is restored: the
# `Message.role` field description documents it as a valid role, and the
# previous Enum-based definition of Role included it, so dropping it here
# was a backward-incompatible narrowing.
Role = Literal["user", "assistant", "system", "developer"]

agentle/responses/responder.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
from rsb.models.field import Field
1212

1313
from agentle.generations.tracing.otel_client_type import OtelClientType
14-
from agentle.prompts.models.prompt import Prompt as PromptModel
14+
from agentle.prompts.models.prompt import Prompt as AgentlePromptType
1515
from agentle.responses.async_stream import AsyncStream
1616
from agentle.responses.definitions.conversation_param import ConversationParam
1717
from agentle.responses.definitions.create_response import CreateResponse
@@ -73,12 +73,12 @@ def from_openai(cls, api_key: str | None = None) -> Responder:
7373
async def respond_async[TextFormatT = None](
7474
self,
7575
*,
76-
input: Optional[Union[str, list[InputItem], PromptModel]] = None,
76+
input: Optional[Union[str, list[InputItem], AgentlePromptType]] = None,
7777
model: Optional[str] = None,
7878
include: Optional[list[IncludeEnum]] = None,
7979
parallel_tool_calls: Optional[bool] = None,
8080
store: Optional[bool] = None,
81-
instructions: Optional[Union[str, PromptModel]] = None,
81+
instructions: Optional[Union[str, AgentlePromptType]] = None,
8282
stream: Optional[Literal[False]] = False,
8383
stream_options: Optional[ResponseStreamOptions] = None,
8484
conversation: Optional[Union[str, ConversationParam]] = None,
@@ -118,12 +118,12 @@ async def respond_async[TextFormatT = None](
118118
async def respond_async[TextFormatT = None](
119119
self,
120120
*,
121-
input: Optional[Union[str, list[InputItem], PromptModel]] = None,
121+
input: Optional[Union[str, list[InputItem], AgentlePromptType]] = None,
122122
model: Optional[str] = None,
123123
include: Optional[list[IncludeEnum]] = None,
124124
parallel_tool_calls: Optional[bool] = None,
125125
store: Optional[bool] = None,
126-
instructions: Optional[Union[str, PromptModel]] = None,
126+
instructions: Optional[Union[str, AgentlePromptType]] = None,
127127
stream: Literal[True],
128128
stream_options: Optional[ResponseStreamOptions] = None,
129129
conversation: Optional[Union[str, ConversationParam]] = None,
@@ -163,12 +163,12 @@ async def respond_async[TextFormatT = None](
163163
async def respond_async[TextFormatT = None](
164164
self,
165165
*,
166-
input: Optional[Union[str, list[InputItem], PromptModel]] = None,
166+
input: Optional[Union[str, list[InputItem], AgentlePromptType]] = None,
167167
model: Optional[str] = None,
168168
include: Optional[list[IncludeEnum]] = None,
169169
parallel_tool_calls: Optional[bool] = None,
170170
store: Optional[bool] = None,
171-
instructions: Optional[Union[str, PromptModel]] = None,
171+
instructions: Optional[Union[str, AgentlePromptType]] = None,
172172
stream: bool,
173173
stream_options: Optional[ResponseStreamOptions] = None,
174174
conversation: Optional[Union[str, ConversationParam]] = None,
@@ -207,12 +207,12 @@ async def respond_async[TextFormatT = None](
207207
async def respond_async[TextFormatT = None](
208208
self,
209209
*,
210-
input: Optional[Union[str, list[InputItem], PromptModel]] = None,
210+
input: Optional[Union[str, list[InputItem], AgentlePromptType]] = None,
211211
model: Optional[str] = None,
212212
include: Optional[list[IncludeEnum]] = None,
213213
parallel_tool_calls: Optional[bool] = None,
214214
store: Optional[bool] = None,
215-
instructions: Optional[Union[str, PromptModel]] = None,
215+
instructions: Optional[Union[str, AgentlePromptType]] = None,
216216
stream: Optional[Literal[False] | Literal[True]] = None,
217217
stream_options: Optional[ResponseStreamOptions] = None,
218218
conversation: Optional[Union[str, ConversationParam]] = None,
@@ -256,13 +256,13 @@ async def respond_async[TextFormatT = None](
256256
_tools.append(tool)
257257

258258
create_response = CreateResponse(
259-
input=str(input) if isinstance(input, PromptModel) else input,
259+
input=str(input) if isinstance(input, AgentlePromptType) else input,
260260
model=model,
261261
include=include,
262262
parallel_tool_calls=parallel_tool_calls,
263263
store=store,
264264
instructions=str(instructions)
265-
if isinstance(instructions, PromptModel)
265+
if isinstance(instructions, AgentlePromptType)
266266
else instructions,
267267
stream=stream,
268268
stream_options=stream_options,

0 commit comments

Comments
 (0)