Skip to content

Commit b79a67c

Browse files
carenthomas, cliandy, kl2806, sarahwooders, jnjpng
authored
chore: bump version 0.7.19 (#2643)
Co-authored-by: Andy Li <[email protected]>
Co-authored-by: Kevin Lin <[email protected]>
Co-authored-by: Sarah Wooders <[email protected]>
Co-authored-by: jnjpng <[email protected]>
1 parent b268861 commit b79a67c

File tree

6 files changed

+10
-15
lines changed

6 files changed

+10
-15
lines changed

letta/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__version__ = "0.7.18"
1+
__version__ = "0.7.19"
22

33
# import clients
44
from letta.client.client import LocalClient, RESTClient, create_client

letta/groups/sleeptime_multi_agent_v2.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -231,7 +231,7 @@ async def _participant_agent_step(
231231
# Update job status
232232
job_update = JobUpdate(
233233
status=JobStatus.completed,
234-
completed_at=datetime.now(timezone.utc),
234+
completed_at=datetime.now(timezone.utc).replace(tzinfo=None),
235235
metadata={
236236
"result": result.model_dump(mode="json"),
237237
"agent_id": sleeptime_agent_id,
@@ -242,7 +242,7 @@ async def _participant_agent_step(
242242
except Exception as e:
243243
job_update = JobUpdate(
244244
status=JobStatus.failed,
245-
completed_at=datetime.now(timezone.utc),
245+
completed_at=datetime.now(timezone.utc).replace(tzinfo=None),
246246
metadata={"error": str(e)},
247247
)
248248
self.job_manager.update_job_by_id(job_id=run_id, job_update=job_update, actor=self.actor)

letta/llm_api/google_vertex_client.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -235,6 +235,8 @@ def convert_response_to_chat_completion(
235235
)
236236

237237
except json.decoder.JSONDecodeError:
238+
if candidate.finish_reason == "MAX_TOKENS":
239+
raise ValueError(f"Could not parse response data from LLM: exceeded max token limit")
238240
# Inner thoughts are the content by default
239241
inner_thoughts = response_message.text
240242

letta/services/job_manager.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,7 @@ def update_job_by_id(self, job_id: str, job_update: JobUpdate, actor: PydanticUs
7272
setattr(job, key, value)
7373

7474
if update_data.get("status") == JobStatus.completed and not job.completed_at:
75-
job.completed_at = get_utc_time()
75+
job.completed_at = get_utc_time().replace(tzinfo=None)
7676
if job.callback_url:
7777
self._dispatch_callback(session, job)
7878

@@ -96,7 +96,7 @@ async def update_job_by_id_async(self, job_id: str, job_update: JobUpdate, actor
9696
setattr(job, key, value)
9797

9898
if update_data.get("status") == JobStatus.completed and not job.completed_at:
99-
job.completed_at = get_utc_time()
99+
job.completed_at = get_utc_time().replace(tzinfo=None)
100100
if job.callback_url:
101101
await self._dispatch_callback_async(session, job)
102102

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "letta"
3-
version = "0.7.18"
3+
version = "0.7.19"
44
packages = [
55
{include = "letta"},
66
]

tests/integration_test_batch_api_cron_jobs.py

Lines changed: 2 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,6 @@
1616
BetaMessageBatchSucceededResult,
1717
)
1818
from dotenv import load_dotenv
19-
from letta_client import Letta
2019

2120
from letta.config import LettaConfig
2221
from letta.helpers import ToolRulesSolver
@@ -75,12 +74,6 @@ def server():
7574
return SyncServer()
7675

7776

78-
@pytest.fixture(scope="session")
79-
def client(server_url):
80-
"""Creates a REST client for testing."""
81-
return Letta(base_url=server_url)
82-
83-
8477
# --- Dummy Response Factories --- #
8578

8679

@@ -263,7 +256,7 @@ async def dummy_results(batch_resp_id: str):
263256
# End-to-End Test
264257
# -----------------------------
265258
@pytest.mark.asyncio(loop_scope="session")
266-
async def test_polling_simple_real_batch(client, default_user, server):
259+
async def test_polling_simple_real_batch(default_user, server):
267260
# --- Step 1: Prepare test data ---
268261
# Create batch responses with different statuses
269262
# NOTE: This is a REAL batch id!
@@ -404,7 +397,7 @@ async def test_polling_simple_real_batch(client, default_user, server):
404397

405398

406399
@pytest.mark.asyncio(loop_scope="session")
407-
async def test_polling_mixed_batch_jobs(client, default_user, server):
400+
async def test_polling_mixed_batch_jobs(default_user, server):
408401
"""
409402
End-to-end test for polling batch jobs with mixed statuses and idempotency.
410403

0 commit comments

Comments (0)