(.venv) C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent>uipath run agent "{""topic"": ""test""}"
▶ START
HTTP Request: GET https://cloud.uipath.com/ABeamRPA/AB_DXCOE/agenthub_/llm/api/capabilities "HTTP/1.1 200 OK"
HTTP Request: POST https://cloud.uipath.com/ABeamRPA/AB_DXCOE/agenthub_/llm/api/chat/completions "HTTP/1.1 417 Expectation Failed"
❌ Error: Error code: 417 - {'StatusCode': 417, 'StatusDescription': 'ExpectationFailed', 'Message': 'No llm routing rule found for product agenthubb in SI using model gpt-5-2025-08-07 - check data residency configuration.', 'Description': '', 'TraceId': '00-b42c4c930cbde5a04f13ba39314924bf-5f0a8ad6a08a20cf-00', 'ErrorCode': '1040'}
Traceback (most recent call last):
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\uipath_langchain\_utils\_request_mixin.py", line 257, in _arequest
    response.raise_for_status()
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\httpx\_models.py", line 829, in raise_for_status
    raise HTTPStatusError(message, request=request, response=self)
httpx.HTTPStatusError: Client error '417 Expectation Failed' for url 'https://cloud.uipath.com/ABeamRPA/AB_DXCOE/agenthub_/llm/api/chat/completions'
For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/417

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\uipath_langchain\_cli\_runtime\_runtime.py", line 149, in stream
    async for stream_chunk in compiled_graph.astream(
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\langgraph\pregel\main.py", line 2993, in astream
    async for _ in runner.atick(
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\langgraph\pregel\_runner.py", line 401, in atick
    _panic_or_proceed(
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\langgraph\pregel\_runner.py", line 511, in _panic_or_proceed
    raise exc
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\langgraph\pregel\_retry.py", line 137, in arun_with_retry
    return await task.proc.ainvoke(task.input, config)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\langgraph\_internal\_runnable.py", line 706, in ainvoke
    input = await asyncio.create_task(
            ^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\langgraph\_internal\_runnable.py", line 474, in ainvoke
    ret = await self.afunc(*args, **kwargs)
          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\main.py", line 19, in generate_report
    output = await llm.ainvoke(
             ^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\langchain_core\language_models\chat_models.py", line 417, in ainvoke
    llm_result = await self.agenerate_prompt(
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\langchain_core\language_models\chat_models.py", line 1036, in agenerate_prompt
    return await self.agenerate(
           ^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\langchain_core\language_models\chat_models.py", line 994, in agenerate
    raise exceptions[0]
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\langchain_core\language_models\chat_models.py", line 1153, in _agenerate_with_cache
    async for chunk in self._astream(messages, stop=stop, **kwargs):
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\uipath_langchain\chat\models.py", line 337, in _astream
    response = await self._acall(self.url, payload, self.auth_headers)
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\uipath_langchain\_utils\_request_mixin.py", line 331, in _acall
    raise err
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\uipath_langchain\_utils\_request_mixin.py", line 299, in _acall
    response: Any = await retryer(self._arequest, url, request_body, headers)
                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\tenacity\asyncio\__init__.py", line 111, in __call__
    do = await self.iter(retry_state=retry_state)
         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\tenacity\asyncio\__init__.py", line 153, in iter
    result = await action(retry_state)
             ^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\tenacity\_utils.py", line 99, in inner
    return call(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\tenacity\__init__.py", line 400, in <lambda>
    self._add_action_func(lambda rs: rs.outcome.result())
                                     ^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\AppData\Local\Programs\Python\Python312\Lib\concurrent\futures\_base.py", line 449, in result
    return self.__get_result()
           ^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\AppData\Local\Programs\Python\Python312\Lib\concurrent\futures\_base.py", line 401, in __get_result
    raise self._exception
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\tenacity\asyncio\__init__.py", line 114, in __call__
    result = await fn(*args, **kwargs)
             ^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\weng\OneDrive - ABeam Consulting Ltd\Desktop\aiagent\.venv\Lib\site-packages\uipath_langchain\_utils\_request_mixin.py", line 271, in _arequest
    raise self._make_status_error_from_response(err.response) from err
openai.APIStatusError: Error code: 417 - {'StatusCode': 417, 'StatusDescription': 'ExpectationFailed', 'Message': 'No llm routing rule found for product agenthub in SI using model gpt-5-2025-08-07 - check data residency configuration.', 'Description': '', 'TraceId': '00-b42c4c930cbde5a04f13ba39314924bf-5f0a8ad6a08a20cf-00', 'ErrorCode': '1040'}
During task with name 'generate_report' and id '3a9c0f76-0f19-bac8-4e9f-cf2ed8b562ba'
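For reference, the failing frame at main.py line 19 is a LangGraph node named generate_report awaiting llm.ainvoke(...). Below is a minimal sketch of what such a node typically looks like when scaffolded with uipath-langchain; the UiPathChat class, the state shape, and the prompt text are assumptions for illustration only, not the actual contents of main.py. Note that the 417 is raised by the LLM gateway itself ("No llm routing rule found ... check data residency configuration"), so the usual remedy is to configure a model that is routable for the tenant's region or to fix the tenant's data residency settings rather than to change this code.

# Minimal sketch (assumptions noted in comments) of a LangGraph agent whose
# generate_report node matches the call site at main.py:19 in the trace above.
from typing import TypedDict

from langchain_core.messages import HumanMessage, SystemMessage
from langgraph.graph import END, START, StateGraph
from uipath_langchain.chat.models import UiPathChat  # assumed wrapper class; main.py may use a different one


class State(TypedDict):
    topic: str
    report: str


# The model id is the one rejected in the 417 response; a model that has a
# routing rule for the tenant's region would need to be configured instead.
llm = UiPathChat(model="gpt-5-2025-08-07")


async def generate_report(state: State) -> dict:
    # Corresponds to the frame: File "...\main.py", line 19, in generate_report
    output = await llm.ainvoke(
        [
            SystemMessage(content="You write short reports."),
            HumanMessage(content=f"Write a report about {state['topic']}."),
        ]
    )
    return {"report": output.content}


builder = StateGraph(State)
builder.add_node("generate_report", generate_report)
builder.add_edge(START, "generate_report")
builder.add_edge("generate_report", END)
graph = builder.compile()

Under those assumptions, uipath run agent "{""topic"": ""test""}" invokes the compiled graph with {"topic": "test"} as the initial state, which matches the command shown at the top of the log.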