diff --git a/pyproject.toml b/pyproject.toml index b5001646..1c0f4e2b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ classifiers = [ keywords = ["apify", "api", "client", "automation", "crawling", "scraping"] dependencies = [ "apify-shared>=1.4.1", + "colorama~=0.4.0", "httpx>=0.25", "more_itertools>=10.0.0", ] @@ -52,6 +53,7 @@ dev = [ "respx~=0.22.0", "ruff~=0.11.0", "setuptools", # setuptools are used by pytest but not explicitly required + "types-colorama~=0.4.15.20240106", ] [tool.hatch.build.targets.wheel] diff --git a/src/apify_client/_logging.py b/src/apify_client/_logging.py index 86a8528f..e2f08b6f 100644 --- a/src/apify_client/_logging.py +++ b/src/apify_client/_logging.py @@ -7,6 +7,8 @@ from contextvars import ContextVar from typing import TYPE_CHECKING, Any, Callable, NamedTuple +from colorama import Fore, Style + # Conditional import only executed when type checking, otherwise we'd get circular dependency issues if TYPE_CHECKING: from apify_client.clients.base.base_client import _BaseBaseClient @@ -120,3 +122,47 @@ def format(self, record: logging.LogRecord) -> str: if extra: log_string = f'{log_string} ({json.dumps(extra)})' return log_string + + +def create_redirect_logger( + name: str, +) -> logging.Logger: + """Create a logger for redirecting logs from another Actor. + + Args: + name: The name of the logger. It can be used to inherit from other loggers. Example: `apify.xyz` will use a logger + named `xyz` and make it a child of the `apify` logger. + + Returns: + The created logger. + """ + to_logger = logging.getLogger(name) + to_logger.propagate = False + + # Remove filters and handlers in case this logger already exists and was set up in some way. + for handler in to_logger.handlers: + to_logger.removeHandler(handler) + for log_filter in to_logger.filters: + to_logger.removeFilter(log_filter) + + handler = logging.StreamHandler() + handler.setFormatter(RedirectLogFormatter()) + to_logger.addHandler(handler) + to_logger.setLevel(logging.DEBUG) + return to_logger + + +class RedirectLogFormatter(logging.Formatter): + """Formatter applied to the default redirect logger.""" + + def format(self, record: logging.LogRecord) -> str: + """Format the log by prepending the logger name to the original message. + + Args: + record: Log record to be formatted. + + Returns: + Formatted log message. + """ + formatted_logger_name = f'{Fore.CYAN}[{record.name}]{Style.RESET_ALL} ' + return f'{formatted_logger_name}-> {record.msg}' diff --git a/src/apify_client/clients/resource_clients/actor.py b/src/apify_client/clients/resource_clients/actor.py index e88d1078..8adcd1ab 100644 --- a/src/apify_client/clients/resource_clients/actor.py +++ b/src/apify_client/clients/resource_clients/actor.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Literal from apify_shared.utils import ( filter_out_none_values_recursively, @@ -27,6 +27,7 @@ if TYPE_CHECKING: from decimal import Decimal + from logging import Logger from apify_shared.consts import ActorJobStatus, MetaOrigin @@ -289,6 +290,7 @@ def call( timeout_secs: int | None = None, webhooks: list[dict] | None = None, wait_secs: int | None = None, + logger: Logger | None | Literal['default'] = 'default', ) -> dict | None: """Start the Actor and wait for it to finish before returning the Run object. @@ -313,6 +315,9 @@ def call( a webhook set up for the Actor, you do not have to add it again here. 
wait_secs: The maximum number of seconds the server waits for the run to finish. If not provided, waits indefinitely. + logger: Logger used to redirect logs from the Actor run. Using the "default" literal means that a predefined + default logger will be used. Setting `None` will disable any log propagation. Passing a custom logger + will redirect logs to the provided logger. Returns: The run object. """ started_run = self.start( timeout_secs=timeout_secs, webhooks=webhooks, ) + if not logger: + return self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) - return self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) + run_client = self.root_client.run(run_id=started_run['id']) + if logger == 'default': + log_context = run_client.get_streamed_log() + else: + log_context = run_client.get_streamed_log(to_logger=logger) + + with log_context: + return self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) def build( self, @@ -681,6 +695,7 @@ async def call( timeout_secs: int | None = None, webhooks: list[dict] | None = None, wait_secs: int | None = None, + logger: Logger | None | Literal['default'] = 'default', ) -> dict | None: """Start the Actor and wait for it to finish before returning the Run object. @@ -705,6 +720,9 @@ async def call( a webhook set up for the Actor, you do not have to add it again here. wait_secs: The maximum number of seconds the server waits for the run to finish. If not provided, waits indefinitely. + logger: Logger used to redirect logs from the Actor run. Using the "default" literal means that a predefined + default logger will be used. Setting `None` will disable any log propagation. Passing a custom logger + will redirect logs to the provided logger. Returns: The run object. 
@@ -720,7 +738,17 @@ async def call( webhooks=webhooks, ) - return await self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) + if not logger: + return await self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) + + run_client = self.root_client.run(run_id=started_run['id']) + if logger == 'default': + log_context = await run_client.get_streamed_log() + else: + log_context = await run_client.get_streamed_log(to_logger=logger) + + async with log_context: + return await self.root_client.run(started_run['id']).wait_for_finish(wait_secs=wait_secs) async def build( self, diff --git a/src/apify_client/clients/resource_clients/log.py b/src/apify_client/clients/resource_clients/log.py index 62883634..3b671d7a 100644 --- a/src/apify_client/clients/resource_clients/log.py +++ b/src/apify_client/clients/resource_clients/log.py @@ -1,7 +1,14 @@ from __future__ import annotations +import asyncio +import logging +import re +import threading +from asyncio import Task from contextlib import asynccontextmanager, contextmanager -from typing import TYPE_CHECKING, Any +from datetime import datetime, timezone +from threading import Thread +from typing import TYPE_CHECKING, Any, cast from apify_shared.utils import ignore_docs @@ -11,8 +18,10 @@ if TYPE_CHECKING: from collections.abc import AsyncIterator, Iterator + from types import TracebackType import httpx + from typing_extensions import Self class LogClient(ResourceClient): @@ -23,11 +32,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'logs') super().__init__(*args, resource_path=resource_path, **kwargs) - def get(self) -> str | None: + def get(self, *, raw: bool = False) -> str | None: """Retrieve the log as text. https://docs.apify.com/api/v2#/reference/logs/log/get-log + Args: + raw: If true, the log will include formatting, such as the character sequences used for coloring. + Returns: The retrieved log, or None, if it does not exist. """ response = self.http_client.call( url=self.url, method='GET', - params=self._params(), + params=self._params(raw=raw), ) return response.text # noqa: TRY300 return None - def get_as_bytes(self) -> bytes | None: + def get_as_bytes(self, *, raw: bool = False) -> bytes | None: """Retrieve the log as raw bytes. https://docs.apify.com/api/v2#/reference/logs/log/get-log + Args: + raw: If true, the log will include formatting, such as the character sequences used for coloring. + Returns: The retrieved log as raw bytes, or None, if it does not exist. """ response = self.http_client.call( url=self.url, method='GET', - params=self._params(), + params=self._params(raw=raw), parse_response=False, ) return None @contextmanager - def stream(self) -> Iterator[httpx.Response | None]: + def stream(self, *, raw: bool = False) -> Iterator[httpx.Response | None]: """Retrieve the log as a stream. https://docs.apify.com/api/v2#/reference/logs/log/get-log + Args: + raw: If true, the log will include formatting, such as the character sequences used for coloring. + Returns: The retrieved log as a context-managed streaming `Response`, or None, if it does not exist. 
""" @@ -82,7 +100,7 @@ def stream(self) -> Iterator[httpx.Response | None]: response = self.http_client.call( url=self.url, method='GET', - params=self._params(stream=True), + params=self._params(stream=True, raw=raw), stream=True, parse_response=False, ) @@ -104,11 +122,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: resource_path = kwargs.pop('resource_path', 'logs') super().__init__(*args, resource_path=resource_path, **kwargs) - async def get(self) -> str | None: + async def get(self, *, raw: bool = False) -> str | None: """Retrieve the log as text. https://docs.apify.com/api/v2#/reference/logs/log/get-log + Args: + raw: If true, the log will include formating. For example, coloring character sequences. + Returns: The retrieved log, or None, if it does not exist. """ @@ -116,7 +137,7 @@ async def get(self) -> str | None: response = await self.http_client.call( url=self.url, method='GET', - params=self._params(), + params=self._params(raw=raw), ) return response.text # noqa: TRY300 @@ -126,11 +147,14 @@ async def get(self) -> str | None: return None - async def get_as_bytes(self) -> bytes | None: + async def get_as_bytes(self, *, raw: bool = False) -> bytes | None: """Retrieve the log as raw bytes. https://docs.apify.com/api/v2#/reference/logs/log/get-log + Args: + raw: If true, the log will include formating. For example, coloring character sequences. + Returns: The retrieved log as raw bytes, or None, if it does not exist. """ @@ -138,7 +162,7 @@ async def get_as_bytes(self) -> bytes | None: response = await self.http_client.call( url=self.url, method='GET', - params=self._params(), + params=self._params(raw=raw), parse_response=False, ) @@ -150,11 +174,14 @@ async def get_as_bytes(self) -> bytes | None: return None @asynccontextmanager - async def stream(self) -> AsyncIterator[httpx.Response | None]: + async def stream(self, *, raw: bool = False) -> AsyncIterator[httpx.Response | None]: """Retrieve the log as a stream. https://docs.apify.com/api/v2#/reference/logs/log/get-log + Args: + raw: If true, the log will include formating. For example, coloring character sequences. + Returns: The retrieved log as a context-managed streaming `Response`, or None, if it does not exist. """ @@ -163,7 +190,7 @@ async def stream(self) -> AsyncIterator[httpx.Response | None]: response = await self.http_client.call( url=self.url, method='GET', - params=self._params(stream=True), + params=self._params(stream=True, raw=raw), stream=True, parse_response=False, ) @@ -175,3 +202,179 @@ async def stream(self) -> AsyncIterator[httpx.Response | None]: finally: if response: await response.aclose() + + +class StreamedLog: + """Utility class for streaming logs from another Actor. + + It uses buffer to deal with possibly chunked logs. Chunked logs are stored in buffer. Chunks are expected to contain + specific markers that indicate the start of the log message. Each time a new chunk with complete split marker + arrives, the buffer is processed, logged and emptied. + + This works only if the logs have datetime marker in ISO format. For example, `2025-05-12T15:35:59.429Z` This is the + default log standard for the actors. + """ + + # Test related flag to enable propagation of logs to the `caplog` fixture during tests. + _force_propagate = False + + def __init__(self, to_logger: logging.Logger, *, from_start: bool = True) -> None: + """Initialize `StreamedLog`. + + Args: + to_logger: The logger to which the logs will be redirected. 
+ from_start: If `True`, all logs from the start of the Actor run will be redirected. If `False`, only newly + arrived logs will be redirected. This can be useful for redirecting only a small portion of relevant + logs for long-running Actors in stand-by. + + """ + self._to_logger = to_logger + if self._force_propagate: + to_logger.propagate = True + self._stream_buffer = list[bytes]() + self._split_marker = re.compile(rb'(?:\n|^)(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z)') + self._relevancy_time_limit: datetime | None = None if from_start else datetime.now(tz=timezone.utc) + + def _process_new_data(self, data: bytes) -> None: + new_chunk = data + self._stream_buffer.append(new_chunk) + if re.findall(self._split_marker, new_chunk): + # If a complete split marker was found in the new chunk, log the buffer. + self._log_buffer_content(include_last_part=False) + + def _log_buffer_content(self, *, include_last_part: bool = False) -> None: + """Merge the whole buffer and split it into parts based on the marker. + + Log the messages created from the split parts and remove them from the buffer. + The last part may be incomplete, so it can be left unprocessed in the buffer until later. + """ + all_parts = re.split(self._split_marker, b''.join(self._stream_buffer))[1:] # The first split is empty + if include_last_part: + message_markers = all_parts[0::2] + message_contents = all_parts[1::2] + self._stream_buffer = [] + else: + message_markers = all_parts[0:-2:2] + message_contents = all_parts[1:-2:2] + # The last two parts (marker and message) are possibly not complete and will be left in the buffer + self._stream_buffer = all_parts[-2:] + + for marker, content in zip(message_markers, message_contents): + decoded_marker = marker.decode('utf-8') + decoded_content = content.decode('utf-8') + if self._relevancy_time_limit: + log_time = datetime.fromisoformat(decoded_marker.replace('Z', '+00:00')) + if log_time < self._relevancy_time_limit: + # Skip irrelevant logs + continue + message = decoded_marker + decoded_content + self._to_logger.log(level=self._guess_log_level_from_message(message), msg=message.strip()) + + @staticmethod + def _guess_log_level_from_message(message: str) -> int: + """Guess the log level from the message.""" + # Using only levels explicitly mentioned in the logging module + known_levels = ('CRITICAL', 'FATAL', 'ERROR', 'WARN', 'WARNING', 'INFO', 'DEBUG', 'NOTSET') + for level in known_levels: + if level in message: + # `getLevelName` returns an `int` when a string is passed as input. + return cast('int', logging.getLevelName(level)) + # Unknown log level. Fall back to the default. + return logging.INFO + + +class StreamedLogSync(StreamedLog): + """Sync variant of `StreamedLog` that logs in a separate thread.""" + + def __init__(self, log_client: LogClient, *, to_logger: logging.Logger, from_start: bool = True) -> None: + super().__init__(to_logger=to_logger, from_start=from_start) + self._log_client = log_client + self._streaming_thread: Thread | None = None + self._stop_logging = False + + def start(self) -> Thread: + """Start the streaming thread. 
The caller has to handle any cleanup by manually calling the `stop` method.""" + if self._streaming_thread: + raise RuntimeError('Streaming thread already active') + self._stop_logging = False + self._streaming_thread = threading.Thread(target=self._stream_log) + self._streaming_thread.start() + return self._streaming_thread + + def stop(self) -> None: + """Signal the streaming thread to stop logging and wait for it to finish.""" + if not self._streaming_thread: + raise RuntimeError('Streaming thread is not active') + self._stop_logging = True + self._streaming_thread.join() + self._streaming_thread = None + self._stop_logging = False + + def __enter__(self) -> Self: + """Start the streaming thread within the context. Exiting the context will finish the streaming thread.""" + self.start() + return self + + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: + """Stop the streaming thread.""" + self.stop() + + def _stream_log(self) -> None: + with self._log_client.stream(raw=True) as log_stream: + if not log_stream: + return + for data in log_stream.iter_bytes(): + self._process_new_data(data) + if self._stop_logging: + break + + # If the stream is finished, then the last part will also be processed. + self._log_buffer_content(include_last_part=True) + return + + +class StreamedLogAsync(StreamedLog): + """Async variant of `StreamedLog` that logs in an asyncio task.""" + + def __init__(self, log_client: LogClientAsync, *, to_logger: logging.Logger, from_start: bool = True) -> None: + super().__init__(to_logger=to_logger, from_start=from_start) + self._log_client = log_client + self._streaming_task: Task | None = None + + def start(self) -> Task: + """Start the streaming task. The caller has to handle any cleanup by manually calling the `stop` method.""" + if self._streaming_task: + raise RuntimeError('Streaming task already active') + self._streaming_task = asyncio.create_task(self._stream_log()) + return self._streaming_task + + def stop(self) -> None: + """Stop the streaming task.""" + if not self._streaming_task: + raise RuntimeError('Streaming task is not active') + + self._streaming_task.cancel() + self._streaming_task = None + + async def __aenter__(self) -> Self: + """Start the streaming task within the context. Exiting the context will cancel the streaming task.""" + self.start() + return self + + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: + """Cancel the streaming task.""" + self.stop() + + async def _stream_log(self) -> None: + async with self._log_client.stream(raw=True) as log_stream: + if not log_stream: + return + async for data in log_stream.aiter_bytes(): + self._process_new_data(data) + + # If the stream is finished, then the last part will also be processed. 
+ self._log_buffer_content(include_last_part=True) diff --git a/src/apify_client/clients/resource_clients/run.py b/src/apify_client/clients/resource_clients/run.py index 0ed0c412..3297c36f 100644 --- a/src/apify_client/clients/resource_clients/run.py +++ b/src/apify_client/clients/resource_clients/run.py @@ -8,14 +8,21 @@ from apify_shared.utils import filter_out_none_values_recursively, ignore_docs, parse_date_fields +from apify_client._logging import create_redirect_logger from apify_client._utils import encode_key_value_store_record_value, pluck_data, to_safe_id from apify_client.clients.base import ActorJobBaseClient, ActorJobBaseClientAsync from apify_client.clients.resource_clients.dataset import DatasetClient, DatasetClientAsync from apify_client.clients.resource_clients.key_value_store import KeyValueStoreClient, KeyValueStoreClientAsync -from apify_client.clients.resource_clients.log import LogClient, LogClientAsync +from apify_client.clients.resource_clients.log import ( + LogClient, + LogClientAsync, + StreamedLogAsync, + StreamedLogSync, +) from apify_client.clients.resource_clients.request_queue import RequestQueueClient, RequestQueueClientAsync if TYPE_CHECKING: + import logging from decimal import Decimal from apify_shared.consts import RunGeneralAccess @@ -248,6 +255,33 @@ def log(self) -> LogClient: **self._sub_resource_init_options(resource_path='log'), ) + def get_streamed_log(self, to_logger: logging.Logger | None = None, *, from_start: bool = True) -> StreamedLogSync: + """Get a `StreamedLog` instance that can be used to redirect logs. + + `StreamedLog` can be started and stopped manually, or used as a context manager. + + Args: + to_logger: `Logger` used for logging the redirected messages. If not provided, a new logger is created. + from_start: If `True`, all logs from the start of the Actor run will be redirected. If `False`, only newly + arrived logs will be redirected. This can be useful for redirecting only a small portion of relevant + logs for long-running Actors in stand-by. + + Returns: + `StreamedLog` instance for redirected logs. + """ + run_data = self.get() + run_id = run_data.get('id', '') if run_data else '' + + actor_id = run_data.get('actId', '') if run_data else '' + actor_data = self.root_client.actor(actor_id=actor_id).get() or {} + actor_name = actor_data.get('name', '') if run_data else '' + + if not to_logger: + name = '-'.join(part for part in (actor_name, run_id) if part) + to_logger = create_redirect_logger(f'apify.{name}') + + return StreamedLogSync(log_client=self.log(), to_logger=to_logger, from_start=from_start) + def charge( self, event_name: str, @@ -515,6 +549,35 @@ def log(self) -> LogClientAsync: **self._sub_resource_init_options(resource_path='log'), ) + async def get_streamed_log( + self, to_logger: logging.Logger | None = None, *, from_start: bool = True + ) -> StreamedLogAsync: + """Get a `StreamedLog` instance that can be used to redirect logs. + + `StreamedLog` can be started and stopped manually, or used as a context manager. + + Args: + to_logger: `Logger` used for logging the redirected messages. If not provided, a new logger is created. + from_start: If `True`, all logs from the start of the Actor run will be redirected. If `False`, only newly + arrived logs will be redirected. This can be useful for redirecting only a small portion of relevant + logs for long-running Actors in stand-by. + + Returns: + `StreamedLog` instance for redirected logs. 
+ """ + run_data = await self.get() + run_id = run_data.get('id', '') if run_data else '' + + actor_id = run_data.get('actId', '') if run_data else '' + actor_data = await self.root_client.actor(actor_id=actor_id).get() or {} + actor_name = actor_data.get('name', '') if run_data else '' + + if not to_logger: + name = '-'.join(part for part in (actor_name, run_id) if part) + to_logger = create_redirect_logger(f'apify.{name}') + + return StreamedLogAsync(log_client=self.log(), to_logger=to_logger, from_start=from_start) + async def charge( self, event_name: str, diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py new file mode 100644 index 00000000..c77c75a0 --- /dev/null +++ b/tests/unit/test_logging.py @@ -0,0 +1,334 @@ +import asyncio +import json +import logging +import time +from collections.abc import AsyncIterator, Iterator +from datetime import datetime +from unittest.mock import patch + +import httpx +import pytest +import respx +from _pytest.logging import LogCaptureFixture +from apify_shared.consts import ActorJobStatus + +from apify_client import ApifyClient, ApifyClientAsync +from apify_client._logging import RedirectLogFormatter +from apify_client.clients.resource_clients.log import StreamedLog + +_MOCKED_API_URL = 'https://example.com' +_MOCKED_RUN_ID = 'mocked_run_id' +_MOCKED_ACTOR_NAME = 'mocked_actor_name' +_MOCKED_ACTOR_ID = 'mocked_actor_id' +_MOCKED_ACTOR_LOGS = ( + b'2025-05-13T07:24:12.588Z ACTOR: Pulling Docker image of build.\n' + b'2025-05-13T07:24:12.686Z ACTOR: Creating Docker container.\n' + b'2025-05-13T07:24:12.745Z ACTOR: Starting Docker container.\n', # Several logs merged into one chunk + b'2025-05-13T07:26:14.132Z [apify] DEBUG \xc3', # Chunked log split in the middle of the multibyte character + b'\xa1\n', # part 2 + b'2025-05-13T07:24:14.132Z [apify] INFO multiline \n log\n', + b'2025-05-13T07:25:14.132Z [apify] WARNING some warning\n', + b'2025-05-13T07:26:14.132Z [apify] DEBUG c\n', + b'2025-05-13T0', # Chunked log that got split in the marker + b'7:26:14.132Z [apify] DEBUG d\n' # part 2 + b'2025-05-13T07:27:14.132Z [apify] DEB', # Chunked log that got split outside of marker + b'UG e\n', # part 2 + # Already redirected message + b'2025-05-13T07:28:14.132Z [apify.redirect-logger-4U1oAnKau6jpzjUuA] -> 2025-05-13T07:27:14.132Z ACTOR: Pulling\n', +) +_EXISTING_LOGS_BEFORE_REDIRECT_ATTACH = 3 + +_EXPECTED_MESSAGES_AND_LEVELS = ( + ('2025-05-13T07:24:12.588Z ACTOR: Pulling Docker image of build.', logging.INFO), + ('2025-05-13T07:24:12.686Z ACTOR: Creating Docker container.', logging.INFO), + ('2025-05-13T07:24:12.745Z ACTOR: Starting Docker container.', logging.INFO), + ('2025-05-13T07:26:14.132Z [apify] DEBUG รก', logging.DEBUG), + ('2025-05-13T07:24:14.132Z [apify] INFO multiline \n log', logging.INFO), + ('2025-05-13T07:25:14.132Z [apify] WARNING some warning', logging.WARNING), + ('2025-05-13T07:26:14.132Z [apify] DEBUG c', logging.DEBUG), + ('2025-05-13T07:26:14.132Z [apify] DEBUG d', logging.DEBUG), + ('2025-05-13T07:27:14.132Z [apify] DEBUG e', logging.DEBUG), + ( + '2025-05-13T07:28:14.132Z [apify.redirect-logger-4U1oAnKau6jpzjUuA] -> 2025-05-13T07:27:14.132Z ACTOR: Pulling', + logging.INFO, + ), +) + + +@pytest.fixture +def mock_api() -> None: + actor_runs_responses = iter( + ( + httpx.Response( + content=json.dumps( + {'data': {'id': _MOCKED_RUN_ID, 'actId': _MOCKED_ACTOR_ID, 'status': ActorJobStatus.RUNNING}} + ), + status_code=200, + ), + httpx.Response( + content=json.dumps( + {'data': {'id': _MOCKED_RUN_ID, 'actId': 
_MOCKED_ACTOR_ID, 'status': ActorJobStatus.RUNNING}} ), status_code=200, ), httpx.Response( content=json.dumps( {'data': {'id': _MOCKED_RUN_ID, 'actId': _MOCKED_ACTOR_ID, 'status': ActorJobStatus.SUCCEEDED}} ), status_code=200, ), ) ) + + def actor_runs_side_effect(_: httpx.Request) -> httpx.Response: + time.sleep(0.1) + return next(actor_runs_responses) + + respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}').mock(side_effect=actor_runs_side_effect) + + respx.get(url=f'{_MOCKED_API_URL}/v2/acts/{_MOCKED_ACTOR_ID}').mock( + return_value=httpx.Response(content=json.dumps({'data': {'name': _MOCKED_ACTOR_NAME}}), status_code=200) + ) + + respx.post(url=f'{_MOCKED_API_URL}/v2/acts/{_MOCKED_ACTOR_ID}/runs').mock( + return_value=httpx.Response(content=json.dumps({'data': {'id': _MOCKED_RUN_ID}}), status_code=200) + ) + + +@pytest.fixture +def mock_api_async(mock_api: None) -> None: # noqa: ARG001, fixture + class AsyncByteStream(httpx._types.AsyncByteStream): + async def __aiter__(self) -> AsyncIterator[bytes]: + for i in _MOCKED_ACTOR_LOGS: + yield i + await asyncio.sleep(0.01) + + async def aclose(self) -> None: + pass + + respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}/log?stream=1&raw=1').mock( + return_value=httpx.Response(stream=AsyncByteStream(), status_code=200) + ) + + +@pytest.fixture +def mock_api_sync(mock_api: None) -> None: # noqa: ARG001, fixture + class SyncByteStream(httpx._types.SyncByteStream): + def __iter__(self) -> Iterator[bytes]: + for i in _MOCKED_ACTOR_LOGS: + yield i + time.sleep(0.01) + + def close(self) -> None: + pass + + respx.get(url=f'{_MOCKED_API_URL}/v2/actor-runs/{_MOCKED_RUN_ID}/log?stream=1&raw=1').mock( + return_value=httpx.Response(stream=SyncByteStream(), status_code=200) + ) + + +@pytest.fixture +def propagate_stream_logs() -> None: + StreamedLog._force_propagate = True # Enable propagation of logs to the caplog fixture + logging.getLogger(f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}').setLevel(logging.DEBUG) + + +@pytest.mark.parametrize( + ('log_from_start', 'expected_log_count'), + [ + (True, len(_EXPECTED_MESSAGES_AND_LEVELS)), + (False, len(_EXPECTED_MESSAGES_AND_LEVELS) - _EXISTING_LOGS_BEFORE_REDIRECT_ATTACH), + ], +) +@respx.mock +async def test_redirected_logs_async( + *, + caplog: LogCaptureFixture, + mock_api_async: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture + log_from_start: bool, + expected_log_count: int, +) -> None: + """Test that redirected logs are formatted correctly.""" + + run_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) + + with patch('apify_client.clients.resource_clients.log.datetime') as mocked_datetime: + # Mock `now()` so that it has a timestamp later than the first 3 logs + mocked_datetime.now.return_value = datetime.fromisoformat('2025-05-13T07:24:14.132+00:00') + streamed_log = await run_client.get_streamed_log(from_start=log_from_start) + + # Set `propagate=True` during the tests, so that caplog can see the logs. + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + + with caplog.at_level(logging.DEBUG, logger=logger_name): + async with streamed_log: + # Do stuff while the log from the other Actor is being redirected to the logs. 
+ await asyncio.sleep(2) + + assert len(caplog.records) == expected_log_count + for expected_message_and_level, record in zip(_EXPECTED_MESSAGES_AND_LEVELS[-expected_log_count:], caplog.records): + assert expected_message_and_level[0] == record.message + assert expected_message_and_level[1] == record.levelno + + +@pytest.mark.parametrize( + ('log_from_start', 'expected_log_count'), + [ + (True, len(_EXPECTED_MESSAGES_AND_LEVELS)), + (False, len(_EXPECTED_MESSAGES_AND_LEVELS) - _EXISTING_LOGS_BEFORE_REDIRECT_ATTACH), + ], +) +@respx.mock +def test_redirected_logs_sync( + *, + caplog: LogCaptureFixture, + mock_api_sync: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture + log_from_start: bool, + expected_log_count: int, +) -> None: + """Test that redirected logs are formatted correctly.""" + + run_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).run(run_id=_MOCKED_RUN_ID) + + with patch('apify_client.clients.resource_clients.log.datetime') as mocked_datetime: + # Mock `now()` so that it has a timestamp later than the first 3 logs + mocked_datetime.now.return_value = datetime.fromisoformat('2025-05-13T07:24:14.132+00:00') + streamed_log = run_client.get_streamed_log(from_start=log_from_start) + + # Set `propagate=True` during the tests, so that caplog can see the logs. + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + + with caplog.at_level(logging.DEBUG, logger=logger_name), streamed_log: + # Do stuff while the log from the other Actor is being redirected to the logs. + time.sleep(2) + + assert len(caplog.records) == expected_log_count + for expected_message_and_level, record in zip(_EXPECTED_MESSAGES_AND_LEVELS[-expected_log_count:], caplog.records): + assert expected_message_and_level[0] == record.message + assert expected_message_and_level[1] == record.levelno + + +@respx.mock +async def test_actor_call_redirect_logs_to_default_logger_async( + caplog: LogCaptureFixture, + mock_api_async: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture +) -> None: + """Test that logs are redirected correctly to the default logger. + + Caplog contains logs before formatting, so formatting is not included in the test expectations.""" + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + logger = logging.getLogger(logger_name) + run_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + await run_client.call() + + # Ensure expected handler and formatter + assert isinstance(logger.handlers[0].formatter, RedirectLogFormatter) + assert isinstance(logger.handlers[0], logging.StreamHandler) + + # Ensure logs are propagated + assert len(caplog.records) == len(_EXPECTED_MESSAGES_AND_LEVELS) + for expected_message_and_level, record in zip(_EXPECTED_MESSAGES_AND_LEVELS, caplog.records): + assert expected_message_and_level[0] == record.message + assert expected_message_and_level[1] == record.levelno + + +@respx.mock +def test_actor_call_redirect_logs_to_default_logger_sync( + caplog: LogCaptureFixture, + mock_api_sync: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture +) -> None: + """Test that logs are redirected correctly to the default logger. 
+ + Caplog contains logs before formatting, so formatting is not included in the test expectations.""" + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + logger = logging.getLogger(logger_name) + run_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + run_client.call() + + # Ensure expected handler and formatter + assert isinstance(logger.handlers[0].formatter, RedirectLogFormatter) + assert isinstance(logger.handlers[0], logging.StreamHandler) + + # Ensure logs are propagated + assert len(caplog.records) == len(_EXPECTED_MESSAGES_AND_LEVELS) + for expected_message_and_level, record in zip(_EXPECTED_MESSAGES_AND_LEVELS, caplog.records): + assert expected_message_and_level[0] == record.message + assert expected_message_and_level[1] == record.levelno + + +@respx.mock +async def test_actor_call_no_redirect_logs_async( + caplog: LogCaptureFixture, + mock_api_async: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture +) -> None: + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + run_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + await run_client.call(logger=None) + + assert len(caplog.records) == 0 + + +@respx.mock +def test_actor_call_no_redirect_logs_sync( + caplog: LogCaptureFixture, + mock_api_sync: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture +) -> None: + logger_name = f'apify.{_MOCKED_ACTOR_NAME}-{_MOCKED_RUN_ID}' + run_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + run_client.call(logger=None) + + assert len(caplog.records) == 0 + + +@respx.mock +async def test_actor_call_redirect_logs_to_custom_logger_async( + caplog: LogCaptureFixture, + mock_api_async: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture +) -> None: + """Test that logs are redirected correctly to the custom logger.""" + logger_name = 'custom_logger' + logger = logging.getLogger(logger_name) + run_client = ApifyClientAsync(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + await run_client.call(logger=logger) + + assert len(caplog.records) == len(_EXPECTED_MESSAGES_AND_LEVELS) + for expected_message_and_level, record in zip(_EXPECTED_MESSAGES_AND_LEVELS, caplog.records): + assert expected_message_and_level[0] == record.message + assert expected_message_and_level[1] == record.levelno + + +@respx.mock +def test_actor_call_redirect_logs_to_custom_logger_sync( + caplog: LogCaptureFixture, + mock_api_sync: None, # noqa: ARG001, fixture + propagate_stream_logs: None, # noqa: ARG001, fixture +) -> None: + """Test that logs are redirected correctly to the custom logger.""" + logger_name = 'custom_logger' + logger = logging.getLogger(logger_name) + run_client = ApifyClient(token='mocked_token', api_url=_MOCKED_API_URL).actor(actor_id=_MOCKED_ACTOR_ID) + + with caplog.at_level(logging.DEBUG, logger=logger_name): + run_client.call(logger=logger) + + assert len(caplog.records) == len(_EXPECTED_MESSAGES_AND_LEVELS) + for expected_message_and_level, record in zip(_EXPECTED_MESSAGES_AND_LEVELS, caplog.records): + assert expected_message_and_level[0] == record.message + 
assert expected_message_and_level[1] == record.levelno diff --git a/uv.lock b/uv.lock index 7f85ea8e..dbaab02b 100644 --- a/uv.lock +++ b/uv.lock @@ -27,6 +27,7 @@ version = "1.10.1" source = { editable = "." } dependencies = [ { name = "apify-shared" }, + { name = "colorama" }, { name = "httpx" }, { name = "more-itertools" }, ] @@ -46,11 +47,13 @@ dev = [ { name = "respx" }, { name = "ruff" }, { name = "setuptools" }, + { name = "types-colorama" }, ] [package.metadata] requires-dist = [ { name = "apify-shared", specifier = ">=1.4.1" }, + { name = "colorama", specifier = "~=0.4.0" }, { name = "httpx", specifier = ">=0.25" }, { name = "more-itertools", specifier = ">=10.0.0" }, ] @@ -70,6 +73,7 @@ dev = [ { name = "respx", specifier = "~=0.22.0" }, { name = "ruff", specifier = "~=0.11.0" }, { name = "setuptools" }, + { name = "types-colorama", specifier = "~=0.4.15.20240106" }, ] [[package]] @@ -756,11 +760,11 @@ wheels = [ [[package]] name = "pluggy" -version = "1.5.0" +version = "1.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955, upload-time = "2024-04-20T21:34:42.531Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556, upload-time = "2024-04-20T21:34:40.434Z" }, + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] [[package]] @@ -979,36 +983,36 @@ wheels = [ [[package]] name = "ruff" -version = "0.11.9" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f5/e7/e55dda1c92cdcf34b677ebef17486669800de01e887b7831a1b8fdf5cb08/ruff-0.11.9.tar.gz", hash = "sha256:ebd58d4f67a00afb3a30bf7d383e52d0e036e6195143c6db7019604a05335517", size = 4132134, upload-time = "2025-05-09T16:19:41.511Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/71/75dfb7194fe6502708e547941d41162574d1f579c4676a8eb645bf1a6842/ruff-0.11.9-py3-none-linux_armv6l.whl", hash = "sha256:a31a1d143a5e6f499d1fb480f8e1e780b4dfdd580f86e05e87b835d22c5c6f8c", size = 10335453, upload-time = "2025-05-09T16:18:58.2Z" }, - { url = "https://files.pythonhosted.org/packages/74/fc/ad80c869b1732f53c4232bbf341f33c5075b2c0fb3e488983eb55964076a/ruff-0.11.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:66bc18ca783b97186a1f3100e91e492615767ae0a3be584e1266aa9051990722", size = 11072566, upload-time = "2025-05-09T16:19:01.432Z" }, - { url = "https://files.pythonhosted.org/packages/87/0d/0ccececef8a0671dae155cbf7a1f90ea2dd1dba61405da60228bbe731d35/ruff-0.11.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bd576cd06962825de8aece49f28707662ada6a1ff2db848d1348e12c580acbf1", size = 10435020, upload-time = 
"2025-05-09T16:19:03.897Z" }, - { url = "https://files.pythonhosted.org/packages/52/01/e249e1da6ad722278094e183cbf22379a9bbe5f21a3e46cef24ccab76e22/ruff-0.11.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b1d18b4be8182cc6fddf859ce432cc9631556e9f371ada52f3eaefc10d878de", size = 10593935, upload-time = "2025-05-09T16:19:06.455Z" }, - { url = "https://files.pythonhosted.org/packages/ed/9a/40cf91f61e3003fe7bd43f1761882740e954506c5a0f9097b1cff861f04c/ruff-0.11.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0f3f46f759ac623e94824b1e5a687a0df5cd7f5b00718ff9c24f0a894a683be7", size = 10172971, upload-time = "2025-05-09T16:19:10.261Z" }, - { url = "https://files.pythonhosted.org/packages/61/12/d395203de1e8717d7a2071b5a340422726d4736f44daf2290aad1085075f/ruff-0.11.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f34847eea11932d97b521450cf3e1d17863cfa5a94f21a056b93fb86f3f3dba2", size = 11748631, upload-time = "2025-05-09T16:19:12.307Z" }, - { url = "https://files.pythonhosted.org/packages/66/d6/ef4d5eba77677eab511644c37c55a3bb8dcac1cdeb331123fe342c9a16c9/ruff-0.11.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f33b15e00435773df97cddcd263578aa83af996b913721d86f47f4e0ee0ff271", size = 12409236, upload-time = "2025-05-09T16:19:15.006Z" }, - { url = "https://files.pythonhosted.org/packages/c5/8f/5a2c5fc6124dd925a5faf90e1089ee9036462118b619068e5b65f8ea03df/ruff-0.11.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b27613a683b086f2aca8996f63cb3dd7bc49e6eccf590563221f7b43ded3f65", size = 11881436, upload-time = "2025-05-09T16:19:17.063Z" }, - { url = "https://files.pythonhosted.org/packages/39/d1/9683f469ae0b99b95ef99a56cfe8c8373c14eba26bd5c622150959ce9f64/ruff-0.11.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e0d88756e63e8302e630cee3ce2ffb77859797cc84a830a24473939e6da3ca6", size = 13982759, upload-time = "2025-05-09T16:19:19.693Z" }, - { url = "https://files.pythonhosted.org/packages/4e/0b/c53a664f06e0faab596397867c6320c3816df479e888fe3af63bc3f89699/ruff-0.11.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:537c82c9829d7811e3aa680205f94c81a2958a122ac391c0eb60336ace741a70", size = 11541985, upload-time = "2025-05-09T16:19:21.831Z" }, - { url = "https://files.pythonhosted.org/packages/23/a0/156c4d7e685f6526a636a60986ee4a3c09c8c4e2a49b9a08c9913f46c139/ruff-0.11.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:440ac6a7029f3dee7d46ab7de6f54b19e34c2b090bb4f2480d0a2d635228f381", size = 10465775, upload-time = "2025-05-09T16:19:24.401Z" }, - { url = "https://files.pythonhosted.org/packages/43/d5/88b9a6534d9d4952c355e38eabc343df812f168a2c811dbce7d681aeb404/ruff-0.11.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:71c539bac63d0788a30227ed4d43b81353c89437d355fdc52e0cda4ce5651787", size = 10170957, upload-time = "2025-05-09T16:19:27.08Z" }, - { url = "https://files.pythonhosted.org/packages/f0/b8/2bd533bdaf469dc84b45815ab806784d561fab104d993a54e1852596d581/ruff-0.11.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c67117bc82457e4501473c5f5217d49d9222a360794bfb63968e09e70f340abd", size = 11143307, upload-time = "2025-05-09T16:19:29.462Z" }, - { url = "https://files.pythonhosted.org/packages/2f/d9/43cfba291788459b9bfd4e09a0479aa94d05ab5021d381a502d61a807ec1/ruff-0.11.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e4b78454f97aa454586e8a5557facb40d683e74246c97372af3c2d76901d697b", size = 11603026, upload-time = 
"2025-05-09T16:19:31.569Z" }, - { url = "https://files.pythonhosted.org/packages/22/e6/7ed70048e89b01d728ccc950557a17ecf8df4127b08a56944b9d0bae61bc/ruff-0.11.9-py3-none-win32.whl", hash = "sha256:7fe1bc950e7d7b42caaee2a8a3bc27410547cc032c9558ee2e0f6d3b209e845a", size = 10548627, upload-time = "2025-05-09T16:19:33.657Z" }, - { url = "https://files.pythonhosted.org/packages/90/36/1da5d566271682ed10f436f732e5f75f926c17255c9c75cefb77d4bf8f10/ruff-0.11.9-py3-none-win_amd64.whl", hash = "sha256:52edaa4a6d70f8180343a5b7f030c7edd36ad180c9f4d224959c2d689962d964", size = 11634340, upload-time = "2025-05-09T16:19:35.815Z" }, - { url = "https://files.pythonhosted.org/packages/40/f7/70aad26e5877c8f7ee5b161c4c9fa0100e63fc4c944dc6d97b9c7e871417/ruff-0.11.9-py3-none-win_arm64.whl", hash = "sha256:bcf42689c22f2e240f496d0c183ef2c6f7b35e809f12c1db58f75d9aa8d630ca", size = 10741080, upload-time = "2025-05-09T16:19:39.605Z" }, +version = "0.11.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/4c/4a3c5a97faaae6b428b336dcca81d03ad04779f8072c267ad2bd860126bf/ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6", size = 4165632, upload-time = "2025-05-15T14:08:56.76Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/9f/596c628f8824a2ce4cd12b0f0b4c0629a62dfffc5d0f742c19a1d71be108/ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58", size = 10316243, upload-time = "2025-05-15T14:08:12.884Z" }, + { url = "https://files.pythonhosted.org/packages/3c/38/c1e0b77ab58b426f8c332c1d1d3432d9fc9a9ea622806e208220cb133c9e/ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed", size = 11083636, upload-time = "2025-05-15T14:08:16.551Z" }, + { url = "https://files.pythonhosted.org/packages/23/41/b75e15961d6047d7fe1b13886e56e8413be8467a4e1be0a07f3b303cd65a/ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca", size = 10441624, upload-time = "2025-05-15T14:08:19.032Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2c/e396b6703f131406db1811ea3d746f29d91b41bbd43ad572fea30da1435d/ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2", size = 10624358, upload-time = "2025-05-15T14:08:21.542Z" }, + { url = "https://files.pythonhosted.org/packages/bd/8c/ee6cca8bdaf0f9a3704796022851a33cd37d1340bceaf4f6e991eb164e2e/ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5", size = 10176850, upload-time = "2025-05-15T14:08:23.682Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ce/4e27e131a434321b3b7c66512c3ee7505b446eb1c8a80777c023f7e876e6/ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641", size = 11759787, upload-time = "2025-05-15T14:08:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/58/de/1e2e77fc72adc7cf5b5123fd04a59ed329651d3eab9825674a9e640b100b/ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947", size = 12430479, upload-time = "2025-05-15T14:08:28.013Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/ed/af0f2340f33b70d50121628ef175523cc4c37619e98d98748c85764c8d88/ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4", size = 11919760, upload-time = "2025-05-15T14:08:30.956Z" }, + { url = "https://files.pythonhosted.org/packages/24/09/d7b3d3226d535cb89234390f418d10e00a157b6c4a06dfbe723e9322cb7d/ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f", size = 14041747, upload-time = "2025-05-15T14:08:33.297Z" }, + { url = "https://files.pythonhosted.org/packages/62/b3/a63b4e91850e3f47f78795e6630ee9266cb6963de8f0191600289c2bb8f4/ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b", size = 11550657, upload-time = "2025-05-15T14:08:35.639Z" }, + { url = "https://files.pythonhosted.org/packages/46/63/a4f95c241d79402ccdbdb1d823d156c89fbb36ebfc4289dce092e6c0aa8f/ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2", size = 10489671, upload-time = "2025-05-15T14:08:38.437Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9b/c2238bfebf1e473495659c523d50b1685258b6345d5ab0b418ca3f010cd7/ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523", size = 10160135, upload-time = "2025-05-15T14:08:41.247Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ef/ba7251dd15206688dbfba7d413c0312e94df3b31b08f5d695580b755a899/ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125", size = 11170179, upload-time = "2025-05-15T14:08:43.762Z" }, + { url = "https://files.pythonhosted.org/packages/73/9f/5c336717293203ba275dbfa2ea16e49b29a9fd9a0ea8b6febfc17e133577/ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad", size = 11626021, upload-time = "2025-05-15T14:08:46.451Z" }, + { url = "https://files.pythonhosted.org/packages/d9/2b/162fa86d2639076667c9aa59196c020dc6d7023ac8f342416c2f5ec4bda0/ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19", size = 10494958, upload-time = "2025-05-15T14:08:49.601Z" }, + { url = "https://files.pythonhosted.org/packages/24/f3/66643d8f32f50a4b0d09a4832b7d919145ee2b944d43e604fbd7c144d175/ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224", size = 11650285, upload-time = "2025-05-15T14:08:52.392Z" }, + { url = "https://files.pythonhosted.org/packages/95/3a/2e8704d19f376c799748ff9cb041225c1d59f3e7711bc5596c8cfdc24925/ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1", size = 10765278, upload-time = "2025-05-15T14:08:54.56Z" }, ] [[package]] name = "setuptools" -version = "80.4.0" +version = "80.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/32/0cc40fe41fd2adb80a2f388987f4f8db3c866c69e33e0b4c8b093fdf700e/setuptools-80.4.0.tar.gz", hash = "sha256:5a78f61820bc088c8e4add52932ae6b8cf423da2aff268c23f813cfbb13b4006", size = 1315008, upload-time = "2025-05-09T20:42:27.972Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/9e/8b/dc1773e8e5d07fd27c1632c45c1de856ac3dbf09c0147f782ca6d990cf15/setuptools-80.7.1.tar.gz", hash = "sha256:f6ffc5f0142b1bd8d0ca94ee91b30c0ca862ffd50826da1ea85258a06fd94552", size = 1319188, upload-time = "2025-05-15T02:41:00.955Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/93/dba5ed08c2e31ec7cdc2ce75705a484ef0be1a2fecac8a58272489349de8/setuptools-80.4.0-py3-none-any.whl", hash = "sha256:6cdc8cb9a7d590b237dbe4493614a9b75d0559b888047c1f67d49ba50fc3edb2", size = 1200812, upload-time = "2025-05-09T20:42:25.325Z" }, + { url = "https://files.pythonhosted.org/packages/a1/18/0e835c3a557dc5faffc8f91092f62fc337c1dab1066715842e7a4b318ec4/setuptools-80.7.1-py3-none-any.whl", hash = "sha256:ca5cc1069b85dc23070a6628e6bcecb3292acac802399c7f8edc0100619f9009", size = 1200776, upload-time = "2025-05-15T02:40:58.887Z" }, ] [[package]] @@ -1080,6 +1084,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/33/38da585b06978d262cc2b2b45bc57ee75f0ce5e0b4ef1cab1b86461e9298/typeapi-2.2.4-py3-none-any.whl", hash = "sha256:bd6d5e5907fa47e0303bf254e7cc8712d4be4eb26d7ffaedb67c9e7844c53bb8", size = 26387, upload-time = "2025-01-29T11:40:12.328Z" }, ] +[[package]] +name = "types-colorama" +version = "0.4.15.20240311" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/59/73/0fb0b9fe4964b45b2a06ed41b60c352752626db46aa0fb70a49a9e283a75/types-colorama-0.4.15.20240311.tar.gz", hash = "sha256:a28e7f98d17d2b14fb9565d32388e419f4108f557a7d939a66319969b2b99c7a", size = 5608, upload-time = "2024-03-11T02:15:51.557Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/83/6944b4fa01efb2e63ac62b791a8ddf0fee358f93be9f64b8f152648ad9d3/types_colorama-0.4.15.20240311-py3-none-any.whl", hash = "sha256:6391de60ddc0db3f147e31ecb230006a6823e81e380862ffca1e4695c13a0b8e", size = 5840, upload-time = "2024-03-11T02:15:50.43Z" }, +] + [[package]] name = "typing-extensions" version = "4.13.2"