
Commit f3b3f65

feat(arq): add arq integration (#1872)
Initial integration for arq: job enqueues become queue.submit.arq spans, worker job runs become queue.task.arq transactions, and task exceptions are captured with job metadata (id, retry count, and args/kwargs when sending PII is allowed).
Parent: 710f3c4
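As a usage sketch (not part of this diff): the integration can be enabled explicitly through sentry_sdk.init; the DSN below is a placeholder.

import sentry_sdk
from sentry_sdk.integrations.arq import ArqIntegration

# Minimal sketch: enable the new integration at startup.
# The DSN value is a placeholder, not taken from this commit.
sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
    traces_sample_rate=1.0,  # required for the queue.* spans and transactions
    integrations=[ArqIntegration()],
)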

File tree

8 files changed: +452 -0 lines
Lines changed: 73 additions & 0 deletions
@@ -0,0 +1,73 @@
name: Test arq

on:
  push:
    branches:
      - master
      - release/**

  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test:
    name: arq, python ${{ matrix.python-version }}, ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    timeout-minutes: 45

    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.8","3.9","3.10","3.11"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]

    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - name: Setup Test Env
        run: |
          pip install codecov "tox>=3,<4"

      - name: Test arq
        timeout-minutes: 45
        shell: bash
        run: |
          set -x # print commands that are executed
          coverage erase

          ./scripts/runtox.sh "${{ matrix.python-version }}-arq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
          coverage combine .coverage*
          coverage xml -i
          codecov --file coverage.xml

  check_required_tests:
    name: All arq tests passed or skipped
    needs: test
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test.result, 'failure')
        run: |
          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1

mypy.ini

Lines changed: 2 additions & 0 deletions
@@ -65,3 +65,5 @@ ignore_missing_imports = True
 ignore_missing_imports = True
 [mypy-huey.*]
 ignore_missing_imports = True
+[mypy-arq.*]
+ignore_missing_imports = True

sentry_sdk/consts.py

Lines changed: 2 additions & 0 deletions
@@ -65,6 +65,8 @@ class OP:
     MIDDLEWARE_STARLITE = "middleware.starlite"
     MIDDLEWARE_STARLITE_RECEIVE = "middleware.starlite.receive"
     MIDDLEWARE_STARLITE_SEND = "middleware.starlite.send"
+    QUEUE_SUBMIT_ARQ = "queue.submit.arq"
+    QUEUE_TASK_ARQ = "queue.task.arq"
     QUEUE_SUBMIT_CELERY = "queue.submit.celery"
     QUEUE_TASK_CELERY = "queue.task.celery"
     QUEUE_TASK_RQ = "queue.task.rq"

sentry_sdk/integrations/arq.py

Lines changed: 203 additions & 0 deletions
@@ -0,0 +1,203 @@
from __future__ import absolute_import

import sys

from sentry_sdk._compat import reraise
from sentry_sdk._types import MYPY
from sentry_sdk import Hub
from sentry_sdk.consts import OP
from sentry_sdk.hub import _should_send_default_pii
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK
from sentry_sdk.utils import (
    capture_internal_exceptions,
    event_from_exception,
    SENSITIVE_DATA_SUBSTITUTE,
)

try:
    import arq.worker
    from arq.version import VERSION as ARQ_VERSION
    from arq.connections import ArqRedis
    from arq.worker import JobExecutionFailed, Retry, RetryJob, Worker
except ImportError:
    raise DidNotEnable("Arq is not installed")

if MYPY:
    from typing import Any, Dict, Optional

    from sentry_sdk._types import EventProcessor, Event, ExcInfo, Hint

    from arq.jobs import Job
    from arq.typing import WorkerCoroutine
    from arq.worker import Function

ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob)


class ArqIntegration(Integration):
    identifier = "arq"

    @staticmethod
    def setup_once():
        # type: () -> None

        try:
            if isinstance(ARQ_VERSION, str):
                version = tuple(map(int, ARQ_VERSION.split(".")[:2]))
            else:
                version = ARQ_VERSION.version[:2]
        except (TypeError, ValueError):
            raise DidNotEnable("arq version unparsable: {}".format(ARQ_VERSION))

        if version < (0, 23):
            raise DidNotEnable("arq 0.23 or newer required.")

        patch_enqueue_job()
        patch_run_job()
        patch_func()

        ignore_logger("arq.worker")


def patch_enqueue_job():
    # type: () -> None
    old_enqueue_job = ArqRedis.enqueue_job

    async def _sentry_enqueue_job(self, function, *args, **kwargs):
        # type: (ArqRedis, str, *Any, **Any) -> Optional[Job]
        hub = Hub.current

        if hub.get_integration(ArqIntegration) is None:
            return await old_enqueue_job(self, function, *args, **kwargs)

        with hub.start_span(op=OP.QUEUE_SUBMIT_ARQ, description=function):
            return await old_enqueue_job(self, function, *args, **kwargs)

    ArqRedis.enqueue_job = _sentry_enqueue_job


def patch_run_job():
    # type: () -> None
    old_run_job = Worker.run_job

    async def _sentry_run_job(self, job_id, score):
        # type: (Worker, str, int) -> None
        hub = Hub(Hub.current)

        if hub.get_integration(ArqIntegration) is None:
            return await old_run_job(self, job_id, score)

        with hub.push_scope() as scope:
            scope._name = "arq"
            scope.clear_breadcrumbs()

            transaction = Transaction(
                name="unknown arq task",
                status="ok",
                op=OP.QUEUE_TASK_ARQ,
                source=TRANSACTION_SOURCE_TASK,
            )

            with hub.start_transaction(transaction):
                return await old_run_job(self, job_id, score)

    Worker.run_job = _sentry_run_job


def _capture_exception(exc_info):
    # type: (ExcInfo) -> None
    hub = Hub.current

    if hub.scope.transaction is not None:
        if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS:
            hub.scope.transaction.set_status("aborted")
            return

        hub.scope.transaction.set_status("internal_error")

    event, hint = event_from_exception(
        exc_info,
        client_options=hub.client.options if hub.client else None,
        mechanism={"type": ArqIntegration.identifier, "handled": False},
    )
    hub.capture_event(event, hint=hint)


def _make_event_processor(ctx, *args, **kwargs):
    # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor
    def event_processor(event, hint):
        # type: (Event, Hint) -> Optional[Event]

        hub = Hub.current

        with capture_internal_exceptions():
            if hub.scope.transaction is not None:
                hub.scope.transaction.name = ctx["job_name"]
                event["transaction"] = ctx["job_name"]

            tags = event.setdefault("tags", {})
            tags["arq_task_id"] = ctx["job_id"]
            tags["arq_task_retry"] = ctx["job_try"] > 1
            extra = event.setdefault("extra", {})
            extra["arq-job"] = {
                "task": ctx["job_name"],
                "args": args
                if _should_send_default_pii()
                else SENSITIVE_DATA_SUBSTITUTE,
                "kwargs": kwargs
                if _should_send_default_pii()
                else SENSITIVE_DATA_SUBSTITUTE,
                "retry": ctx["job_try"],
            }

        return event

    return event_processor


def _wrap_coroutine(name, coroutine):
    # type: (str, WorkerCoroutine) -> WorkerCoroutine
    async def _sentry_coroutine(ctx, *args, **kwargs):
        # type: (Dict[Any, Any], *Any, **Any) -> Any
        hub = Hub.current
        if hub.get_integration(ArqIntegration) is None:
            return await coroutine(*args, **kwargs)

        hub.scope.add_event_processor(
            _make_event_processor({**ctx, "job_name": name}, *args, **kwargs)
        )

        try:
            result = await coroutine(ctx, *args, **kwargs)
        except Exception:
            exc_info = sys.exc_info()
            _capture_exception(exc_info)
            reraise(*exc_info)

        return result

    return _sentry_coroutine


def patch_func():
    # type: () -> None
    old_func = arq.worker.func

    def _sentry_func(*args, **kwargs):
        # type: (*Any, **Any) -> Function
        hub = Hub.current

        if hub.get_integration(ArqIntegration) is None:
            return old_func(*args, **kwargs)

        func = old_func(*args, **kwargs)

        if not getattr(func, "_sentry_is_patched", False):
            func.coroutine = _wrap_coroutine(func.name, func.coroutine)
            func._sentry_is_patched = True

        return func

    arq.worker.func = _sentry_func
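For context, a hedged sketch of the arq usage this module instruments; the task, Redis settings, and WorkerSettings class below are illustrative assumptions, not part of this commit. Calls that go through ArqRedis.enqueue_job get a queue.submit.arq span, and each job the worker executes is traced as a queue.task.arq transaction, with failures captured by the wrapped coroutine.

import asyncio

import sentry_sdk
from sentry_sdk.integrations.arq import ArqIntegration

from arq import create_pool
from arq.connections import RedisSettings


async def download_content(ctx, url):
    # Hypothetical task. The patched arq.worker.func wraps this coroutine, so an
    # exception raised here is captured and tagged with arq_task_id / arq_task_retry.
    return len(url)


class WorkerSettings:
    # Hypothetical worker config; run it with arq's CLI (arq <module>.WorkerSettings).
    functions = [download_content]


async def main():
    sentry_sdk.init(traces_sample_rate=1.0, integrations=[ArqIntegration()])
    pool = await create_pool(RedisSettings())
    # enqueue_job is patched above: this call is wrapped in a queue.submit.arq
    # span whose description is the function name.
    await pool.enqueue_job("download_content", "https://example.com")


asyncio.run(main())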

setup.py

Lines changed: 1 addition & 0 deletions
@@ -53,6 +53,7 @@ def get_file_text(file_name):
         "celery": ["celery>=3"],
         "huey": ["huey>=2"],
         "beam": ["apache-beam>=2.12"],
+        "arq": ["arq>=0.23"],
         "rq": ["rq>=0.6"],
         "aiohttp": ["aiohttp>=3.5"],
         "tornado": ["tornado>=5"],

tests/integrations/arq/__init__.py

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
import pytest

pytest.importorskip("arq")
