Skip to content

Commit c2dfbcc

Browse files
Fix(Arq): fix integration with Worker settings as a dict (#3742)
1 parent 24e5359 commit c2dfbcc

File tree

2 files changed

+110
-14
lines changed

2 files changed

+110
-14
lines changed

sentry_sdk/integrations/arq.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -198,6 +198,17 @@ def _sentry_create_worker(*args, **kwargs):
198198
# type: (*Any, **Any) -> Worker
199199
settings_cls = args[0]
200200

201+
if isinstance(settings_cls, dict):
202+
if "functions" in settings_cls:
203+
settings_cls["functions"] = [
204+
_get_arq_function(func) for func in settings_cls["functions"]
205+
]
206+
if "cron_jobs" in settings_cls:
207+
settings_cls["cron_jobs"] = [
208+
_get_arq_cron_job(cron_job)
209+
for cron_job in settings_cls["cron_jobs"]
210+
]
211+
201212
if hasattr(settings_cls, "functions"):
202213
settings_cls.functions = [
203214
_get_arq_function(func) for func in settings_cls.functions

tests/integrations/arq/test_arq.py

Lines changed: 99 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -83,14 +83,65 @@ class WorkerSettings:
8383
return inner
8484

8585

86+
@pytest.fixture
def init_arq_with_dict_settings(sentry_init):
    """Fixture factory: build an arq pool + worker whose settings are a dict.

    Mirrors the class-based ``init_arq`` fixture, but passes the worker
    configuration to ``arq.worker.create_worker`` as a plain dict so the
    dict-settings path of the Arq integration is exercised.
    """

    def inner(
        cls_functions=None,
        cls_cron_jobs=None,
        kw_functions=None,
        kw_cron_jobs=None,
        allow_abort_jobs_=False,
    ):
        # Keyword overrides are forwarded only when explicitly supplied.
        kwargs = {
            key: value
            for key, value in (
                ("functions", kw_functions),
                ("cron_jobs", kw_cron_jobs),
            )
            if value is not None
        }

        sentry_init(
            integrations=[ArqIntegration()],
            traces_sample_rate=1.0,
            send_default_pii=True,
        )

        server = FakeRedis()
        pool = ArqRedis(pool_or_conn=server.connection_pool)

        # Settings dict only carries "functions"/"cron_jobs" when non-empty,
        # matching how real callers omit unused keys.
        worker_settings = {
            "redis_pool": pool,
            "allow_abort_jobs": allow_abort_jobs_,
        }
        if cls_functions:
            worker_settings["functions"] = cls_functions
        if cls_cron_jobs:
            worker_settings["cron_jobs"] = cls_cron_jobs

        worker = arq.worker.create_worker(worker_settings, **kwargs)

        return pool, worker

    return inner
130+
131+
86132
@pytest.mark.asyncio
87-
async def test_job_result(init_arq):
133+
@pytest.mark.parametrize(
134+
"init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
135+
)
136+
async def test_job_result(init_arq_settings, request):
88137
async def increase(ctx, num):
89138
return num + 1
90139

140+
init_fixture_method = request.getfixturevalue(init_arq_settings)
141+
91142
increase.__qualname__ = increase.__name__
92143

93-
pool, worker = init_arq([increase])
144+
pool, worker = init_fixture_method([increase])
94145

95146
job = await pool.enqueue_job("increase", 3)
96147

@@ -105,14 +156,19 @@ async def increase(ctx, num):
105156

106157

107158
@pytest.mark.asyncio
108-
async def test_job_retry(capture_events, init_arq):
159+
@pytest.mark.parametrize(
160+
"init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
161+
)
162+
async def test_job_retry(capture_events, init_arq_settings, request):
109163
async def retry_job(ctx):
110164
if ctx["job_try"] < 2:
111165
raise arq.worker.Retry
112166

167+
init_fixture_method = request.getfixturevalue(init_arq_settings)
168+
113169
retry_job.__qualname__ = retry_job.__name__
114170

115-
pool, worker = init_arq([retry_job])
171+
pool, worker = init_fixture_method([retry_job])
116172

117173
job = await pool.enqueue_job("retry_job")
118174

@@ -139,11 +195,18 @@ async def retry_job(ctx):
139195
"source", [("cls_functions", "cls_cron_jobs"), ("kw_functions", "kw_cron_jobs")]
140196
)
141197
@pytest.mark.parametrize("job_fails", [True, False], ids=["error", "success"])
198+
@pytest.mark.parametrize(
199+
"init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
200+
)
142201
@pytest.mark.asyncio
143-
async def test_job_transaction(capture_events, init_arq, source, job_fails):
202+
async def test_job_transaction(
203+
capture_events, init_arq_settings, source, job_fails, request
204+
):
144205
async def division(_, a, b=0):
145206
return a / b
146207

208+
init_fixture_method = request.getfixturevalue(init_arq_settings)
209+
147210
division.__qualname__ = division.__name__
148211

149212
cron_func = async_partial(division, a=1, b=int(not job_fails))
@@ -152,7 +215,9 @@ async def division(_, a, b=0):
152215
cron_job = cron(cron_func, minute=0, run_at_startup=True)
153216

154217
functions_key, cron_jobs_key = source
155-
pool, worker = init_arq(**{functions_key: [division], cron_jobs_key: [cron_job]})
218+
pool, worker = init_fixture_method(
219+
**{functions_key: [division], cron_jobs_key: [cron_job]}
220+
)
156221

157222
events = capture_events()
158223

@@ -213,12 +278,17 @@ async def division(_, a, b=0):
213278

214279

215280
@pytest.mark.parametrize("source", ["cls_functions", "kw_functions"])
281+
@pytest.mark.parametrize(
282+
"init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
283+
)
216284
@pytest.mark.asyncio
217-
async def test_enqueue_job(capture_events, init_arq, source):
285+
async def test_enqueue_job(capture_events, init_arq_settings, source, request):
218286
async def dummy_job(_):
219287
pass
220288

221-
pool, _ = init_arq(**{source: [dummy_job]})
289+
init_fixture_method = request.getfixturevalue(init_arq_settings)
290+
291+
pool, _ = init_fixture_method(**{source: [dummy_job]})
222292

223293
events = capture_events()
224294

@@ -236,13 +306,18 @@ async def dummy_job(_):
236306

237307

238308
@pytest.mark.asyncio
239-
async def test_execute_job_without_integration(init_arq):
309+
@pytest.mark.parametrize(
310+
"init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
311+
)
312+
async def test_execute_job_without_integration(init_arq_settings, request):
240313
async def dummy_job(_ctx):
241314
pass
242315

316+
init_fixture_method = request.getfixturevalue(init_arq_settings)
317+
243318
dummy_job.__qualname__ = dummy_job.__name__
244319

245-
pool, worker = init_arq([dummy_job])
320+
pool, worker = init_fixture_method([dummy_job])
246321
# remove the integration to trigger the edge case
247322
get_client().integrations.pop("arq")
248323

@@ -254,12 +329,17 @@ async def dummy_job(_ctx):
254329

255330

256331
@pytest.mark.parametrize("source", ["cls_functions", "kw_functions"])
332+
@pytest.mark.parametrize(
333+
"init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
334+
)
257335
@pytest.mark.asyncio
258-
async def test_span_origin_producer(capture_events, init_arq, source):
336+
async def test_span_origin_producer(capture_events, init_arq_settings, source, request):
259337
async def dummy_job(_):
260338
pass
261339

262-
pool, _ = init_arq(**{source: [dummy_job]})
340+
init_fixture_method = request.getfixturevalue(init_arq_settings)
341+
342+
pool, _ = init_fixture_method(**{source: [dummy_job]})
263343

264344
events = capture_events()
265345

@@ -272,13 +352,18 @@ async def dummy_job(_):
272352

273353

274354
@pytest.mark.asyncio
275-
async def test_span_origin_consumer(capture_events, init_arq):
355+
@pytest.mark.parametrize(
356+
"init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
357+
)
358+
async def test_span_origin_consumer(capture_events, init_arq_settings, request):
276359
async def job(ctx):
277360
pass
278361

362+
init_fixture_method = request.getfixturevalue(init_arq_settings)
363+
279364
job.__qualname__ = job.__name__
280365

281-
pool, worker = init_arq([job])
366+
pool, worker = init_fixture_method([job])
282367

283368
job = await pool.enqueue_job("retry_job")
284369

0 commit comments

Comments
 (0)