@@ -83,14 +83,65 @@ class WorkerSettings:
     return inner


+@pytest.fixture
+def init_arq_with_dict_settings(sentry_init):
+    def inner(
+        cls_functions=None,
+        cls_cron_jobs=None,
+        kw_functions=None,
+        kw_cron_jobs=None,
+        allow_abort_jobs_=False,
+    ):
+        cls_functions = cls_functions or []
+        cls_cron_jobs = cls_cron_jobs or []
+
+        kwargs = {}
+        if kw_functions is not None:
+            kwargs["functions"] = kw_functions
+        if kw_cron_jobs is not None:
+            kwargs["cron_jobs"] = kw_cron_jobs
+
+        sentry_init(
+            integrations=[ArqIntegration()],
+            traces_sample_rate=1.0,
+            send_default_pii=True,
+        )
+
+        server = FakeRedis()
+        pool = ArqRedis(pool_or_conn=server.connection_pool)
+
+        worker_settings = {
+            "functions": cls_functions,
+            "cron_jobs": cls_cron_jobs,
+            "redis_pool": pool,
+            "allow_abort_jobs": allow_abort_jobs_,
+        }
+
+        if not worker_settings["functions"]:
+            del worker_settings["functions"]
+        if not worker_settings["cron_jobs"]:
+            del worker_settings["cron_jobs"]
+
+        worker = arq.worker.create_worker(worker_settings, **kwargs)
+
+        return pool, worker
+
+    return inner
+
+
 @pytest.mark.asyncio
-async def test_job_result(init_arq):
+@pytest.mark.parametrize(
+    "init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
+)
+async def test_job_result(init_arq_settings, request):
     async def increase(ctx, num):
         return num + 1

+    init_fixture_method = request.getfixturevalue(init_arq_settings)
+
     increase.__qualname__ = increase.__name__

-    pool, worker = init_arq([increase])
+    pool, worker = init_fixture_method([increase])

     job = await pool.enqueue_job("increase", 3)
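Note: the new init_arq_with_dict_settings fixture relies on arq.worker.create_worker accepting a plain dict of settings in addition to a WorkerSettings-style class. A minimal sketch of the two equivalent forms (my_task is a hypothetical job, not part of this diff):

import arq.worker

async def my_task(ctx):
    return 42

# Class-based settings, as the existing init_arq fixture builds them:
class WorkerSettings:
    functions = [my_task]

# Dict-based settings, as the new fixture builds them:
settings_dict = {"functions": [my_task]}

# Either form should be accepted:
# worker = arq.worker.create_worker(WorkerSettings)
# worker = arq.worker.create_worker(settings_dict)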
@@ -105,14 +156,19 @@ async def increase(ctx, num):


 @pytest.mark.asyncio
-async def test_job_retry(capture_events, init_arq):
+@pytest.mark.parametrize(
+    "init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
+)
+async def test_job_retry(capture_events, init_arq_settings, request):
     async def retry_job(ctx):
         if ctx["job_try"] < 2:
             raise arq.worker.Retry

+    init_fixture_method = request.getfixturevalue(init_arq_settings)
+
     retry_job.__qualname__ = retry_job.__name__

-    pool, worker = init_arq([retry_job])
+    pool, worker = init_fixture_method([retry_job])

     job = await pool.enqueue_job("retry_job")
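Each test in this diff follows the same pattern: it is parametrized over the two fixture names and resolves the chosen fixture at runtime with pytest's request.getfixturevalue, since fixture objects themselves cannot be passed through @pytest.mark.parametrize. A condensed sketch of the pattern (fixture names here are illustrative):

import pytest

@pytest.fixture
def fixture_a():
    return "a"

@pytest.fixture
def fixture_b():
    return "b"

@pytest.mark.parametrize("fixture_name", ["fixture_a", "fixture_b"])
def test_both_variants(fixture_name, request):
    # Look up the fixture by name at runtime; pytest instantiates it lazily.
    value = request.getfixturevalue(fixture_name)
    assert value in ("a", "b")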
@@ -139,11 +195,18 @@ async def retry_job(ctx):
     "source", [("cls_functions", "cls_cron_jobs"), ("kw_functions", "kw_cron_jobs")]
 )
 @pytest.mark.parametrize("job_fails", [True, False], ids=["error", "success"])
+@pytest.mark.parametrize(
+    "init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
+)
 @pytest.mark.asyncio
-async def test_job_transaction(capture_events, init_arq, source, job_fails):
+async def test_job_transaction(
+    capture_events, init_arq_settings, source, job_fails, request
+):
     async def division(_, a, b=0):
         return a / b

+    init_fixture_method = request.getfixturevalue(init_arq_settings)
+
     division.__qualname__ = division.__name__

     cron_func = async_partial(division, a=1, b=int(not job_fails))
@@ -152,7 +215,9 @@ async def division(_, a, b=0):
     cron_job = cron(cron_func, minute=0, run_at_startup=True)

     functions_key, cron_jobs_key = source
-    pool, worker = init_arq(**{functions_key: [division], cron_jobs_key: [cron_job]})
+    pool, worker = init_fixture_method(
+        **{functions_key: [division], cron_jobs_key: [cron_job]}
+    )

     events = capture_events()
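For context, the source parametrization above routes functions and cron_jobs either into the settings class/dict (cls_*) or into create_worker keyword arguments (kw_*), and cron_job is built with arq's cron helper. A small sketch of that helper (the job function is hypothetical):

from arq.cron import cron

async def nightly(ctx):
    pass

# Run whenever the minute is 0 (i.e. hourly at :00), plus once at worker startup:
cron_job = cron(nightly, minute=0, run_at_startup=True)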
@@ -213,12 +278,17 @@ async def division(_, a, b=0):


 @pytest.mark.parametrize("source", ["cls_functions", "kw_functions"])
+@pytest.mark.parametrize(
+    "init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
+)
 @pytest.mark.asyncio
-async def test_enqueue_job(capture_events, init_arq, source):
+async def test_enqueue_job(capture_events, init_arq_settings, source, request):
     async def dummy_job(_):
         pass

-    pool, _ = init_arq(**{source: [dummy_job]})
+    init_fixture_method = request.getfixturevalue(init_arq_settings)
+
+    pool, _ = init_fixture_method(**{source: [dummy_job]})

     events = capture_events()
@@ -236,13 +306,18 @@ async def dummy_job(_):


 @pytest.mark.asyncio
-async def test_execute_job_without_integration(init_arq):
+@pytest.mark.parametrize(
+    "init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
+)
+async def test_execute_job_without_integration(init_arq_settings, request):
     async def dummy_job(_ctx):
         pass

+    init_fixture_method = request.getfixturevalue(init_arq_settings)
+
     dummy_job.__qualname__ = dummy_job.__name__

-    pool, worker = init_arq([dummy_job])
+    pool, worker = init_fixture_method([dummy_job])
     # remove the integration to trigger the edge case
     get_client().integrations.pop("arq")
@@ -254,12 +329,17 @@ async def dummy_job(_ctx):


 @pytest.mark.parametrize("source", ["cls_functions", "kw_functions"])
+@pytest.mark.parametrize(
+    "init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
+)
 @pytest.mark.asyncio
-async def test_span_origin_producer(capture_events, init_arq, source):
+async def test_span_origin_producer(capture_events, init_arq_settings, source, request):
     async def dummy_job(_):
         pass

-    pool, _ = init_arq(**{source: [dummy_job]})
+    init_fixture_method = request.getfixturevalue(init_arq_settings)
+
+    pool, _ = init_fixture_method(**{source: [dummy_job]})

     events = capture_events()
@@ -272,13 +352,18 @@ async def dummy_job(_):


 @pytest.mark.asyncio
-async def test_span_origin_consumer(capture_events, init_arq):
+@pytest.mark.parametrize(
+    "init_arq_settings", ["init_arq", "init_arq_with_dict_settings"]
+)
+async def test_span_origin_consumer(capture_events, init_arq_settings, request):
     async def job(ctx):
         pass

+    init_fixture_method = request.getfixturevalue(init_arq_settings)
+
     job.__qualname__ = job.__name__

-    pool, worker = init_arq([job])
+    pool, worker = init_fixture_method([job])

     job = await pool.enqueue_job("retry_job")
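End to end, the dict-settings path can be exercised the same way the tests do: enqueue on a fakeredis-backed pool, then drain the queue with a worker built from a dict. A sketch under assumptions mirrored from the fixture above (fakeredis is installed and pool_or_conn is accepted by the installed redis-py version):

import asyncio

import arq.worker
from arq.connections import ArqRedis
from fakeredis.aioredis import FakeRedis

async def increase(ctx, num):
    return num + 1

async def main():
    server = FakeRedis()
    pool = ArqRedis(pool_or_conn=server.connection_pool)

    # Dict-based settings instead of a WorkerSettings class:
    worker = arq.worker.create_worker(
        {"functions": [increase], "redis_pool": pool, "burst": True}
    )

    job = await pool.enqueue_job("increase", 3)
    await worker.main()  # burst mode: process queued jobs, then stop
    assert await job.result(poll_delay=0) == 4

asyncio.run(main())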