 from datetime import timedelta
 
 from temporalio import workflow
+from temporalio.common import RetryPolicy
 
 with workflow.unsafe.imports_passed_through():
     from hivemind_etl.mediawiki.activities import (
@@ -31,6 +32,10 @@ async def run(self, platform_id: str | None = None) -> None:
             get_hivemind_mediawiki_platforms,
             platform_id,
             start_to_close_timeout=timedelta(minutes=1),
+            retry_policy=RetryPolicy(
+                initial_interval=timedelta(minutes=1),
+                maximum_attempts=3,
+            ),
         )
 
         for platform in platforms:
@@ -45,13 +50,21 @@ async def run(self, platform_id: str | None = None) -> None:
                 extract_mediawiki,
                 mediawiki_platform,
                 start_to_close_timeout=timedelta(days=5),
+                retry_policy=RetryPolicy(
+                    initial_interval=timedelta(minutes=1),
+                    maximum_attempts=3,
+                ),
             )
 
             # Transform the extracted data
             documents = await workflow.execute_activity(
                 transform_mediawiki_data,
                 platform["community_id"],
                 start_to_close_timeout=timedelta(minutes=30),
+                retry_policy=RetryPolicy(
+                    initial_interval=timedelta(minutes=1),
+                    maximum_attempts=3,
+                ),
             )
 
             # Load the transformed data
@@ -60,6 +73,10 @@ async def run(self, platform_id: str | None = None) -> None:
                 documents,
                 platform["community_id"],
                 start_to_close_timeout=timedelta(minutes=30),
+                retry_policy=RetryPolicy(
+                    initial_interval=timedelta(minutes=1),
+                    maximum_attempts=3,
+                ),
             )
 
             logging.info(
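
Note on the change above: each of the four execute_activity calls now carries the same retry behavior, so a failed activity is retried up to 3 times, with Temporal waiting 1 minute before the first retry and backing off between attempts per the SDK defaults. The sketch below shows the same pattern in isolation, with the policy hoisted into a shared constant; the workflow class and the activity name used here are hypothetical placeholders for illustration, not part of this change.

# Minimal, self-contained sketch of the retry pattern added in this diff.
# Assumes the Temporal Python SDK (temporalio); `ExampleWorkflow` and the
# activity name "sync_platform" are placeholders, not names from this repo.
from datetime import timedelta

from temporalio import workflow
from temporalio.common import RetryPolicy

# One shared policy instead of repeating the same literal at each call site:
# retry a failed activity up to 3 times, waiting 1 minute before the first retry.
DEFAULT_RETRY = RetryPolicy(
    initial_interval=timedelta(minutes=1),
    maximum_attempts=3,
)


@workflow.defn
class ExampleWorkflow:
    @workflow.run
    async def run(self, platform_id: str) -> None:
        # Activity is referenced by name here purely for illustration.
        await workflow.execute_activity(
            "sync_platform",
            platform_id,
            start_to_close_timeout=timedelta(minutes=30),
            retry_policy=DEFAULT_RETRY,
        )

Hoisting the policy into a single constant is optional; the diff inlines it at each call site, which works the same way but leaves the four copies free to drift apart if the retry settings change later.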