Commit d6adb39

⚙️ Availability check to skip task when Redshift is already in the desired state
1 parent 44be838 commit d6adb39

File tree

1 file changed: +7 -2 lines changed


dags/utils.py

+7 -2
@@ -30,7 +30,7 @@ def _attach_datetime(filename: str, destination: str):
     now = datetime.utcnow()
     todays_day = now.weekday()
 
-    if todays_day == 0:
+    if todays_day:
         return
 
     df = pd.read_csv(filename)
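For reference, `datetime.utcnow().weekday()` maps Monday to 0 and Sunday to 6, so the new truthiness check inverts the old behaviour: the function now returns early on every day except Monday, instead of skipping only on Mondays. A minimal standalone sketch of that semantics (the print messages are illustrative only, not from the repository):

from datetime import datetime

# weekday(): Monday -> 0, Tuesday -> 1, ..., Sunday -> 6
# `if todays_day:` is truthy for 1..6, i.e. every day except Monday.
todays_day = datetime.utcnow().weekday()
if todays_day:
    print("not Monday: skip the task body")   # mirrors the early return above
else:
    print("Monday: run the task body")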
@@ -103,6 +103,9 @@ def _resume_redshift_cluster(cluster_identifier: str):
     cluster_state = redshift_hook.cluster_status(cluster_identifier=cluster_identifier)
 
     try:
+        if cluster_state == "available":
+            return
+
         redshift_hook.get_conn().resume_cluster(ClusterIdentifier=cluster_identifier)
         while cluster_state != "available":
             time.sleep(1)
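Putting the hunk in context, the resume helper after this commit plausibly reads as below. Only the lines visible in the diff are taken from the repository; the import path, the status refresh inside the polling loop, and the `except`/`logging.warning` handling are assumptions based on the surrounding code and the standard Amazon provider API.

import logging
import time

# Import path is an assumption; older provider releases expose RedshiftHook
# from airflow.providers.amazon.aws.hooks.redshift instead.
from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook


def _resume_redshift_cluster(cluster_identifier: str):
    redshift_hook = RedshiftHook()
    cluster_state = redshift_hook.cluster_status(cluster_identifier=cluster_identifier)

    try:
        # Availability check added by this commit: if the cluster is already
        # running there is nothing to resume, so the task becomes a no-op.
        if cluster_state == "available":
            return

        redshift_hook.get_conn().resume_cluster(ClusterIdentifier=cluster_identifier)
        while cluster_state != "available":
            time.sleep(1)
            # Re-poll the status; this refresh is assumed, not shown in the hunk.
            cluster_state = redshift_hook.cluster_status(cluster_identifier=cluster_identifier)
    except Exception as ex:
        # Mirrors the warn-and-continue pattern visible in the pause hunk below.
        logging.warning("Could not resume Redshift cluster %s: %s", cluster_identifier, ex)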
@@ -128,11 +131,13 @@ def _pause_redshift_cluster(cluster_identifier: str):
         AirflowException: should fail the pipeline, and (possibly?) send an
             alert to notify that your money is leaking.
     """
-
     redshift_hook = RedshiftHook()
     cluster_state = redshift_hook.cluster_status(cluster_identifier=cluster_identifier)
 
     try:
+        if cluster_state == 'paused':
+            return
+
         redshift_hook.get_conn().pause_cluster(ClusterIdentifier=cluster_identifier)
     except Exception as ex:
         logging.warning(
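As a usage sketch only, the two helpers might be wired into a DAG as below; the DAG id, schedule, task ids, cluster name, and import path are hypothetical and not taken from this repository. The point of the commit is that either task can now be retried or re-run safely: a cluster already in the desired state short-circuits the call.

from datetime import datetime

from airflow import DAG
from airflow.operators.python import PythonOperator

# Hypothetical import; in this repo the helpers live in dags/utils.py.
from utils import _pause_redshift_cluster, _resume_redshift_cluster

with DAG(
    dag_id="redshift_export",            # hypothetical
    start_date=datetime(2023, 1, 1),     # hypothetical
    schedule_interval="@daily",
    catchup=False,
) as dag:
    resume_cluster = PythonOperator(
        task_id="resume_redshift_cluster",
        python_callable=_resume_redshift_cluster,
        op_kwargs={"cluster_identifier": "my-cluster"},  # hypothetical
    )

    pause_cluster = PythonOperator(
        task_id="pause_redshift_cluster",
        python_callable=_pause_redshift_cluster,
        op_kwargs={"cluster_identifier": "my-cluster"},  # hypothetical
    )

    # Extract/load tasks would sit between these two; with the new
    # availability checks, a rerun against an already-running or
    # already-paused cluster is a harmless no-op.
    resume_cluster >> pause_cluster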
