@@ -158,35 +158,39 @@ def maintainers_add_info(context):
158158 Given the active maintainers defined in the yaml file, it fetches
159159 the GitHub user information for them.
160160 """
161- timestamp = time .time ()
162-
163- cache_file = pathlib .Path ("maintainers.json" )
164- if cache_file .is_file ():
165- with open (cache_file ) as f :
166- context ["maintainers" ] = json .load (f )
167- # refresh cache after 1 hour
168- if (timestamp - context ["maintainers" ]["timestamp" ]) < 3_600 :
169- return context
170-
171- context ["maintainers" ]["timestamp" ] = timestamp
172-
173161 repeated = set (context ["maintainers" ]["active" ]) & set (
174162 context ["maintainers" ]["inactive" ]
175163 )
176164 if repeated :
177165 raise ValueError (f"Maintainers { repeated } are both active and inactive" )
178166
179- for kind in ("active" , "inactive" ):
180- context ["maintainers" ][f"{ kind } _with_github_info" ] = []
181- for user in context ["maintainers" ][kind ]:
182- resp = requests .get (f"https://api.github.com/users/{ user } " )
183- if context ["ignore_io_errors" ] and resp .status_code == 403 :
184- return context
185- resp .raise_for_status ()
186- context ["maintainers" ][f"{ kind } _with_github_info" ].append (resp .json ())
167+ maintainers_info = {}
168+ for user in (
169+ context ["maintainers" ]["active" ] + context ["maintainers" ]["inactive" ]
170+ ):
171+ resp = requests .get (f"https://api.github.com/users/{ user } " )
172+ if resp .status_code == 403 :
173+ sys .stderr .write (
174+ "WARN: GitHub API quota exceeded when fetching maintainers\n "
175+ )
176+ # if we exceed github api quota, we use the github info
177+ # of maintainers saved with the website
178+ resp_bkp = requests .get (
179+ context ["main" ]["production_url" ] + "maintainers.json"
180+ )
181+ resp_bkp .raise_for_status ()
182+ maintainers_info = resp_bkp .json ()
183+ break
184+
185+ resp .raise_for_status ()
186+ maintainers_info [user ] = resp .json ()
187187
188- with open (cache_file , "w" ) as f :
189- json .dump (context ["maintainers" ], f )
188+ context ["maintainers" ]["github_info" ] = maintainers_info
189+
190+ # save the data fetched from github to use it in case we exceed
191+ # the GitHub API quota in the future
192+ with open (pathlib .Path (context ["target_path" ]) / "maintainers.json" , "w" ) as f :
193+ json .dump (maintainers_info , f )
190194
191195 return context
192196
@@ -196,11 +200,19 @@ def home_add_releases(context):
196200
197201 github_repo_url = context ["main" ]["github_repo_url" ]
198202 resp = requests .get (f"https://api.github.com/repos/{ github_repo_url } /releases" )
199- if context ["ignore_io_errors" ] and resp .status_code == 403 :
200- return context
201- resp .raise_for_status ()
203+ if resp .status_code == 403 :
204+ sys .stderr .write ("WARN: GitHub API quota exceeded when fetching releases\n " )
205+ resp_bkp = requests .get (context ["main" ]["production_url" ] + "releases.json" )
206+ resp_bkp .raise_for_status ()
207+ releases = resp_bkp .json ()
208+ else :
209+ resp .raise_for_status ()
210+ releases = resp .json ()
202211
203- for release in resp .json ():
212+ with open (pathlib .Path (context ["target_path" ]) / "releases.json" , "w" ) as f :
213+ json .dump (releases , f , default = datetime .datetime .isoformat )
214+
215+ for release in releases :
204216 if release ["prerelease" ]:
205217 continue
206218 published = datetime .datetime .strptime (
@@ -218,6 +230,7 @@ def home_add_releases(context):
218230 ),
219231 }
220232 )
233+
221234 return context
222235
223236 @staticmethod
@@ -264,12 +277,20 @@ def roadmap_pdeps(context):
264277 "https://api.github.com/search/issues?"
265278 f"q=is:pr is:open label:PDEP repo:{ github_repo_url } "
266279 )
267- if context ["ignore_io_errors" ] and resp .status_code == 403 :
268- return context
269- resp .raise_for_status ()
280+ if resp .status_code == 403 :
281+ sys .stderr .write ("WARN: GitHub API quota exceeded when fetching pdeps\n " )
282+ resp_bkp = requests .get (context ["main" ]["production_url" ] + "pdeps.json" )
283+ resp_bkp .raise_for_status ()
284+ pdeps = resp_bkp .json ()
285+ else :
286+ resp .raise_for_status ()
287+ pdeps = resp .json ()
288+
289+ with open (pathlib .Path (context ["target_path" ]) / "pdeps.json" , "w" ) as f :
290+ json .dump (pdeps , f )
270291
271- for pdep in resp . json () ["items" ]:
272- context ["pdeps" ]["under_discussion " ].append (
292+ for pdep in pdeps ["items" ]:
293+ context ["pdeps" ]["Under discussion " ].append (
273294 {"title" : pdep ["title" ], "url" : pdep ["url" ]}
274295 )
275296
@@ -302,7 +323,7 @@ def get_callable(obj_as_str: str) -> object:
302323 return obj
303324
304325
305- def get_context (config_fname : str , ignore_io_errors : bool , ** kwargs ):
326+ def get_context (config_fname : str , ** kwargs ):
306327 """
307328 Load the config yaml as the base context, and enrich it with the
308329 information added by the context preprocessors defined in the file.
@@ -311,7 +332,6 @@ def get_context(config_fname: str, ignore_io_errors: bool, **kwargs):
311332 context = yaml .safe_load (f )
312333
313334 context ["source_path" ] = os .path .dirname (config_fname )
314- context ["ignore_io_errors" ] = ignore_io_errors
315335 context .update (kwargs )
316336
317337 preprocessors = (
@@ -349,7 +369,9 @@ def extend_base_template(content: str, base_template: str) -> str:
349369
350370
351371def main (
352- source_path : str , target_path : str , base_url : str , ignore_io_errors : bool
372+ source_path : str ,
373+ target_path : str ,
374+ base_url : str ,
353375) -> int :
354376 """
355377 Copy every file in the source directory to the target directory.
@@ -363,7 +385,7 @@ def main(
363385 os .makedirs (target_path , exist_ok = True )
364386
365387 sys .stderr .write ("Generating context...\n " )
366- context = get_context (config_fname , ignore_io_errors , base_url = base_url )
388+ context = get_context (config_fname , base_url = base_url , target_path = target_path )
367389 sys .stderr .write ("Context generated\n " )
368390
369391 templates_path = os .path .join (source_path , context ["main" ]["templates_path" ])
@@ -407,15 +429,5 @@ def main(
407429 parser .add_argument (
408430 "--base-url" , default = "" , help = "base url where the website is served from"
409431 )
410- parser .add_argument (
411- "--ignore-io-errors" ,
412- action = "store_true" ,
413- help = "do not fail if errors happen when fetching "
414- "data from http sources, and those fail "
415- "(mostly useful to allow github quota errors "
416- "when running the script locally)" ,
417- )
418432 args = parser .parse_args ()
419- sys .exit (
420- main (args .source_path , args .target_path , args .base_url , args .ignore_io_errors )
421- )
433+ sys .exit (main (args .source_path , args .target_path , args .base_url ))