TIMEOUT = 180


-def query_gcmt(start_time, end_time, min_magnitude=5.0,
-               max_depth=None,
-               catalog_id=None,
-               min_latitude=None, max_latitude=None,
-               min_longitude=None, max_longitude=None):
-
-    eventlist = _query_gcmt(start_time=start_time,
-                            end_time=end_time,
-                            min_magnitude=min_magnitude,
-                            min_latitude=min_latitude,
-                            max_latitude=max_latitude,
-                            min_longitude=min_longitude,
-                            max_longitude=max_longitude,
-                            max_depth=max_depth)
-
-    catalog = CSEPCatalog(data=eventlist,
-                          name='gCMT',
-                          catalog_id=catalog_id,
-                          date_accessed=utc_now_datetime())
+def query_gcmt(
+    start_time,
+    end_time,
+    min_magnitude=5.0,
+    max_depth=None,
+    catalog_id=None,
+    min_latitude=None,
+    max_latitude=None,
+    min_longitude=None,
+    max_longitude=None,
+):
+
+    eventlist = _query_gcmt(
+        start_time=start_time,
+        end_time=end_time,
+        min_magnitude=min_magnitude,
+        min_latitude=min_latitude,
+        max_latitude=max_latitude,
+        min_longitude=min_longitude,
+        max_longitude=max_longitude,
+        max_depth=max_depth,
+    )
+
+    catalog = CSEPCatalog(
+        data=eventlist, name="gCMT", catalog_id=catalog_id, date_accessed=utc_now_datetime()
+    )

    return catalog


@@ -51,25 +58,23 @@ def from_zenodo(record_id, folder, force=False):

    """
    # Grab the urls and filenames and checksums
-    r = requests.get(f"https://zenodo.org/api/records/{record_id}")
-    download_urls = [f['links']['self'] for f in r.json()['files']]
-    filenames = [(f['key'], f['checksum']) for f in r.json()['files']]
+    r = requests.get(f"https://zenodo.org/api/records/{record_id}", timeout=3)
+    download_urls = [f["links"]["self"] for f in r.json()["files"]]
+    filenames = [(f["key"], f["checksum"]) for f in r.json()["files"]]

    # Download and verify checksums
    for (fname, checksum), url in zip(filenames, download_urls):
        full_path = os.path.join(folder, fname)
        if os.path.exists(full_path):
            value, digest = _check_hash(full_path, checksum)
            if value != digest:
-                print(
-                    f"Checksum is different: re-downloading {fname}"
-                    f" from Zenodo...")
+                print(f"Checksum is different: re-downloading {fname}" f" from Zenodo...")
                _download_file(url, full_path)
            elif force:
                print(f"Re-downloading {fname} from Zenodo...")
                _download_file(url, full_path)
            else:
-                print(f'Found file {fname}. Checksum OK.')
+                print(f"Found file {fname}. Checksum OK.")

        else:
            print(f"Downloading {fname} from Zenodo...")
@@ -96,24 +101,31 @@ def from_git(url, path, branch=None, depth=1, **kwargs):
        the pygit repository
    """

-    kwargs.update({'depth': depth})
+    kwargs.update({"depth": depth})
    git.refresh()

    try:
        repo = git.Repo(path)
    except (git.NoSuchPathError, git.InvalidGitRepositoryError):
        repo = git.Repo.clone_from(url, path, branch=branch, **kwargs)
-        git_dir = os.path.join(path, '.git')
+        git_dir = os.path.join(path, ".git")
        if os.path.isdir(git_dir):
            shutil.rmtree(git_dir)

    return repo


-def _query_gcmt(start_time, end_time, min_magnitude=3.50,
-                min_latitude=None, max_latitude=None,
-                min_longitude=None, max_longitude=None,
-                max_depth=1000, extra_gcmt_params=None):
+def _query_gcmt(
+    start_time,
+    end_time,
+    min_magnitude=3.50,
+    min_latitude=None,
+    max_latitude=None,
+    min_longitude=None,
+    max_longitude=None,
+    max_depth=1000,
+    extra_gcmt_params=None,
+):
    """
    Return GCMT eventlist from IRIS web service.
    For details see "https://service.iris.edu/fdsnws/event/1/"
@@ -134,38 +146,44 @@ def _query_gcmt(start_time, end_time, min_magnitude=3.50,
    """
    extra_gcmt_params = extra_gcmt_params or {}

-    eventlist = gcmt_search(minmagnitude=min_magnitude,
-                            minlatitude=min_latitude,
-                            maxlatitude=max_latitude,
-                            minlongitude=min_longitude,
-                            maxlongitude=max_longitude,
-                            starttime=start_time.isoformat(),
-                            endtime=end_time.isoformat(),
-                            maxdepth=max_depth, **extra_gcmt_params)
+    eventlist = gcmt_search(
+        minmagnitude=min_magnitude,
+        minlatitude=min_latitude,
+        maxlatitude=max_latitude,
+        minlongitude=min_longitude,
+        maxlongitude=max_longitude,
+        starttime=start_time.isoformat(),
+        endtime=end_time.isoformat(),
+        maxdepth=max_depth,
+        **extra_gcmt_params,
+    )

    return eventlist

-def gcmt_search(format='text',
-                starttime=None,
-                endtime=None,
-                updatedafter=None,
-                minlatitude=None,
-                maxlatitude=None,
-                minlongitude=None,
-                maxlongitude=None,
-                latitude=None,
-                longitude=None,
-                maxradius=None,
-                catalog='GCMT',
-                contributor=None,
-                maxdepth=1000,
-                maxmagnitude=10.0,
-                mindepth=-100,
-                minmagnitude=0,
-                offset=1,
-                orderby='time-asc',
-                host=None,
-                verbose=False):
+
+def gcmt_search(
+    format="text",
+    starttime=None,
+    endtime=None,
+    updatedafter=None,
+    minlatitude=None,
+    maxlatitude=None,
+    minlongitude=None,
+    maxlongitude=None,
+    latitude=None,
+    longitude=None,
+    maxradius=None,
+    catalog="GCMT",
+    contributor=None,
+    maxdepth=1000,
+    maxmagnitude=10.0,
+    mindepth=-100,
+    minmagnitude=0,
+    offset=1,
+    orderby="time-asc",
+    host=None,
+    verbose=False,
+):
    """Search the IRIS database for events matching input criteria.
    This search function is a wrapper around the ComCat Web API described here:
    https://service.iris.edu/fdsnws/event/1/
@@ -225,16 +243,16 @@ def gcmt_search(format='text',

    for key, value in inputargs.items():
        if value is True:
-            newargs[key] = 'true'
+            newargs[key] = "true"
            continue
        if value is False:
-            newargs[key] = 'false'
+            newargs[key] = "false"
            continue
        if value is None:
            continue
        newargs[key] = value

-    del newargs['verbose']
+    del newargs["verbose"]

    events = _search_gcmt(**newargs)

@@ -249,11 +267,11 @@ def _search_gcmt(**_newargs):
    paramstr = urlencode(_newargs)
    url = HOST_CATALOG + paramstr
    fh = request.urlopen(url, timeout=TIMEOUT)
-    data = fh.read().decode('utf8').split('\n')
+    data = fh.read().decode("utf8").split("\n")
    fh.close()
    eventlist = []
    for line in data[1:]:
-        line_ = line.split('|')
+        line_ = line.split("|")
        if len(line_) != 1:
            id_ = line_[0]
            time_ = datetime.fromisoformat(line_[1])
@@ -280,47 +298,47 @@ def _download_file(url: str, filename: str) -> None:
    progress_bar_length = 72
    block_size = 1024

-    r = requests.get(url, stream=True)
-    total_size = r.headers.get('content-length', False)
+    r = requests.get(url, timeout=3, stream=True)
+    total_size = r.headers.get("content-length", False)
    if not total_size:
        with requests.head(url) as h:
            try:
-                total_size = int(h.headers.get('Content-Length', 0))
+                total_size = int(h.headers.get("Content-Length", 0))
            except TypeError:
                total_size = 0
    else:
        total_size = int(total_size)
    download_size = 0
    if total_size:
-        print(
-            f'Downloading file with size of {total_size / block_size:.3f} kB')
+        print(f"Downloading file with size of {total_size / block_size:.3f} kB")
    else:
-        print(f'Downloading file with unknown size')
-    with open(filename, 'wb') as f:
+        print(f"Downloading file with unknown size")
+    with open(filename, "wb") as f:
        for data in r.iter_content(chunk_size=block_size):
            download_size += len(data)
            f.write(data)
            if total_size:
-                progress = int(
-                    progress_bar_length * download_size / total_size)
+                progress = int(progress_bar_length * download_size / total_size)
                sys.stdout.write(
-                    '\r[{}{}] {:.1f}%'.format('█' * progress, '.' *
-                                              (progress_bar_length - progress),
-                                              100 * download_size / total_size)
+                    "\r[{}{}] {:.1f}%".format(
+                        "█" * progress,
+                        "." * (progress_bar_length - progress),
+                        100 * download_size / total_size,
+                    )
                )
                sys.stdout.flush()
-    sys.stdout.write('\n')
+    sys.stdout.write("\n")


def _check_hash(filename, checksum):
    """
    Checks if existing file hash matches checksum from url
    """
-    algorithm, value = checksum.split(':')
+    algorithm, value = checksum.split(":")
    if not os.path.exists(filename):
-        return value, 'invalid'
+        return value, "invalid"
    h = hashlib.new(algorithm)
-    with open(filename, 'rb') as f:
+    with open(filename, "rb") as f:
        while True:
            data = f.read(4096)
            if not data:
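
Reviewer note: this diff is a pure black reformat plus added request timeouts, so the public API should behave exactly as before. A minimal smoke-test sketch follows; the `from csep import query_gcmt` import path is an assumption about how the function in this file is re-exported at package level.

```python
# Minimal smoke test for the reformatted query_gcmt signature.
# Assumption: the function edited in this diff is re-exported as
# `csep.query_gcmt`; adjust the import to the actual module path if not.
from datetime import datetime

from csep import query_gcmt

# One month of gCMT events with Mw >= 5.5; keyword names follow the
# new black-formatted signature, which is semantically unchanged.
catalog = query_gcmt(
    start_time=datetime(2010, 1, 1),
    end_time=datetime(2010, 2, 1),
    min_magnitude=5.5,
)
print(catalog)  # CSEPCatalog named "gCMT" holding the retrieved events
```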