
Commit a7e7ba1

ft: separated Model class into subclasses TimeIndependent and TimeDependent, with corresponding methods related to their execution nature
tests: adapted tests to the new Model subclasses
tests: added a test for TimeDependentModel with flat files, and expanded coverage of existing methods
sty: formatted code with black -l 96, flake8 and pydocstyle
ft: created a ModelFactory class, which handles the type of model instantiation
refac: changed TimeIndependentModel arg use_db to store_db
rep: removed obsolete files
build: homogenized dev requirements in setup.cfg and requirements_dev (used for tox)
examples: removed requirement versions from case_f/pymock
dep: added new developer dependencies for linting/testing
1 parent 5795d21 commit a7e7ba1

25 files changed: +2200 -8264 lines
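The reworked Model hierarchy itself lives in floatcsep/model.py and is not among the files excerpted below. As a rough, non-authoritative sketch of what the commit message describes: the three class names and the store_db rename come from this commit, while every constructor argument and the dispatch rule are assumptions.

# Sketch only: the real interfaces are in floatcsep/model.py, not shown here.
# All arguments except store_db, and the dispatch rule, are assumptions.
class TimeIndependentModel:
    """A forecast computed once and reused across all time windows."""

    def __init__(self, name, path, forecast_unit=1, store_db=False):
        self.name = name
        self.path = path
        self.forecast_unit = forecast_unit
        self.store_db = store_db  # renamed from use_db in this commit


class TimeDependentModel:
    """A forecast whose source is re-run for every time window."""

    def __init__(self, name, path, func=None, store_db=False):
        self.name = name
        self.path = path
        self.func = func  # hypothetical: the command that regenerates a forecast
        self.store_db = store_db


class ModelFactory:
    """Chooses the Model subclass that matches a configuration entry."""

    @staticmethod
    def create(name, config):
        # Hypothetical dispatch: entries declaring a runnable `func` are
        # treated as time-dependent; everything else is a static forecast.
        cls = TimeDependentModel if "func" in config else TimeIndependentModel
        return cls(name, **config)

The actual factory may dispatch on other keys; the point of the commit is that instantiation logic moved out of a single Model class.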

examples/case_e/models.yml

Lines changed: 3 additions & 3 deletions

@@ -1,12 +1,12 @@
 - ALM:
     path: models/gulia-wiemer.ALM.italy.10yr.2010-01-01.xml
     forecast_unit: 10
-    use_db: True
+    store_db: True
 - MPS04:
     path: models/meletti.MPS04.italy.10yr.2010-01-01.xml
     forecast_unit: 10
-    use_db: True
+    store_db: True
 - TripleS-CPTI:
     path: models/zechar.TripleS-CPTI.italy.10yr.2010-01-01.xml
     forecast_unit: 10
-    use_db: True
+    store_db: True
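For context, a minimal sketch of how one of these entries could be parsed and handed to a factory like the one sketched above; the loading code is illustrative, not floatcsep's actual reader:

import yaml  # requires PyYAML

# Illustrative only: each list entry is a one-key mapping whose value holds
# path, forecast_unit and the renamed store_db flag.
with open("examples/case_e/models.yml") as f:
    entries = yaml.safe_load(f)

models = [
    ModelFactory.create(name, config)
    for entry in entries
    for name, config in entry.items()
]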

examples/case_f/pymock/setup.cfg

Lines changed: 2 additions & 2 deletions

@@ -14,8 +14,8 @@ url = https://git.gfz-potsdam.de/csep/it_experiment/models/pymock
 packages =
     pymock
 install_requires =
-    numpy==1.23.4
-    matplotlib==3.4.3
+    numpy
+    matplotlib
 python_requires = >=3.7
 zip_safe = no

floatcsep/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -7,4 +7,4 @@
 from floatcsep import model
 from floatcsep import readers

-__version__ = '0.1.3'
+__version__ = "0.1.4"

floatcsep/accessors.py

Lines changed: 100 additions & 82 deletions

@@ -14,25 +14,32 @@
 TIMEOUT = 180


-def query_gcmt(start_time, end_time, min_magnitude=5.0,
-               max_depth=None,
-               catalog_id=None,
-               min_latitude=None, max_latitude=None,
-               min_longitude=None, max_longitude=None):
-
-    eventlist = _query_gcmt(start_time=start_time,
-                            end_time=end_time,
-                            min_magnitude=min_magnitude,
-                            min_latitude=min_latitude,
-                            max_latitude=max_latitude,
-                            min_longitude=min_longitude,
-                            max_longitude=max_longitude,
-                            max_depth=max_depth)
-
-    catalog = CSEPCatalog(data=eventlist,
-                          name='gCMT',
-                          catalog_id=catalog_id,
-                          date_accessed=utc_now_datetime())
+def query_gcmt(
+    start_time,
+    end_time,
+    min_magnitude=5.0,
+    max_depth=None,
+    catalog_id=None,
+    min_latitude=None,
+    max_latitude=None,
+    min_longitude=None,
+    max_longitude=None,
+):
+
+    eventlist = _query_gcmt(
+        start_time=start_time,
+        end_time=end_time,
+        min_magnitude=min_magnitude,
+        min_latitude=min_latitude,
+        max_latitude=max_latitude,
+        min_longitude=min_longitude,
+        max_longitude=max_longitude,
+        max_depth=max_depth,
+    )
+
+    catalog = CSEPCatalog(
+        data=eventlist, name="gCMT", catalog_id=catalog_id, date_accessed=utc_now_datetime()
+    )
     return catalog

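A usage example for the reformatted query_gcmt above; the date range, magnitude threshold and catalog id are arbitrary placeholders:

from datetime import datetime

# Hypothetical call: fetch M >= 5.5 gCMT events for a ten-year window; the
# function wraps the resulting event list in a CSEPCatalog named "gCMT".
catalog = query_gcmt(
    start_time=datetime(2010, 1, 1),
    end_time=datetime(2020, 1, 1),
    min_magnitude=5.5,
    catalog_id="italy-10yr",  # placeholder identifier
)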

@@ -51,25 +58,23 @@ def from_zenodo(record_id, folder, force=False):

     """
     # Grab the urls and filenames and checksums
-    r = requests.get(f"https://zenodo.org/api/records/{record_id}")
-    download_urls = [f['links']['self'] for f in r.json()['files']]
-    filenames = [(f['key'], f['checksum']) for f in r.json()['files']]
+    r = requests.get(f"https://zenodo.org/api/records/{record_id}", timeout=3)
+    download_urls = [f["links"]["self"] for f in r.json()["files"]]
+    filenames = [(f["key"], f["checksum"]) for f in r.json()["files"]]

     # Download and verify checksums
     for (fname, checksum), url in zip(filenames, download_urls):
         full_path = os.path.join(folder, fname)
         if os.path.exists(full_path):
             value, digest = _check_hash(full_path, checksum)
             if value != digest:
-                print(
-                    f"Checksum is different: re-downloading {fname}"
-                    f" from Zenodo...")
+                print(f"Checksum is different: re-downloading {fname}" f" from Zenodo...")
                 _download_file(url, full_path)
             elif force:
                 print(f"Re-downloading {fname} from Zenodo...")
                 _download_file(url, full_path)
             else:
-                print(f'Found file {fname}. Checksum OK.')
+                print(f"Found file {fname}. Checksum OK.")

         else:
             print(f"Downloading {fname} from Zenodo...")
@@ -96,24 +101,31 @@ def from_git(url, path, branch=None, depth=1, **kwargs):
     the pygit repository
     """

-    kwargs.update({'depth': depth})
+    kwargs.update({"depth": depth})
     git.refresh()

     try:
         repo = git.Repo(path)
     except (git.NoSuchPathError, git.InvalidGitRepositoryError):
         repo = git.Repo.clone_from(url, path, branch=branch, **kwargs)
-        git_dir = os.path.join(path, '.git')
+        git_dir = os.path.join(path, ".git")
         if os.path.isdir(git_dir):
             shutil.rmtree(git_dir)

     return repo


-def _query_gcmt(start_time, end_time, min_magnitude=3.50,
-                min_latitude=None, max_latitude=None,
-                min_longitude=None, max_longitude=None,
-                max_depth=1000, extra_gcmt_params=None):
+def _query_gcmt(
+    start_time,
+    end_time,
+    min_magnitude=3.50,
+    min_latitude=None,
+    max_latitude=None,
+    min_longitude=None,
+    max_longitude=None,
+    max_depth=1000,
+    extra_gcmt_params=None,
+):
     """
     Return GCMT eventlist from IRIS web service.
     For details see "https://service.iris.edu/fdsnws/event/1/"
@@ -134,38 +146,44 @@ def _query_gcmt(start_time, end_time, min_magnitude=3.50,
     """
     extra_gcmt_params = extra_gcmt_params or {}

-    eventlist = gcmt_search(minmagnitude=min_magnitude,
-                            minlatitude=min_latitude,
-                            maxlatitude=max_latitude,
-                            minlongitude=min_longitude,
-                            maxlongitude=max_longitude,
-                            starttime=start_time.isoformat(),
-                            endtime=end_time.isoformat(),
-                            maxdepth=max_depth, **extra_gcmt_params)
+    eventlist = gcmt_search(
+        minmagnitude=min_magnitude,
+        minlatitude=min_latitude,
+        maxlatitude=max_latitude,
+        minlongitude=min_longitude,
+        maxlongitude=max_longitude,
+        starttime=start_time.isoformat(),
+        endtime=end_time.isoformat(),
+        maxdepth=max_depth,
+        **extra_gcmt_params,
+    )

     return eventlist

-def gcmt_search(format='text',
-                starttime=None,
-                endtime=None,
-                updatedafter=None,
-                minlatitude=None,
-                maxlatitude=None,
-                minlongitude=None,
-                maxlongitude=None,
-                latitude=None,
-                longitude=None,
-                maxradius=None,
-                catalog='GCMT',
-                contributor=None,
-                maxdepth=1000,
-                maxmagnitude=10.0,
-                mindepth=-100,
-                minmagnitude=0,
-                offset=1,
-                orderby='time-asc',
-                host=None,
-                verbose=False):
+
+def gcmt_search(
+    format="text",
+    starttime=None,
+    endtime=None,
+    updatedafter=None,
+    minlatitude=None,
+    maxlatitude=None,
+    minlongitude=None,
+    maxlongitude=None,
+    latitude=None,
+    longitude=None,
+    maxradius=None,
+    catalog="GCMT",
+    contributor=None,
+    maxdepth=1000,
+    maxmagnitude=10.0,
+    mindepth=-100,
+    minmagnitude=0,
+    offset=1,
+    orderby="time-asc",
+    host=None,
+    verbose=False,
+):
     """Search the IRIS database for events matching input criteria.
     This search function is a wrapper around the ComCat Web API described here:
     https://service.iris.edu/fdsnws/event/1/
@@ -225,16 +243,16 @@ def gcmt_search(format='text',

     for key, value in inputargs.items():
         if value is True:
-            newargs[key] = 'true'
+            newargs[key] = "true"
             continue
         if value is False:
-            newargs[key] = 'false'
+            newargs[key] = "false"
             continue
         if value is None:
             continue
         newargs[key] = value

-    del newargs['verbose']
+    del newargs["verbose"]

     events = _search_gcmt(**newargs)

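A self-contained trace of the normalization above, with made-up input values:

# Booleans become the strings "true"/"false", None values are dropped, and
# "verbose" is always removed before the query string is built.
inputargs = {"minmagnitude": 5.0, "verbose": False, "host": None}
newargs = {}
for key, value in inputargs.items():
    if value is True:
        newargs[key] = "true"
    elif value is False:
        newargs[key] = "false"
    elif value is not None:
        newargs[key] = value
del newargs["verbose"]
assert newargs == {"minmagnitude": 5.0}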

@@ -249,11 +267,11 @@ def _search_gcmt(**_newargs):
     paramstr = urlencode(_newargs)
     url = HOST_CATALOG + paramstr
     fh = request.urlopen(url, timeout=TIMEOUT)
-    data = fh.read().decode('utf8').split('\n')
+    data = fh.read().decode("utf8").split("\n")
     fh.close()
     eventlist = []
     for line in data[1:]:
-        line_ = line.split('|')
+        line_ = line.split("|")
         if len(line_) != 1:
             id_ = line_[0]
             time_ = datetime.fromisoformat(line_[1])
@@ -280,47 +298,47 @@ def _download_file(url: str, filename: str) -> None:
     progress_bar_length = 72
     block_size = 1024

-    r = requests.get(url, stream=True)
-    total_size = r.headers.get('content-length', False)
+    r = requests.get(url, timeout=3, stream=True)
+    total_size = r.headers.get("content-length", False)
     if not total_size:
         with requests.head(url) as h:
             try:
-                total_size = int(h.headers.get('Content-Length', 0))
+                total_size = int(h.headers.get("Content-Length", 0))
             except TypeError:
                 total_size = 0
     else:
         total_size = int(total_size)
     download_size = 0
     if total_size:
-        print(
-            f'Downloading file with size of {total_size / block_size:.3f} kB')
+        print(f"Downloading file with size of {total_size / block_size:.3f} kB")
     else:
-        print(f'Downloading file with unknown size')
-    with open(filename, 'wb') as f:
+        print(f"Downloading file with unknown size")
+    with open(filename, "wb") as f:
         for data in r.iter_content(chunk_size=block_size):
             download_size += len(data)
             f.write(data)
             if total_size:
-                progress = int(
-                    progress_bar_length * download_size / total_size)
+                progress = int(progress_bar_length * download_size / total_size)
                 sys.stdout.write(
-                    '\r[{}{}] {:.1f}%'.format('█' * progress, '.' *
-                                              (progress_bar_length - progress),
-                                              100 * download_size / total_size)
+                    "\r[{}{}] {:.1f}%".format(
+                        "█" * progress,
+                        "." * (progress_bar_length - progress),
+                        100 * download_size / total_size,
+                    )
                 )
             sys.stdout.flush()
-    sys.stdout.write('\n')
+    sys.stdout.write("\n")


 def _check_hash(filename, checksum):
     """
     Checks if existing file hash matches checksum from url
     """
-    algorithm, value = checksum.split(':')
+    algorithm, value = checksum.split(":")
     if not os.path.exists(filename):
-        return value, 'invalid'
+        return value, "invalid"
     h = hashlib.new(algorithm)
-    with open(filename, 'rb') as f:
+    with open(filename, "rb") as f:
         while True:
             data = f.read(4096)
             if not data:
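The excerpt of the diff ends inside _check_hash. For reference, the same "algorithm:value" checksum pattern visible above can be written as the following self-contained sketch, which is a reconstruction rather than the committed code:

import hashlib
import os

def check_hash(filename, checksum):
    """Return (expected, actual) digests for an 'algorithm:value' checksum."""
    algorithm, value = checksum.split(":")
    if not os.path.exists(filename):
        return value, "invalid"
    h = hashlib.new(algorithm)
    with open(filename, "rb") as f:
        while True:
            data = f.read(4096)
            if not data:
                break
            h.update(data)
    return value, h.hexdigest()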
