
routed spatial_counts and spatial_magnitude_counts through region class #122

Merged: 2 commits, May 19, 2021
73 changes: 27 additions & 46 deletions csep/core/catalogs.py
@@ -662,15 +662,12 @@ def spatial_counts(self):
if self.region is None:
raise CSEPSchedulerException("Cannot create binned rates without region information.")

# todo: this should be routed through self.region to allow for different types of regions
output = regions._bin_catalog_spatial_counts(self.get_longitudes(),
self.get_latitudes(),
self.region.num_nodes,
self.region.bbox_mask,
self.region.idx_map,
self.region.xs,
self.region.ys)
return output
n_poly = self.region.num_nodes
event_counts = numpy.zeros(n_poly)
# this function could throw ValueError if points are outside of the region
idx = self.region.get_index_of(self.get_longitudes(), self.get_latitudes())
numpy.add.at(event_counts, idx, 1)
return event_counts

def spatial_event_probability(self):
# make sure region is specified with catalog
@@ -680,14 +677,11 @@ def spatial_event_probability(self):
if self.region is None:
raise CSEPSchedulerException("Cannot create binned probabilities without region information.")

output = regions._bin_catalog_probability(self.get_longitudes(),
self.get_latitudes(),
len(self.region.polygons),
self.region.bbox_mask,
self.region.idx_map,
self.region.xs,
self.region.ys)
return output
n_poly = self.region.num_nodes
event_flag = numpy.zeros(n_poly)
idx = self.region.get_index_of(self.get_longitudes(), self.get_latitudes())
event_flag[idx] = 1
return event_flag

def magnitude_counts(self, mag_bins=None, tol=0.00001, retbins=False):
""" Computes the count of events within mag_bins
@@ -724,16 +718,14 @@ def magnitude_counts(self, mag_bins=None, tol=0.00001, retbins=False):
else:
return out

def spatial_magnitude_counts(self, mag_bins=None, tol=0.00001, ret_skipped=False):
def spatial_magnitude_counts(self, mag_bins=None, tol=0.00001):
""" Return counts of events in space-magnitude region.

We figure out the index of the polygons and create a map that relates the spatial coordinate in the
Cartesian grid with the polygon in region.

Args:
mag_bins: magnitude bins (optional). tries to use the magnitude bins associated with the region
ret_skipped (bool): if true, will return list of (lon, lat, mw) tuple of skipped points


Returns:
output: unnormalized event count in each bin, 1d ndarray where index corresponds to midpoints
@@ -747,33 +739,22 @@ def spatial_magnitude_counts(self, mag_bins=None, tol=0.00001, ret_skipped=False
if self.region.magnitudes is None and mag_bins is None:
raise CSEPCatalogException("Region must have magnitudes or mag_bins must be defined to "
"compute space magnitude binning.")

# prefer user supplied mag_bins
if mag_bins is None:
mag_bins = self.region.magnitudes
# short-circuit if zero-events in catalog... return array of zeros
if self.event_count == 0:
n_poly = self.region.num_nodes
n_mws = self.region.num_mag_bins
output = numpy.zeros((n_poly, n_mws))
skipped = []
else:
if mag_bins is None:
mag_bins = self.region.magnitudes

# compute if not
# todo: this should be routed through self.region to allow for different types of regions
output, skipped = regions._bin_catalog_spatio_magnitude_counts(self.get_longitudes(),
self.get_latitudes(),
self.get_magnitudes(),
self.region.num_nodes,
self.region.bbox_mask,
self.region.idx_map,
self.region.xs,
self.region.ys,
mag_bins,
tol=tol)
if ret_skipped:
return output, skipped
else:
return output
n_poly = self.region.num_nodes
event_counts = numpy.zeros((n_poly, len(mag_bins)))
if self.event_count != 0:
# this will throw ValueError if points outside range
spatial_idx = self.region.get_index_of(self.get_longitudes(), self.get_latitudes())
# magnitudes outside mag_bins are flagged with -1 and raise ValueError below
mag_idx = bin1d_vec(self.get_magnitudes(), mag_bins, tol=tol, right_continuous=True)
for idx in range(spatial_idx.shape[0]):
if mag_idx[idx] == -1:
raise ValueError("at least one magnitude value outside of the valid region.")
event_counts[(spatial_idx[idx], mag_idx[idx])] += 1
return event_counts

def length_in_seconds(self):
""" Returns catalog length in seconds assuming that the catalog is sorted by time. """
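The motivation flagged in the removed todo comments is that the binning no longer goes through the Cartesian-grid-specific `_bin_catalog_*` helpers: any region object exposing `num_nodes` and `get_index_of` can now back `spatial_counts`. A minimal sketch of the counting pattern, using a hypothetical `ToyRegion` in place of the real `CartesianGrid2D`:

```python
import numpy

class ToyRegion:
    """Hypothetical stand-in for a region; only num_nodes and get_index_of are assumed."""

    def __init__(self, num_nodes):
        self.num_nodes = num_nodes

    def get_index_of(self, lons, lats):
        # toy mapping for illustration only: cell index derived from longitude
        return numpy.floor(numpy.asarray(lons) * 10).astype(int)

region = ToyRegion(num_nodes=8)
lons = numpy.array([0.05, 0.15, 0.15])
lats = numpy.array([0.15, 0.05, 0.05])

# same accumulation pattern as the new spatial_counts()
event_counts = numpy.zeros(region.num_nodes)
idx = region.get_index_of(lons, lats)
numpy.add.at(event_counts, idx, 1)  # unbuffered add, so repeated indices accumulate
print(event_counts)  # [1. 2. 0. 0. 0. 0. 0. 0.]
```

`numpy.add.at` is used rather than `event_counts[idx] += 1` because fancy-indexed in-place addition counts each repeated index only once; `spatial_magnitude_counts` applies the same idea with an explicit loop over a second magnitude index from `bin1d_vec`.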
4 changes: 3 additions & 1 deletion csep/utils/calc.py
@@ -90,11 +90,13 @@ def bin1d_vec(p, bins, tol=None, right_continuous=False):
if right_continuous:
# set upper bin index to last
try:
idx[(idx >= len(bins) - 1)] = len(bins) - 1
idx[(idx < 0)] = -1
idx[(idx >= len(bins) - 1)] = len(bins) - 1
except (TypeError):
if idx >= len(bins) - 1:
idx = len(bins) - 1
if idx < 0:
idx = -1
else:
try:
idx[((idx < 0) | (idx >= len(bins)))] = -1
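The extra lower-bound guard above matters for right-continuous binning: a value below the first bin edge yields a negative raw index, and before this change it passed through unflagged instead of being mapped to the sentinel -1 (which is what the new `test_scalar_outside` checks). A rough sketch of the arithmetic, assuming the usual floor-division indexing and ignoring `bin1d_vec`'s tolerance handling:

```python
import numpy

mbins = numpy.arange(5.95, 9, 0.1)  # bin edges 5.95 ... 8.95
h = mbins[1] - mbins[0]

p = numpy.array([5.0, 6.0, 9.5])
raw = numpy.floor((p - mbins[0]) / h).astype(int)
print(raw)  # [-10   0  35] -- 5.0 falls below the lowest edge

idx = raw.copy()
idx[idx < 0] = -1                            # new guard: flag below-range values
idx[idx >= len(mbins) - 1] = len(mbins) - 1  # right-continuous clamp at the top
print(idx)  # [-1  0 30]
```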
15 changes: 15 additions & 0 deletions tests/test_calc.py
@@ -126,3 +126,18 @@ def test_less_and_greater_than(self):
expected = [-1, 3, -1]
self.assertListEqual(test.tolist(), expected)

def test_scalar_outside(self):
from csep.utils.calc import bin1d_vec
mbins = numpy.arange(5.95, 9, 0.1) # This gives bins from 5.95 to 8.95
idx = bin1d_vec(5.95, mbins, tol=0.00001, right_continuous=True)
self.assertEqual(idx, 0)

idx = bin1d_vec(6, mbins, tol=0.00001, right_continuous=True)  # 6 falls in the first bin [5.95, 6.05), so index 0
self.assertEqual(idx, 0)

idx = bin1d_vec(5, mbins, tol=0.00001, right_continuous=True)
self.assertEqual(idx, -1)

idx = bin1d_vec(4, mbins, tol=0.00001, right_continuous=True)
self.assertEqual(idx, -1)

80 changes: 76 additions & 4 deletions tests/test_catalog.py
@@ -8,7 +8,7 @@
import csep
from csep.core import regions, forecasts
from csep.utils.time_utils import strptime_to_utc_epoch, strptime_to_utc_datetime
from csep.core.catalogs import CSEPCatalog
from csep.core.catalogs import CSEPCatalog, AbstractBaseCatalog
from csep.core.regions import CartesianGrid2D, Polygon, compute_vertices

def comcat_path():
@@ -17,6 +17,20 @@ def comcat_path():
'test_catalog.csv')
return data_dir

class MockCatalog(AbstractBaseCatalog):

def __init__(self):
super().__init__()

def get_number_of_events(self):
return len(self.get_latitudes())

def get_longitudes(self):
return numpy.array([0.05, 0.15])

def get_latitudes(self):
return numpy.array([0.15, 0.05])

class CatalogFiltering(unittest.TestCase):
def setUp(self):

@@ -108,7 +122,7 @@ def test_catalog_binning_and_filtering_acceptance(self):
)

# read catalog
comcat = csep.load_catalog(comcat_path(), region=region)
comcat = csep.load_catalog(comcat_path(), region=region).filter(f"magnitude >= 4.5")

# create data set from data set
d = forecasts.MarkedGriddedDataSet(
@@ -119,6 +133,7 @@

for idm, m_min in enumerate(d.magnitudes):
# catalog filtered cumulative
print(m_min)
c = comcat.filter([f'magnitude >= {m_min}'], in_place=False)
# catalog filtered incrementally
c_int = comcat.filter([f'magnitude >= {m_min}', f'magnitude < {m_min + 0.1}'], in_place=False)
@@ -127,9 +142,66 @@
# incremental counts
gs_int = d.data[:, idm].sum()
# event count from filtered catalog and events in binned data should be the same
numpy.testing.assert_almost_equal(gs, c.event_count)
numpy.testing.assert_almost_equal(gs_int, c_int.event_count)
numpy.testing.assert_equal(gs, c.event_count)
numpy.testing.assert_equal(gs_int, c_int.event_count)

def test_bin_spatial_counts(self):
""" this will test both good and bad points within the region.

1) 2 inside the domain
2) outside the bbox but not in domain
3) completely outside the domain

we will check that only 1 event is placed in the grid and ensure its location is correct.
"""
# create some arbitrary grid
nx = 8
ny = 10
dh = 0.1
x_points = numpy.arange(nx) * dh
y_points = numpy.arange(ny) * dh
origins = list(itertools.product(x_points, y_points))
# grid is missing first and last block
origins.pop(0)
origins.pop(-1)
cart_grid = CartesianGrid2D(
[Polygon(bbox) for bbox in compute_vertices(origins, dh)],
dh
)
catalog = MockCatalog()
catalog.region = cart_grid
test_result = catalog.spatial_counts()

# we have tested 2 inside the domain
self.assertEqual(numpy.sum(test_result), 2)
# we know that (0.05, 0.15) corresponds to index 0
self.assertEqual(test_result[0], 1)
# we know that (0.15, 0.05) corresponds to index 9
self.assertEqual(test_result[9], 1)

def test_bin_probability(self):
# we have tested 2 inside the domain
nx = 8
ny = 10
dh = 0.1
x_points = numpy.arange(nx) * dh
y_points = numpy.arange(ny) * dh
origins = list(itertools.product(x_points, y_points))
# grid is missing first and last block
origins.pop(0)
origins.pop(-1)
cart_grid = CartesianGrid2D(
[Polygon(bbox) for bbox in compute_vertices(origins, dh)],
dh
)
catalog = MockCatalog()
catalog.region = cart_grid
test_result = catalog.spatial_event_probability()
self.assertEqual(numpy.sum(test_result), 2)

# we know that (0.05, 0.15) corresponds to index 0 from the above test
self.assertEqual(test_result[0], 1)
self.assertEqual(test_result[9], 1)

if __name__ == '__main__':
unittest.main()
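A note on the behavioural change that the acceptance-test tweak above accommodates: `spatial_magnitude_counts` no longer takes `ret_skipped` and no longer silently drops out-of-range events; an event whose magnitude cannot be binned (index -1 from `bin1d_vec`) now raises `ValueError`, and `get_index_of` can likewise raise for epicentres outside the region. Pre-filtering the ComCat catalog with `magnitude >= 4.5` before building the gridded data set is presumably there to keep every event inside the magnitude bins. A self-contained toy version of the new accumulation loop, with made-up index arrays standing in for `region.get_index_of` and `bin1d_vec`:

```python
import numpy

n_poly = 4
mag_bins = numpy.array([5.0, 6.0, 7.0])

# toy indices; in the real method these come from the catalog's region and bin1d_vec
spatial_idx = numpy.array([0, 2, 2])
mag_idx = numpy.array([1, 0, 2])

event_counts = numpy.zeros((n_poly, len(mag_bins)))
for i in range(spatial_idx.shape[0]):
    if mag_idx[i] == -1:
        raise ValueError("at least one magnitude value outside of the valid region.")
    event_counts[spatial_idx[i], mag_idx[i]] += 1

print(event_counts.sum(axis=1))  # events per spatial cell: [1. 0. 2. 0.]
```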