add support for DES nightsum-like plots
ehneilsen committed Feb 27, 2024
1 parent 470e267 commit d82e5fc
Showing 24 changed files with 1,150 additions and 26 deletions.
1 change: 1 addition & 0 deletions schedview/collect/__init__.py
@@ -4,6 +4,7 @@
"read_scheduler",
"sample_pickle",
"load_bright_stars",
"read_ddf_visits",
]

from .footprint import get_footprint
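With read_ddf_visits added to __all__, the new loader should be importable directly from the subpackage, assuming schedview.collect re-exports what local.py imports (a minimal sketch; the database path is hypothetical):

from schedview.collect import read_ddf_visits

ddf_visits = read_ddf_visits("opsim.db")  # hypothetical opsim database path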
2 changes: 1 addition & 1 deletion schedview/collect/local.py
@@ -1,5 +1,5 @@
from .metrics import get_metric_path # noqa F401
-from .opsim import read_opsim  # noqa F401
+from .opsim import read_ddf_visits, read_opsim  # noqa F401
from .scheduler_pickle import read_scheduler # noqa F401
from .scheduler_pickle import sample_pickle # noqa F401
from .stars import load_bright_stars # noqa F401
175 changes: 159 additions & 16 deletions schedview/collect/opsim.py
@@ -1,13 +1,99 @@
import sqlite3

import numpy as np
import pandas as pd
import yaml
from astropy.time import Time
from lsst.resources import ResourcePath
from rubin_scheduler.utils import ddf_locations
from rubin_sim import maf

DEFAULT_VISITS_COLUMNS = [
"observationId",
"fieldRA",
"fieldDec",
"observationStartMJD",
"flush_by_mjd",
"visitExposureTime",
"filter",
"rotSkyPos",
"rotSkyPos_desired",
"numExposures",
"airmass",
"seeingFwhm500",
"seeingFwhmEff",
"seeingFwhmGeom",
"skyBrightness",
"night",
"slewTime",
"visitTime",
"slewDistance",
"fiveSigmaDepth",
"altitude",
"azimuth",
"paraAngle",
"cloud",
"moonAlt",
"sunAlt",
"note",
"target",
"fieldId",
"proposalId",
"block_id",
"observationStartLST",
"rotTelPos",
"rotTelPos_backup",
"moonAz",
"sunAz",
"sunRA",
"sunDec",
"moonRA",
"moonDec",
"moonDistance",
"solarElong",
"moonPhase",
"cummTelAz",
"scripted_id",
]

-def read_opsim(opsim_uri, start_time="2000-01-01", end_time="2100-01-01"):

class StartDateStacker(maf.BaseStacker):
"""Add the start date."""

cols_added = ["start_date"]

def __init__(self, start_mjd_col="observationStartMJD"):
self.units = "ns"
self.cols_req = [start_mjd_col]
self.start_mjd_col = start_mjd_col

def _run(self, sim_data, cols_present=False):
"""The start date as a datetime."""
if cols_present:
# Column already present in data; assume it is correct and does not
# need recalculating.
return sim_data
if len(sim_data) == 0:
return sim_data

sim_data["start_date"] = pd.to_datetime(
sim_data[self.start_mjd_col] + 2400000.5, origin="julian", unit="D", utc=True
)

return sim_data


DEFAULT_STACKERS = [maf.HourAngleStacker(), StartDateStacker()]


def read_opsim(
opsim_uri,
start_time=None,
end_time=None,
constraint=None,
dbcols=DEFAULT_VISITS_COLUMNS,
stackers=DEFAULT_STACKERS,
**kwargs,
):
"""Read visits from an opsim database.
Parameters
@@ -18,14 +104,36 @@ def read_opsim(opsim_uri, start_time="2000-01-01", end_time="2100-01-01"):
The start time for visits to be loaded
end_time : `str`, `astropy.time.Time`
The end time for visits to be loaded
constraint : `str`, None
Query for which visits to load.
dbcols : `list` [`str`]
Columns required from the database.
stackers : `list` [`rubin_sim.maf.stackers`], optional
Stackers to be used to generate additional columns.
Returns
-------
visits : `pandas.DataFrame`
The visits and their parameters.
"""
-start_mjd = Time(start_time).mjd
-end_mjd = Time(end_time).mjd

# Add constraints corresponding to the requested start and end times
if (start_time is not None) or (end_time is not None):
if constraint is None:
constraint = ""

if start_time is not None:
if len(constraint) > 0:
constraint += " AND "
constraint += f"(observationStartMJD >= {Time(start_time).mjd})"

if end_time is not None:
if len(constraint) > 0:
constraint += " AND "
constraint += f"(observationStartMJD <= {Time(end_time).mjd})"

if stackers is not None and len(stackers) > 0:
kwargs["stackers"] = stackers

original_resource_path = ResourcePath(opsim_uri)

@@ -42,18 +150,53 @@ def read_opsim(opsim_uri, start_time="2000-01-01", end_time="2100-01-01"):

with obs_path.as_local() as local_obs_path:
with sqlite3.connect(local_obs_path.ospath) as sim_connection:
-visits = pd.read_sql_query(
-    f"SELECT * FROM observations WHERE observationStartMJD BETWEEN {start_mjd} AND {end_mjd}",
-    sim_connection,
-    index_col="observationId",
-)
-
-visits["start_date"] = pd.to_datetime(
-    visits["observationStartMJD"] + 2400000.5, origin="julian", unit="D", utc=True
-)
visits = pd.DataFrame(maf.get_sim_data(sim_connection, constraint, dbcols, **kwargs))

if "start_date" in visits:
visits["start_date"] = pd.to_datetime(visits.start_date, unit="ns", utc=True)

if "HA_hours" not in visits.columns:
visits["HA_hours"] = (visits.observationStartLST - visits.fieldRA) * 24.0 / 360.0
visits["HA_hours"] = np.mod(visits["HA_hours"] + 12.0, 24) - 12
visits.set_index("observationId", inplace=True)

return visits


def read_ddf_visits(
opsim_uri,
start_time=None,
end_time=None,
dbcols=DEFAULT_VISITS_COLUMNS,
stackers=DEFAULT_STACKERS,
**kwargs,
):
"""Read DDF visits from an opsim database.
Parameters
----------
opsim_uri : `str`
The URI from which to load visits.
start_time : `str`, `astropy.time.Time`
The start time for visits to be loaded.
end_time : `str`, `astropy.time.Time`
The end time for visits to be loaded.
dbcols : `list` [`str`]
Columns required from the database.
stackers : `list` [`rubin_sim.maf.stackers`], optional
Stackers to be used to generate additional columns.
Returns
-------
visits : `pandas.DataFrame`
The visits and their parameters.
"""
ddf_field_names = tuple(ddf_locations().keys())
constraint = f"target IN {ddf_field_names}"
visits = read_opsim(
opsim_uri,
start_time=start_time,
end_time=end_time,
constraint=constraint,
dbcols=dbcols,
stackers=stackers,
**kwargs,
)
return visits
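A usage sketch for the reworked loaders follows; it is not part of the commit, and the database file name and date window are hypothetical:

from schedview.collect.opsim import read_ddf_visits, read_opsim

# All visits in a one-month window. The default stackers add start_date,
# and HA_hours is computed as a fallback when no stacker supplies it.
visits = read_opsim(
    "baseline_v3.2_10yrs.db",
    start_time="2025-11-01",
    end_time="2025-12-01",
)

# Only visits whose target is one of the rubin_scheduler deep drilling fields.
ddf_visits = read_ddf_visits(
    "baseline_v3.2_10yrs.db",
    start_time="2025-11-01",
    end_time="2025-12-01",
)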
4 changes: 4 additions & 0 deletions schedview/compute/__init__.py
@@ -10,10 +10,14 @@
"make_scheduler_summary_df",
"make_survey_reward_df",
"compute_maps",
"compute_metric_by_visit",
"compute_hpix_metric_in_bands",
"visits",
]

from .astro import convert_evening_date_to_night_of_survey, night_events
from .camera import LsstCameraFootprintPerimeter
from .maf import compute_hpix_metric_in_bands, compute_metric_by_visit
from .scheduler import (
compute_basis_function_reward_at_time,
compute_basis_function_rewards,
112 changes: 112 additions & 0 deletions schedview/compute/maf.py
@@ -0,0 +1,112 @@
import sqlite3
from pathlib import Path
from tempfile import TemporaryDirectory

import pandas as pd
from rubin_sim import maf
from rubin_sim.scheduler.utils import SchemaConverter

__all__ = ["compute_metric_by_visit", "compute_hpix_metric_in_bands"]


def _visits_to_opsim(visits, opsim):
# Only write columns in visits that are in opsim databases,
# thereby avoiding added columns that might not be types
# that can be written to sqlite databases.
opsim_columns = list(SchemaConverter().convert_dict.keys())
with sqlite3.connect(opsim) as connection:
visits.reset_index()[opsim_columns].to_sql("observations", connection, index=False)


def compute_metric(visits, metric_bundle):
"""Compute metrics with MAF.
Parameters
----------
visits : `pandas.DataFrame`
The DataFrame of visits (with column names matching those of opsim
database).
metric_bundle : `maf.MetricBundle`, `dict`, or `list` of `maf.MetricBundle`
The metric bundle(s) to run.
Returns
-------
metric_bundle : `maf.MetricBundle`, `dict`, or `list` of `maf.MetricBundle`
The metric bundle(s) passed in, now with their metric values computed.
"""
passed_one_bundle = isinstance(metric_bundle, maf.MetricBundle)
metric_bundles = [metric_bundle] if passed_one_bundle else metric_bundle

with TemporaryDirectory() as working_dir:
visits_db = Path(working_dir).joinpath("visits.db").as_posix()
_visits_to_opsim(visits, visits_db)

bundle_group = maf.MetricBundleGroup(metric_bundles, visits_db, out_dir=working_dir)
bundle_group.run_all()

return metric_bundle


def compute_metric_by_visit(visits, metric, constraint=""):
"""Compute a MAF metric by visit.
Parameters
----------
visits : `pandas.DataFrame`
The DataFrame of visits (with column names matching those of opsim
database).
metric : `rubin_sim.maf.metrics.BaseMetric`
The metric to compute.
constraint : `str`
The SQL query to filter visits to be used.
Returns
-------
values : `pandas.Series`
The metric values.
"""
slicer = maf.OneDSlicer("observationId", bin_size=1)
metric_bundle = maf.MetricBundle(slicer=slicer, metric=metric, constraint=constraint)

compute_metric(visits, metric_bundle)
result = pd.Series(metric_bundle.metric_values, index=slicer.slice_points["bins"][:-1].astype(int))
result.index.name = "observationId"
return result


def compute_hpix_metric_in_bands(visits, metric, constraint="", nside=32):
"""Compute a MAF metric by visit.
Parameters
----------
visits : `pandas.DataFrame`
The DataFrame of visits (with column names matching those of opsim
database).
metric : `rubin_sim.maf.metrics.BaseMetric`
The metric to compute.
constraint : `str`
The SQL query to filter visits to be used.
nside : `int`
The healpix nside of the healpix arrays to return.
Returns
-------
metric_values : `dict`
A dictionary of healpix arrays, where the keys are the filters with
visits in the input visit DataFrame.
"""
# Do only the filters we actually used
used_filters = visits["filter"].unique()

bundles = {}
for this_filter in used_filters:
this_constraint = f"filter == '{this_filter}'"
if len(constraint) > 0:
this_constraint += f" AND {constraint}"
slicer = maf.HealpixSlicer(nside=nside)
bundles[this_filter] = maf.MetricBundle(metric, slicer, this_constraint)

compute_metric(visits, bundles)
metric_values = {b: bundles[b].metric_values for b in bundles if bundles[b].metric_values is not None}

return metric_values
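A sketch of driving the two public helpers with a visits DataFrame from read_opsim; the metric choices (Coaddm5Metric and CountMetric, both standard rubin_sim.maf metrics) are illustrative, and the database file name is hypothetical:

from rubin_sim import maf

from schedview.collect.opsim import read_opsim
from schedview.compute.maf import compute_hpix_metric_in_bands, compute_metric_by_visit

visits = read_opsim("baseline_v3.2_10yrs.db")  # hypothetical file name

# One value per visit, as a pandas Series indexed by observationId.
depth_by_visit = compute_metric_by_visit(visits, maf.Coaddm5Metric())

# One healpix array (nside=32) per band with visits in the DataFrame.
count_maps = compute_hpix_metric_in_bands(visits, maf.CountMetric(col="night"), nside=32)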
(The diffs of the remaining 19 changed files are not shown.)
