Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .bumpversion.cfg
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
[bumpversion]
current_version = 0.3.7
current_version = 0.3.8
files = setup.py aurora/__init__.py
1 change: 1 addition & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ include LICENSE
include README.rst

recursive-include tests *
recursive-include data *
recursive-include tests/parkfield/config/*.json
recursive-exclude * __pycache__
recursive-exclude * *.py[co]
Expand Down
23 changes: 22 additions & 1 deletion aurora/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1,22 @@
__version__ = "0.3.7"
__version__ = "0.3.8"

import sys
from loguru import logger


# =============================================================================
# Initiate loggers
# =============================================================================
# Loguru configuration: send all records to stdout at INFO level with a
# colorized "time | level | name | function | message" layout.
config = {
    "handlers": [
        {
            "sink": sys.stdout,
            "level": "INFO",
            "colorize": True,
            "format": "<level>{time} | {level: <3} | {name} | {function} | {message}</level>",
        },
    ],
    # "extra" fields are attached to every record; "user" is a placeholder
    # value here — presumably for later use with logger.bind(); TODO confirm.
    "extra": {"user": "someone"},
}
logger.configure(**config)
# logger.disable("mt_metadata")
# logger.disable("mt_metadata")
21 changes: 16 additions & 5 deletions aurora/general_helper_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,25 +3,36 @@
import scipy.io as sio
import subprocess

from loguru import logger
from pathlib import Path

import aurora
import mt_metadata

# Resolve key package paths relative to the installed aurora package.
init_file = inspect.getfile(aurora)
# Two levels above aurora/__init__.py (the repo root in a source checkout).
AURORA_PATH = Path(init_file).parent.parent
DATA_PATH = AURORA_PATH.joinpath("data")
# NOTE(review): tests/ only exists in source checkouts, not pypi/conda installs.
TEST_PATH = AURORA_PATH.joinpath("tests")
SANDBOX = AURORA_PATH.joinpath("aurora", "sandbox")
CONFIG_PATH = AURORA_PATH.joinpath("aurora", "config")
BAND_SETUP_PATH = CONFIG_PATH.joinpath("emtf_band_setup")


def get_test_path() -> Path:
    """
    Return the path to the aurora tests directory.

    Returns
    -------
    test_path: pathlib.Path
        Path to the tests directory.  The directory may not exist when
        aurora was installed from pypi or conda-forge (source tests are
        not shipped); in that case a warning is logged and the caller
        should use DATA_PATH instead.
    """
    test_path = AURORA_PATH.joinpath("tests")
    if not test_path.exists():
        # Interpolate the local path we actually checked (previously this
        # referenced the module constant TEST_PATH, which could drift).
        msg = (
            f"Could not locate test directory {test_path}\n "
            "This is most likely because aurora was installed from pypi or conda forge\n"
            "TEST_PATH should be replaced with DATA_PATH"
        )
        logger.warning(msg)
    return test_path


# Create writable sandbox output directories at import time.  If the install
# location is read-only (OSError, e.g. system site-packages), fall back to
# None so importing the package still succeeds.
try:
    DATA_PATH = SANDBOX.joinpath("data")
    DATA_PATH.mkdir(exist_ok=True, parents=True)
    FIGURES_PATH = DATA_PATH.joinpath("figures")
    FIGURES_PATH.mkdir(exist_ok=True, parents=True)
    # TEST_BAND_FILE = DATA_PATH.joinpath("bandtest.nc")
except OSError:
    DATA_PATH = None
    FIGURES_PATH = None
# Location of the data directory shipped alongside the mt_metadata package.
mt_metadata_init = inspect.getfile(mt_metadata)
MT_METADATA_DATA = Path(mt_metadata_init).parent.parent.joinpath("data")
Expand Down
1 change: 1 addition & 0 deletions aurora/pipelines/transfer_function_kernel.py
Original file line number Diff line number Diff line change
Expand Up @@ -273,6 +273,7 @@ def show_processing_summary(
"output_channels",
"num_samples_overlap",
"num_samples_advance",
"run_dataarray",
],
):
columns_to_show = self.processing_summary.columns
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,13 @@
the matlab tests.

"""
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy.io as sio

from aurora.config.metadata.processing import Processing
from aurora.config.emtf_band_setup import BANDS_256_29_FILE
from aurora.general_helper_functions import TEST_PATH
from aurora.general_helper_functions import get_test_path
from aurora.pipelines.transfer_function_kernel import TransferFunctionKernel
from aurora.sandbox.io_helpers.emtf_band_setup import EMTFBandSetupFile
from aurora.transfer_function.emtf_z_file_helpers import clip_bands_from_z_file
Expand All @@ -21,6 +20,8 @@
from mt_metadata.timeseries.survey import Survey
from mt_metadata.transfer_functions.core import TF

TEST_PATH = get_test_path()


def read_matlab_z_file(case_id, z_mat):
tmp = sio.loadmat(z_mat)
Expand Down
18 changes: 0 additions & 18 deletions aurora/sandbox/io_helpers/zfile_murphy.py
Original file line number Diff line number Diff line change
Expand Up @@ -406,21 +406,3 @@ def read_z_file(z_file_path, angle=0.0):
z_obj.load()
z_obj.apparent_resistivity(angle=angle)
return z_obj


def test_reader(z_file_path=None):
    """
    Exercise the z-file reader.

    Parameters
    ----------
    z_file_path: None or pathlib.Path
        File to read; when None, the synthetic test1.zss fixture is used.

    Returns
    -------
    z_obj: the object returned by read_z_file
    """
    from aurora.general_helper_functions import TEST_PATH

    if z_file_path is None:
        print("DEFAULT ZFILE FROM SYNTHETIC TEST BEING LOADED")
        z_file_path = TEST_PATH.joinpath("synthetic", "emtf_output", "test1.zss")
    return read_z_file(z_file_path)


def main():
    # Script entry point: run the reader against its default fixture.
    test_reader()


if __name__ == "__main__":
    main()
7 changes: 4 additions & 3 deletions aurora/test_utils/parkfield/make_parkfield_mth5.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from mth5.utils.helpers import read_back_data
from mth5.helpers import close_open_files
from aurora.sandbox.io_helpers.make_mth5_helpers import create_from_server_multistation
from aurora.test_utils.parkfield.path_helpers import DATA_PATH
from aurora.test_utils.parkfield.path_helpers import PARKFIELD_PATHS

DATA_SOURCES = ["NCEDC", "https://service.ncedc.org/"]
DATASET_ID = "pkd_sao_test_00"
Expand Down Expand Up @@ -39,7 +39,7 @@ def make_pkdsao_mth5(fdsn_dataset):
fdsn_dataset.initialize_client()
h5_path = create_from_server_multistation(
fdsn_dataset,
target_folder=DATA_PATH,
target_folder=PARKFIELD_PATHS["data"],
triage_units="V/m to mV/km",
)

Expand All @@ -56,7 +56,8 @@ def ensure_h5_exists():
-------

"""
h5_path = DATA_PATH.joinpath(FDSN_DATASET.h5_filebase)

h5_path = PARKFIELD_PATHS["data"].joinpath(FDSN_DATASET.h5_filebase)
if h5_path.exists():
return h5_path

Expand Down
21 changes: 11 additions & 10 deletions aurora/test_utils/parkfield/path_helpers.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,14 @@
from aurora.general_helper_functions import TEST_PATH
from aurora.general_helper_functions import DATA_PATH

PARKFIELD_PATH = TEST_PATH.joinpath("parkfield")

AURORA_RESULTS_PATH = PARKFIELD_PATH.joinpath("aurora_results")
CONFIG_PATH = PARKFIELD_PATH.joinpath("config")
DATA_PATH = PARKFIELD_PATH.joinpath("data")
EMTF_RESULTS_PATH = PARKFIELD_PATH.joinpath("emtf_results")
def make_parkfield_paths(data_path=None):
    """
    Build the dictionary of directories used by the Parkfield tests.

    Parameters
    ----------
    data_path: None or pathlib.Path
        Base data directory under which the "parkfield" tree lives.
        Defaults to the package-level DATA_PATH when None (backward
        compatible with the original zero-argument call).

    Returns
    -------
    parkfield_paths: dict
        Keys "data", "aurora_results", "config", "emtf_results" mapping
        to pathlib.Path objects under <data_path>/parkfield.
    """
    if data_path is None:
        data_path = DATA_PATH
    base_path = data_path.joinpath("parkfield")
    return {
        "data": base_path,
        "aurora_results": base_path.joinpath("aurora_results"),
        "config": base_path.joinpath("config"),
        "emtf_results": base_path.joinpath("emtf_results"),
    }

# May want to create results and data dir on init
AURORA_RESULTS_PATH.mkdir(exist_ok=True)
DATA_PATH.mkdir(exist_ok=True)
CONFIG_PATH.mkdir(exist_ok=True)

PARKFIELD_PATHS = make_parkfield_paths()
33 changes: 23 additions & 10 deletions aurora/test_utils/synthetic/make_mth5_from_asc.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,9 @@
import pathlib
import scipy.signal as ssig

from aurora.test_utils.synthetic.paths import DATA_PATH
from loguru import logger

from aurora.test_utils.synthetic.paths import SyntheticTestPaths
from aurora.test_utils.synthetic.station_config import make_filters
from aurora.test_utils.synthetic.station_config import make_station_01
from aurora.test_utils.synthetic.station_config import make_station_02
Expand All @@ -39,6 +41,9 @@

np.random.seed(0)

synthetic_test_paths = SyntheticTestPaths()
MTH5_PATH = synthetic_test_paths.mth5_path


def create_run_ts_from_synthetic_run(run, df, channel_nomenclature="default"):
"""
Expand Down Expand Up @@ -141,7 +146,7 @@ def get_set_survey_id(m):
def create_mth5_synthetic_file(
station_cfgs,
mth5_name,
target_folder=DATA_PATH,
target_folder="",
plot=False,
add_nan_values=False,
file_version="0.1.0",
Expand All @@ -156,7 +161,7 @@ def create_mth5_synthetic_file(
station_cfgs: list of dicts
The dicts are one-off data structure used to hold information mth5 needs to
initialize, specifically sample_rate, filters, etc.
mth5_path: string or pathlib.Path()
mth5_name: string or pathlib.Path()
Where the mth5 will be stored. This is generated by the station_config,
but may change in this method based on add_nan_values or channel_nomenclature
plot: bool
Expand All @@ -182,6 +187,13 @@ def create_mth5_synthetic_file(
mth5_path: pathlib.Path
The path to the stored h5 file.
"""
if not target_folder:
msg = f"No target folder provided for making {mth5_name}"
logger.warning("No target folder provided for making {}")
msg = f"Setting target folder to {MTH5_PATH}"
logger.info(msg)
target_folder = MTH5_PATH

mth5_path = target_folder.joinpath(mth5_name)
# set name for output h5 file
if add_nan_values:
Expand Down Expand Up @@ -251,7 +263,7 @@ def create_mth5_synthetic_file(
def create_test1_h5(
file_version="0.1.0",
channel_nomenclature="default",
target_folder=DATA_PATH,
target_folder=MTH5_PATH,
force_make_mth5=True,
):
station_01_params = make_station_01(channel_nomenclature=channel_nomenclature)
Expand All @@ -275,7 +287,7 @@ def create_test2_h5(
file_version="0.1.0",
channel_nomenclature="default",
force_make_mth5=True,
target_folder=DATA_PATH,
target_folder=MTH5_PATH,
):
station_02_params = make_station_02(channel_nomenclature=channel_nomenclature)
mth5_name = station_02_params.mth5_name
Expand All @@ -296,10 +308,10 @@ def create_test2_h5(
def create_test1_h5_with_nan(
file_version="0.1.0",
channel_nomenclature="default",
target_folder=DATA_PATH,
target_folder=MTH5_PATH,
):
station_01_params = make_station_01(channel_nomenclature=channel_nomenclature)
mth5_name = station_01_params.mth5_name # DATA_PATH.joinpath("test1.h5")
mth5_name = station_01_params.mth5_name
station_params = [
station_01_params,
]
Expand All @@ -317,7 +329,7 @@ def create_test1_h5_with_nan(
def create_test12rr_h5(
file_version="0.1.0",
channel_nomenclature="default",
target_folder=DATA_PATH,
target_folder=MTH5_PATH,
):
station_01_params = make_station_01(channel_nomenclature=channel_nomenclature)
station_02_params = make_station_02(channel_nomenclature=channel_nomenclature)
Expand All @@ -339,7 +351,7 @@ def create_test3_h5(
file_version="0.1.0",
channel_nomenclature="default",
force_make_mth5=True,
target_folder=DATA_PATH,
target_folder=MTH5_PATH,
):

station_03_params = make_station_03(channel_nomenclature=channel_nomenclature)
Expand All @@ -359,7 +371,7 @@ def create_test3_h5(
def create_test4_h5(
file_version="0.1.0",
channel_nomenclature="default",
target_folder=DATA_PATH,
target_folder=MTH5_PATH,
):
"""8Hz data kluged from the 1Hz ... only freqs below 0.5Hz will make sense (100 Ohmm and 45deg)"""
station_04_params = make_station_04(channel_nomenclature=channel_nomenclature)
Expand All @@ -371,6 +383,7 @@ def create_test4_h5(
plot=False,
file_version=file_version,
channel_nomenclature=channel_nomenclature,
target_folder=target_folder,
upsample_factor=8,
)
return mth5_path
Expand Down
9 changes: 6 additions & 3 deletions aurora/test_utils/synthetic/make_processing_configs.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
from aurora.config import BANDS_DEFAULT_FILE
from aurora.config import BANDS_256_26_FILE
from aurora.config.config_creator import ConfigCreator
from aurora.test_utils.synthetic.paths import CONFIG_PATH
from aurora.test_utils.synthetic.paths import DATA_PATH
from aurora.test_utils.synthetic.paths import SyntheticTestPaths

synthetic_test_paths = SyntheticTestPaths()
CONFIG_PATH = synthetic_test_paths.config_path
MTH5_PATH = synthetic_test_paths.mth5_path


def create_test_run_config(
Expand Down Expand Up @@ -132,7 +135,7 @@ def test_to_from_json():
from aurora.transfer_function.kernel_dataset import KernelDataset

# Specify path to mth5
data_path = DATA_PATH.joinpath("test1.h5")
data_path = MTH5_PATH.joinpath("test1.h5")
if not data_path.exists():
print("You need to run make_mth5_from_asc.py")
raise Exception
Expand Down
64 changes: 56 additions & 8 deletions aurora/test_utils/synthetic/paths.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,58 @@
from aurora.general_helper_functions import TEST_PATH
"""
Sets up paths for synthetic data testing.

SYNTHETIC_PATH = TEST_PATH.joinpath("synthetic")
CONFIG_PATH = SYNTHETIC_PATH.joinpath("config")
DATA_PATH = SYNTHETIC_PATH.joinpath("data")
EMTF_OUTPUT_PATH = SYNTHETIC_PATH.joinpath("emtf_output")
AURORA_RESULTS_PATH = SYNTHETIC_PATH.joinpath("aurora_results")
"""
import pathlib

AURORA_RESULTS_PATH.mkdir(exist_ok=True)
CONFIG_PATH.mkdir(exist_ok=True)
from aurora.general_helper_functions import DATA_PATH
from loguru import logger


class SyntheticTestPaths:
    """
    Directory layout used by the synthetic-data tests.

    Attributes
    ----------
    ascii_data_path: pathlib.Path
        Read-only location of the ascii source data.
    mth5_path: pathlib.Path
        Where mth5 files get written.
    config_path: pathlib.Path
        Where config files get saved while tests are running.
    aurora_results_path: pathlib.Path
        Where processing results get saved during test execution.
    emtf_results_path: pathlib.Path
        Where emtf results are stored.
    """

    def __init__(self, sandbox_path=None):
        """
        Parameters
        ----------
        sandbox_path: None or pathlib.Path
            Writable base directory for test outputs.  Defaults to the
            package data path (DATA_PATH/synthetic) when None.
        """
        self._root_path = DATA_PATH.joinpath("synthetic")
        if sandbox_path is None:
            logger.debug(f"synthetic sandbox path is being set to {self._root_path}")
            self._sandbox_path = self._root_path
        else:
            # Bug fix: a caller-supplied sandbox_path was previously ignored,
            # leaving self._sandbox_path unset and raising AttributeError on
            # the joinpath calls below.
            self._sandbox_path = pathlib.Path(sandbox_path)

        # READ ONLY OK
        self.ascii_data_path = self._root_path.joinpath("ascii")

        # NEED WRITE ACCESS
        # Consider using an environment variable for sandbox_path
        self.mth5_path = self._sandbox_path.joinpath("mth5")
        self.aurora_results_path = self._sandbox_path.joinpath("aurora_results")
        self.emtf_results_path = self._sandbox_path.joinpath("emtf_results")
        self.config_path = self._sandbox_path.joinpath("config")
        self.writability_check()
        assert self.ascii_data_path.exists()

    def writability_check(self):
        """
        Placeholder.
        Should check that sandbox and dirs below have write access.
        If dirs are not writeable, consider
        HOME = pathlib.Path().home()
        workaround_sandbox = HOME.joinpath(".cache", "aurora", "sandbox")
        """
        pass

    def mkdirs(self):
        """Create all writable output directories (idempotent)."""
        self.aurora_results_path.mkdir(parents=True, exist_ok=True)
        self.config_path.mkdir(parents=True, exist_ok=True)
        self.emtf_results_path.mkdir(parents=True, exist_ok=True)
        self.mth5_path.mkdir(parents=True, exist_ok=True)
Loading