2 changes: 1 addition & 1 deletion examples/sim.py
@@ -26,7 +26,7 @@ def future_loading(t, x=None):
elif (t < 1800):
i = 4
elif (t < 3000):
- i = 2
+ i = 2
else:
i = 3
return batt.InputContainer({'i': i})
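For context, future_loading above is the piecewise load profile handed to the battery model's simulation routine. A minimal usage sketch (assuming batt is the battery model constructed earlier in examples/sim.py; the save_freq value is illustrative):

# Hedged sketch: drive the simulation with the load profile defined above
results = batt.simulate_to_threshold(future_loading, save_freq=100)
print('Threshold (e.g., end of discharge) reached at t =', results.times[-1])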
25 changes: 13 additions & 12 deletions prog_model_template.py
@@ -16,7 +16,7 @@

# REPLACE THIS WITH DERIVED PARAMETER CALLBACKS (IF ANY)
# See examples.derived_params
- #
+ #
# Each function defines one or more derived parameters as a function of the other parameters.
def example_callback(params):
# Return format: dict of key: new value pair for at least one derived parameter
@@ -35,7 +35,7 @@ class ProgModelTemplate(PrognosticsModel):

# REPLACE THE FOLLOWING LIST WITH EVENTS BEING PREDICTED
events = [
- 'Example Event'
+ 'Example Event'
]

# REPLACE THE FOLLOWING LIST WITH INPUTS (LOADING)
@@ -54,18 +54,18 @@ class ProgModelTemplate(PrognosticsModel):

# REPLACE THE FOLLOWING LIST WITH OUTPUTS (MEASURED VALUES)
outputs = [
- 'Example Output 1',
- 'Example Output 2'
+ 'Example Output 1',
+ 'Example Output 2'
]

# REPLACE THE FOLLOWING LIST WITH CONFIGURED PARAMETERS
- # Note- everything required to configure the model
- # should be in parameters- this is to enable the serialization features
+ # Note- everything required to configure the model
+ # should be in parameters- this is to enable the serialization features
default_parameters = { # Set default parameters
'Example Parameter 1': 0,
'Example Parameter 2': 3,
'process_noise': 0.1, # Process noise
- 'x0': { # Initial state
+ 'x0': { # Initial state
'Examples State 1': 1.5,
'Examples State 2': -935,
'Examples State 3': 42.1,
@@ -85,7 +85,7 @@ class ProgModelTemplate(PrognosticsModel):
# See examples.derived_params
# Format: "trigger": [callbacks]
# Where trigger is the parameter that the derived parameters are derived from.
- # And callbacks are one or more callback functions that define parameters that are
+ # And callbacks are one or more callback functions that define parameters that are
# derived from that parameter
# REPLACE THIS WITH ACTUAL DERIVED PARAMETER CALLBACKS
param_callbacks = {
@@ -104,12 +104,12 @@ class ProgModelTemplate(PrognosticsModel):
# # ADD OPTIONS CHECKS HERE

# # e.g., Checking for required parameters
- # # if not 'required_param' in kwargs:
+ # # if not 'required_param' in kwargs:
# # throw Exception;

# super().__init__(**kwargs) # Run Parent constructor

- # Model state initialization - there are two ways to provide the logic to initialize model state.
+ # Model state initialization - there are two ways to provide the logic to initialize model state.
# 1. Provide the initial state in parameters['x0'], or
# 2. Provide an Initialization function
#
@@ -238,7 +238,7 @@ def output(self, x):
# NOTE: KEYS FOR z MATCH 'outputs' LIST ABOVE
z = self.OutputContainer({
'Example Output 1': 0.0,
- 'Example Output 2': 0.0
+ 'Example Output 2': 0.0
})

return z
@@ -268,7 +268,8 @@ def event_state(self, x):

return event_x

- # Note: Thresholds met equation below is not strictly necessary. By default threshold_met will check if event_state is ≤ 0 for each event
+ # Note: Thresholds met equation below is not strictly necessary.
+ # By default threshold_met will check if event_state is ≤ 0 for each event
def threshold_met(self, x):
"""
For each event threshold, calculate if it has been met
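To show how the template's placeholders fit together, here is a minimal, hypothetical subclass. Every name, value, and equation below is invented for illustration and is not part of this PR:

from prog_models import PrognosticsModel

class SimpleDischarge(PrognosticsModel):
    # Hypothetical model: stored charge drains under an applied current
    events = ['EOD']        # event being predicted
    inputs = ['i']          # applied current
    states = ['q']          # remaining charge
    outputs = ['v']         # measured voltage

    default_parameters = {
        'q0': 100.0,        # nominal (full) charge
        'v_per_q': 0.04,    # illustrative charge-to-voltage factor
        'process_noise': 0.1,
        'x0': {'q': 100.0}  # initial state
    }

    def dx(self, x, u):
        # Charge decreases at the applied current
        return self.StateContainer({'q': -u['i']})

    def output(self, x):
        return self.OutputContainer({'v': x['q'] * self.parameters['v_per_q']})

    def event_state(self, x):
        # 1 = new, 0 = event reached
        return {'EOD': max(x['q'] / self.parameters['q0'], 0.0)}

    def threshold_met(self, x):
        # Optional, as noted above: by default threshold_met checks event_state <= 0
        return {'EOD': x['q'] <= 0}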
12 changes: 6 additions & 6 deletions src/prog_models/composite_model.py
@@ -30,10 +30,10 @@ class CompositeModel(PrognosticsModel):

Keyword Args:
outputs (list[str]):
- Model outputs in format "model_name.output_name". Must be subset of all outputs from models. If not provided, all outputs will be included.
+ Model outputs in format "model_name.output_name". Must be subset of all outputs from models. If not provided, all outputs will be included.
"""

- def __init__(self, models, connections = [], **kwargs):
+ def __init__(self, models, connections=[], **kwargs):
# General Input Validation
if not isinstance(models, Iterable):
raise ValueError('The models argument must be a list')
@@ -55,7 +55,7 @@ def __init__(self, models, connections = [], **kwargs):
# Handle models
for m in models:
if isinstance(m, Iterable):
- if len(m) != 2:
+ if len(m) != 2:
raise ValueError('Each model tuple must be of the form (name: str, model). For example ("Batt1", BatteryElectroChem())')
if not isinstance(m[0], str):
raise ValueError('The first element of each model tuple must be a string')
@@ -95,8 +95,8 @@ def __init__(self, models, connections = [], **kwargs):

# Handle Connections
kwargs['connections'] = []
- self.__to_input_connections = {m_name : [] for m_name in self.model_names}
- self.__to_state_connections = {m_name : [] for m_name in self.model_names}
+ self.__to_input_connections = {m_name: [] for m_name in self.model_names}
+ self.__to_state_connections = {m_name: [] for m_name in self.model_names}

for connection in connections:
# Input validation
@@ -143,7 +143,7 @@ def __init__(self, models, connections = [], **kwargs):
# Finish initialization
super().__init__(**kwargs)

- def initialize(self, u = {}, z = {}):
+ def initialize(self, u={}, z={}):
if u is None:
u = {}
if z is None:
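For reviewers unfamiliar with the constructor touched here, a hedged usage sketch. The model choice, names, and connected keys are placeholders chosen only to illustrate the "model_name.key" format, not taken from this PR:

from prog_models import CompositeModel
from prog_models.models import BatteryCircuit

m1 = BatteryCircuit()
m2 = BatteryCircuit()
composite = CompositeModel(
    models=[('Batt1', m1), ('Batt2', m2)],   # list of (name, model) tuples
    connections=[('Batt1.v', 'Batt2.i')],    # 'source_model.key' feeds 'dest_model.input'
    outputs=['Batt1.v', 'Batt2.v'])          # subset, in 'model_name.output_name' form
x0 = composite.initialize()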
2 changes: 1 addition & 1 deletion src/prog_models/data_models/__init__.py
@@ -6,7 +6,7 @@
from .lstm_model import LSTMStateTransitionModel
from .pce import PolynomialChaosExpansion, PCE

- SURROAGATE_METHOD_LOOKUP = {
+ SURROGATE_METHOD_LOOKUP = {
'dmd': DMDModel.from_model,
'lstm': LSTMStateTransitionModel.from_model,
'pce': PolynomialChaosExpansion.from_model,
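For context on the renamed constant: this dict maps the method keyword used when generating a surrogate model to the matching from_model constructor. A hedged sketch of the call path, assuming the package's generate_surrogate entry point dispatches through this lookup:

from prog_models.models import BatteryElectroChemEOD

m = BatteryElectroChemEOD()

def load_fn(t, x=None):
    # Constant 2 A discharge, for illustration only
    return m.InputContainer({'i': 2})

# 'method' selects a key from SURROGATE_METHOD_LOOKUP ('dmd', 'lstm', or 'pce')
surrogate = m.generate_surrogate([load_fn], method='dmd', dt=0.1, save_freq=10)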
75 changes: 48 additions & 27 deletions src/prog_models/data_models/data_model.py
@@ -5,14 +5,14 @@
import numpy as np
import sys

- from .. import PrognosticsModel
+ from prog_models import PrognosticsModel


class DataModel(PrognosticsModel, ABC):
"""
.. versionadded:: 1.4.0

- Abstract Base Class for all Data Models (e.g., :py:class:`LSTMStateTransitionModel`). Defines the interface and all common tools. To create a new Data-Driven model, first subclass this, then define the abstract methods from this class and :py:class:`prog_models.PrognosticsModel`.
+ Abstract Base Class for all Data Models (e.g., :py:class:`LSTMStateTransitionModel`). Defines the interface and all common tools. To create a new Data-Driven model, first subclass this, then define the abstract methods from this class and :py:class:`prog_models.PrognosticsModel`.

See Also:
PrognosticsModel
@@ -24,25 +24,25 @@ def from_data(cls, **kwargs) -> "DataModel":
Create a Data Model from data. This class is overwritten by specific data-driven classes (e.g., :py:class:`LSTMStateTransitionModel`)

Keyword Arguments:
- times (list[list]):
+ times (list[list]):
list of input data for use in data. Each element is the times for a single run of size (n_times)
- inputs (list[np.array]):
+ inputs (list[np.array]):
list of :term:`input` data for use in data. Each element is the inputs for a single run of size (n_times, n_inputs)
- states (list[np.array]):
+ states (list[np.array]):
list of :term:`state` data for use in data. Each element is the states for a single run of size (n_times, n_states)
- outputs (list[np.array]):
+ outputs (list[np.array]):
list of :term:`output` data for use in data. Each element is the outputs for a single run of size (n_times, n_outputs)
- event_states (list[np.array]):
+ event_states (list[np.array]):
list of :term:`event state` data for use in data. Each element is the event states for a single run of size (n_times, n_event_states)
time_of_event (np.array):
Array of time of event data for use in data. Each element is the time of event for a single run of size (n_samples, n_events)
- input_keys (list[str]):
+ input_keys (list[str]):
List of :term:`input` keys
- state_keys (list[str]):
+ state_keys (list[str]):
List of :term:`state` keys
- output_keys (list[str]):
+ output_keys (list[str]):
List of :term:`output` keys
- event_keys (list[str]):
+ event_keys (list[str]):
List of :term:`event` keys

See specific data class for more additional keyword arguments
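To make the per-run shapes described above concrete, a small hedged sketch of how these keyword arguments are typically assembled (values are illustrative only; a real call would go through a concrete subclass such as LSTMStateTransitionModel):

import numpy as np

# Two illustrative runs, three timesteps each, one input key and one output key
times = [[0.0, 1.0, 2.0], [0.0, 1.0, 2.0]]     # list[list], one entry per run
inputs = [np.array([[1.0], [1.0], [1.0]]),     # each run: (n_times, n_inputs)
          np.array([[2.0], [2.0], [2.0]])]
outputs = [np.array([[3.2], [3.1], [3.0]]),    # each run: (n_times, n_outputs)
           np.array([[3.2], [3.0], [2.8]])]
input_keys = ['i']
output_keys = ['v']
# e.g., with a concrete DataModel subclass (and enough real data to train on):
# m = LSTMStateTransitionModel.from_data(
#     inputs=inputs, outputs=outputs,
#     input_keys=input_keys, output_keys=output_keys)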
@@ -64,17 +64,17 @@ def __setstate__(self, state):

def __getstate__(self):
# This is necessary to support pickling
- # Override this, replacing the [] with any arguments from the constructor
+ # Override this, replacing the [] with any arguments from constructor
return ([], self.parameters.data)

- def summary(self, file = sys.stdout):
+ def summary(self, file=sys.stdout):
"""
Print a summary of the model
"""
print(self.__class__.__name__, file=file)

@staticmethod
- def check_data_format(inputs, outputs, states = None, event_states = None, t_mets = None):
+ def check_data_format(inputs, outputs, states=None, event_states=None, t_mets=None):
if len(inputs) == 0:
raise ValueError("No data provided. inputs must be in format [run1_inputs, ...] and have at least one element")
if len(inputs) != len(outputs):
@@ -92,9 +92,9 @@ def from_model(cls, m: PrognosticsModel, load_functions: list, **kwargs) -> "Dat
Create a Data Model from an existing PrognosticsModel (i.e., a :term:`surrogate` model). Generates data through simulation with supplied load functions. Then calls :py:func:`from_data` to generate the model.

Args:
- m (PrognosticsModel):
+ m (PrognosticsModel):
Model to generate data from
- load_functions (list[function]):
+ load_functions (list[function]):
Each index is a callable loading function of (t, x = None) -> z used to predict :term:`future load` at a given time (t) and :term:`state` (x)

Keyword Args:
@@ -106,8 +106,8 @@ def from_model(cls, m: PrognosticsModel, load_functions: list, **kwargs) -> "Dat
Returns:
DataModel: Trained PrognosticsModel
"""
- # Configure
- config = { # Defaults
+ # Configure
+ config = { # Defaults
'add_dt': True,
'input_keys': m.inputs.copy(),
'output_keys': m.outputs.copy(),
Expand Down Expand Up @@ -139,27 +139,48 @@ def from_model(cls, m: PrognosticsModel, load_functions: list, **kwargs) -> "Dat

# Create sim config for each element
sim_cfg = [{
- cfg : config[cfg][i]
- for cfg in sim_cfg_params if cfg in config
+ cfg: config[cfg][i]
+ for cfg in sim_cfg_params if cfg in config
} for i in range(len(load_functions))]

- # Simulate
- data = [m.simulate_to_threshold(load, **sim_cfg[i]) for (i, load) in enumerate(load_functions)]
+ # Simulate
+ data = [
+ m.simulate_to_threshold(load, **sim_cfg[i])
+ for (i, load) in enumerate(load_functions)]

# Prepare data
times = [d.times for d in data]
if config['add_dt']:
config['input_keys'].append('dt')
if len(data[0].inputs) > 0 and len(data[0].inputs[0]) == 0:
# No inputs
- inputs = [np.array([[config['dt'][i]] for _ in data[i].inputs], dtype=float) for i in range(len(data))]
+ inputs = [
+ np.array(
+ [[config['dt'][i]] for _ in data[i].inputs],
+ dtype=float)
+ for i in range(len(data))]
else:
- inputs = [np.array([np.hstack((u_i.matrix[:][0].T, [config['dt'][i]])) for u_i in d.inputs], dtype=float) for i, d in enumerate(data)]
+ inputs = [
+ np.array(
+ [
+ np.hstack((u_i.matrix[:][0].T, [config['dt'][i]]))
+ for u_i in d.inputs],
+ dtype=float)
+ for i, d in enumerate(data)]
else:
inputs = [d.inputs for d in data]
outputs = [d.outputs for d in data]
states = [d.states for d in data]
event_states = [d.event_states for d in data]
- t_met = [[list(m.threshold_met(x).values()) for x in state] for state in states]

- return cls.from_data(times = times, inputs = inputs, states = states, outputs = outputs, event_states = event_states, t_met= t_met, **config)
+ t_met = [
+ [list(m.threshold_met(x).values()) for x in state]
+ for state in states]

+ return cls.from_data(
+ times=times,
+ inputs=inputs,
+ states=states,
+ outputs=outputs,
+ event_states=event_states,
+ t_met=t_met,
+ **config)
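Finally, the refactored from_model path above is typically exercised along these lines. A hedged sketch using the package's ThrownObject example model; the LSTM subclass and the training options are illustrative choices, not prescribed by this PR:

from prog_models.models import ThrownObject
from prog_models.data_models import LSTMStateTransitionModel

m = ThrownObject()

def load_fn(t, x=None):
    # ThrownObject takes no inputs, so this exercises the "No inputs" branch above
    return m.InputContainer({})

# Simulates m once per load function, then hands the collected runs to from_data
surrogate = LSTMStateTransitionModel.from_model(
    m, [load_fn],
    dt=0.1, save_freq=0.1,    # simulation resolution for the generated data
    window=4, epochs=10)      # illustrative LSTM training options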