Skip to content

Commit

Permalink
Merge pull request #26 from prakaa/master
Browse files Browse the repository at this point in the history
Logging + repo reorg
  • Loading branch information
nick-gorman authored Oct 18, 2022
2 parents d5f87ca + d393dcb commit 87283b2
Show file tree
Hide file tree
Showing 36 changed files with 6,956 additions and 4,415 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,4 @@ nemosis/raw_aemo_data/*
NEMOSIS.exe
nemosis/smoke_tests.py
nemosis/check_new_bid_table_functionality.py
*.pyc
15 changes: 15 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
- [Additional columns](#accessing-additional-table-columns)
- [Data from static tables](#data-from-static-tables)
- [static_table](#static_table)
- [Disable logging](#disable-logging)

-----

Expand Down Expand Up @@ -225,3 +226,17 @@ from nemosis import static_table

fcas_variables = static_table('VARIABLES_FCAS_4_SECOND', raw_data_cache)
```
### Disable logging

NEMOSIS uses the Python logging module to print messages to the console. If desired, these messages can be suppressed after imports, as shown below. This raises the logging threshold so that only messages at WARNING level or above are shown.

```python

import logging

from nemosis import dynamic_data_compiler

logging.getLogger("nemosis").setLevel(logging.WARNING)

```
12 changes: 12 additions & 0 deletions e.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
"""Minimal nemosis usage example with console logging quietened.

Fetches five minutes of DISPATCHPRICE data via dynamic_data_compiler,
caching raw AEMO files in a local directory.
"""
import logging

from nemosis import dynamic_data_compiler

# Only show nemosis log messages at WARNING level or above.
logging.getLogger("nemosis").setLevel(logging.WARNING)

# Query window and table to fetch.
window_start = '2017/01/01 00:00:00'
window_end = '2017/01/01 00:05:00'
table_name = 'DISPATCHPRICE'
cache_dir = 'D:/nemosis_cache'  # local directory used to cache raw AEMO data

price_data = dynamic_data_compiler(window_start, window_end, table_name, cache_dir)
147 changes: 90 additions & 57 deletions examples/.ipynb_checkpoints/agc_following-checkpoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,73 +5,106 @@
import plotly.express as px

# Specify where we will be caching the raw AEMO data.
raw_data_cache = 'C:/Users/nick/Desktop/cache'
raw_data_cache = "C:/Users/nick/Desktop/cache"

# Time window to pull data from.
start_time = '2021/04/27 00:00:00'
end_time = '2021/04/28 00:00:00'
start_time = "2021/04/27 00:00:00"
end_time = "2021/04/28 00:00:00"

# Download the latest FCAS causer pays elements file. The update_static_file=True argument forces nemosis to
# download a new copy of the file from AEMO even if a copy already exists in the cache.
fcas_causer_pays_elements = static_table(table_name='ELEMENTS_FCAS_4_SECOND', raw_data_location=raw_data_cache,
update_static_file=True)
fcas_causer_pays_elements = static_table(
table_name="ELEMENTS_FCAS_4_SECOND",
raw_data_location=raw_data_cache,
update_static_file=True,
)

# Using filtering and manual inspection find which fcas element numbers belong to Hornsdale Power Reserve.
elements_for_honsdale_power_reserve = \
fcas_causer_pays_elements[fcas_causer_pays_elements['EMSNAME'].str.contains('HPR')]
elements_for_honsdale_power_reserve = fcas_causer_pays_elements[
fcas_causer_pays_elements["EMSNAME"].str.contains("HPR")
]

# Check which variable numbers we will need.
fcas_causer_pays_elements = static_table(table_name='ELEMENTS_FCAS_4_SECOND', raw_data_location=raw_data_cache,
update_static_file=True)

scada_4s_resolution = dynamic_data_compiler(start_time, end_time, table_name='FCAS_4_SECOND',
raw_data_location=raw_data_cache,
filter_cols=['ELEMENTNUMBER', 'VARIABLENUMBER'],
filter_values=([330, 331], [2, 5]), fformat='parquet')

scada_5min_resolution = dynamic_data_compiler(start_time, end_time, 'DISPATCHLOAD', raw_data_cache,
select_columns=['SETTLEMENTDATE', 'DUID', 'INITIALMW',
'TOTALCLEARED'],
filter_cols=['DUID'], filter_values=(['HPRG1', 'HPRL1'],))

elements = {
330: 'HPRG1',
331: 'HPRL1'
}

variables = {
2: 'scada_value',
5: 'regulation_target'
}

scada_4s_resolution['DUID'] = scada_4s_resolution['ELEMENTNUMBER'].apply(lambda x: elements[x])
scada_4s_resolution['variable'] = scada_4s_resolution['VARIABLENUMBER'].apply(lambda x: variables[x])

scada_4s_resolution = scada_4s_resolution.pivot(index=['TIMESTAMP', 'DUID'], columns='variable', values='VALUE')
fcas_causer_pays_elements = static_table(
table_name="ELEMENTS_FCAS_4_SECOND",
raw_data_location=raw_data_cache,
update_static_file=True,
)

scada_4s_resolution = dynamic_data_compiler(
start_time,
end_time,
table_name="FCAS_4_SECOND",
raw_data_location=raw_data_cache,
filter_cols=["ELEMENTNUMBER", "VARIABLENUMBER"],
filter_values=([330, 331], [2, 5]),
fformat="parquet",
)

scada_5min_resolution = dynamic_data_compiler(
start_time,
end_time,
"DISPATCHLOAD",
raw_data_cache,
select_columns=["SETTLEMENTDATE", "DUID", "INITIALMW", "TOTALCLEARED"],
filter_cols=["DUID"],
filter_values=(["HPRG1", "HPRL1"],),
)

elements = {330: "HPRG1", 331: "HPRL1"}

variables = {2: "scada_value", 5: "regulation_target"}

scada_4s_resolution["DUID"] = scada_4s_resolution["ELEMENTNUMBER"].apply(
lambda x: elements[x]
)
scada_4s_resolution["variable"] = scada_4s_resolution["VARIABLENUMBER"].apply(
lambda x: variables[x]
)

scada_4s_resolution = scada_4s_resolution.pivot(
index=["TIMESTAMP", "DUID"], columns="variable", values="VALUE"
)

scada_4s_resolution.reset_index(inplace=True)

scada = pd.merge_asof(scada_4s_resolution, scada_5min_resolution, left_on='TIMESTAMP',
right_on='SETTLEMENTDATE', by='DUID', direction='forward')

scada['fraction_ramp_complete'] = 1 - ((scada['SETTLEMENTDATE'] - scada['TIMESTAMP']) / timedelta(minutes=5))

scada['linear_ramp_target'] = scada['INITIALMW'] + \
(scada['TOTALCLEARED'] - scada['INITIALMW']) * scada['fraction_ramp_complete']

scada['linear_ramp_target'] = np.where(scada['DUID'] == 'HPRL1', -1 * scada['linear_ramp_target'],
scada['linear_ramp_target'])
scada['scada_value'] = np.where(scada['DUID'] == 'HPRL1', -1 * scada['scada_value'],
scada['scada_value'])
scada['regulation_target'] = np.where(scada['DUID'] == 'HPRL1', -1 * scada['regulation_target'],
scada['regulation_target'])

scada = scada.groupby('TIMESTAMP', as_index=False).agg(
{'linear_ramp_target': 'sum', 'scada_value': 'sum', 'regulation_target': 'sum'})

scada['target'] = scada['linear_ramp_target'] + scada['regulation_target']

fig = px.line(scada, x='TIMESTAMP', y=['target', 'scada_value'])
scada = pd.merge_asof(
scada_4s_resolution,
scada_5min_resolution,
left_on="TIMESTAMP",
right_on="SETTLEMENTDATE",
by="DUID",
direction="forward",
)

scada["fraction_ramp_complete"] = 1 - (
(scada["SETTLEMENTDATE"] - scada["TIMESTAMP"]) / timedelta(minutes=5)
)

scada["linear_ramp_target"] = (
scada["INITIALMW"]
+ (scada["TOTALCLEARED"] - scada["INITIALMW"]) * scada["fraction_ramp_complete"]
)

scada["linear_ramp_target"] = np.where(
scada["DUID"] == "HPRL1",
-1 * scada["linear_ramp_target"],
scada["linear_ramp_target"],
)
scada["scada_value"] = np.where(
scada["DUID"] == "HPRL1", -1 * scada["scada_value"], scada["scada_value"]
)
scada["regulation_target"] = np.where(
scada["DUID"] == "HPRL1",
-1 * scada["regulation_target"],
scada["regulation_target"],
)

scada = scada.groupby("TIMESTAMP", as_index=False).agg(
{"linear_ramp_target": "sum", "scada_value": "sum", "regulation_target": "sum"}
)

scada["target"] = scada["linear_ramp_target"] + scada["regulation_target"]

fig = px.line(scada, x="TIMESTAMP", y=["target", "scada_value"])
fig.show()


10 changes: 9 additions & 1 deletion nemosis/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
import logging
import sys
from . import data_fetch_methods
from .data_fetch_methods import *
name = "osdan"

name = "osdan"

logging.getLogger(__name__).addHandler(logging.NullHandler())
logging.basicConfig(
stream=sys.stdout, level=logging.INFO, format="%(levelname)s: %(message)s"
)
2 changes: 1 addition & 1 deletion nemosis/custom_errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,4 +11,4 @@ class DataMismatchError(Exception):


class DataFormatError(Exception):
pass
pass
Loading

0 comments on commit 87283b2

Please sign in to comment.