Move storage_level_intra_rp out of dataframes
abelsiqueira committed Nov 12, 2024
1 parent b4cfce3 commit c111a79
Showing 8 changed files with 164 additions and 59 deletions.
38 changes: 20 additions & 18 deletions src/constraints/storage.jl
@@ -8,37 +8,39 @@ Adds the storage asset constraints to the model.

function add_storage_constraints!(
model,
variables,
graph,
dataframes,
accumulated_energy_capacity,
incoming_flow_lowest_storage_resolution_intra_rp,
outgoing_flow_lowest_storage_resolution_intra_rp,
df_storage_intra_rp_balance_grouped,
df_storage_inter_rp_balance_grouped,
storage_level_intra_rp,
storage_level_inter_rp,
incoming_flow_storage_inter_rp_balance,
outgoing_flow_storage_inter_rp_balance,
)

## INTRA-TEMPORAL CONSTRAINTS (within a representative period)
storage_level_intra_rp = variables[:storage_level_intra_rp]
df_storage_intra_rp_balance_grouped =
DataFrames.groupby(storage_level_intra_rp.indices, [:asset, :year, :rep_period])

# - Balance constraint (using the lowest temporal resolution)
for ((a, rp, y), sub_df) in pairs(df_storage_intra_rp_balance_grouped)
for ((a, y, rp), sub_df) in pairs(df_storage_intra_rp_balance_grouped)
# This assumes an ordering of the time blocks, that is guaranteed inside
# construct_dataframes
# The storage_inflows have been moved here
model[Symbol("storage_intra_rp_balance_$(a)_$(y)_$(rp)")] = [
@constraint(
model,
storage_level_intra_rp[row.index] ==
storage_level_intra_rp.container[row.index] ==
(
if k > 1
storage_level_intra_rp[row.index-1] # This assumes contiguous index
storage_level_intra_rp.container[row.index-1] # This assumes contiguous index
else
(
if ismissing(graph[a].initial_storage_level[row.year])
storage_level_intra_rp[last(sub_df.index)]
storage_level_intra_rp.container[last(sub_df.index)]
else
graph[a].initial_storage_level[row.year]
end
@@ -51,12 +53,12 @@ function add_storage_constraints!(
row.year,
row.year,
("inflows", rp),
row.timesteps_block,
row.time_block_start:row.time_block_end,
0.0,
) * graph[a].storage_inflows[row.year] +
incoming_flow_lowest_storage_resolution_intra_rp[row.index] -
outgoing_flow_lowest_storage_resolution_intra_rp[row.index],
base_name = "storage_intra_rp_balance[$a,$y,$rp,$(row.timesteps_block)]"
base_name = "storage_intra_rp_balance[$a,$y,$rp,$(row.time_block_start:row.time_block_end)]"
) for (k, row) in enumerate(eachrow(sub_df))
]
end
@@ -65,44 +67,44 @@
model[:max_storage_level_intra_rp_limit] = [
@constraint(
model,
storage_level_intra_rp[row.index] ≤
storage_level_intra_rp.container[row.index] ≤
profile_aggregation(
Statistics.mean,
graph[row.asset].rep_periods_profiles,
row.year,
row.year,
("max-storage-level", row.rep_period),
row.timesteps_block,
row.time_block_start:row.time_block_end,
1.0,
) * accumulated_energy_capacity[row.year, row.asset],
base_name = "max_storage_level_intra_rp_limit[$(row.asset),$(row.year),$(row.rep_period),$(row.timesteps_block)]"
) for row in eachrow(dataframes[:storage_level_intra_rp])
base_name = "max_storage_level_intra_rp_limit[$(row.asset),$(row.year),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]"
) for row in eachrow(storage_level_intra_rp.indices)
]

# - Minimum storage level
model[:min_storage_level_intra_rp_limit] = [
@constraint(
model,
storage_level_intra_rp[row.index] ≥
storage_level_intra_rp.container[row.index] ≥
profile_aggregation(
Statistics.mean,
graph[row.asset].rep_periods_profiles,
row.year,
row.year,
("min_storage_level", row.rep_period),
row.timesteps_block,
row.time_block_start:row.time_block_end,
0.0,
) * accumulated_energy_capacity[row.year, row.asset],
base_name = "min_storage_level_intra_rp_limit[$(row.asset),$(row.year),$(row.rep_period),$(row.timesteps_block)]"
) for row in eachrow(dataframes[:storage_level_intra_rp])
base_name = "min_storage_level_intra_rp_limit[$(row.asset),$(row.year),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]"
) for row in eachrow(storage_level_intra_rp.indices)
]

# - Cycling condition
for ((a, _, y), sub_df) in pairs(df_storage_intra_rp_balance_grouped)
for ((a, y, _), sub_df) in pairs(df_storage_intra_rp_balance_grouped)
# Ordering is assumed
if !ismissing(graph[a].initial_storage_level[y])
JuMP.set_lower_bound(
storage_level_intra_rp[last(sub_df.index)],
storage_level_intra_rp.container[last(sub_df.index)],
graph[a].initial_storage_level[y],
)
end
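The change in this file follows one pattern: the constraints stop indexing the plain DataFrame from `dataframes[:storage_level_intra_rp]` and instead read a variable object whose `indices` table is grouped by `(asset, year, rep_period)` and whose `container` holds the JuMP variables. The sketch below illustrates that pattern only; `SketchVariable` is a hypothetical stand-in, not the package's actual `TulipaVariable` definition, and the balance terms are omitted.

```julia
using DataFrames, JuMP

# Hypothetical stand-in for the role TulipaVariable appears to play:
# one row of `indices` per variable instance, aligned with `container`.
struct SketchVariable
    indices::DataFrame
    container::Vector{VariableRef}
end

model = Model()
indices = DataFrame(
    index = 1:3,
    asset = fill("battery", 3),
    year = fill(2030, 3),
    rep_period = fill(1, 3),
    time_block_start = [1, 4, 7],
    time_block_end = [3, 6, 9],
)
storage_level = SketchVariable(
    indices,
    [@variable(model, lower_bound = 0) for _ in eachrow(indices)],
)

# Group the indices table as add_storage_constraints! now does:
grouped = DataFrames.groupby(storage_level.indices, [:asset, :year, :rep_period])
for ((a, y, rp), sub_df) in pairs(grouped)
    for (k, row) in enumerate(eachrow(sub_df))
        # k == 1 wraps around to the last block, mimicking the balance chain above
        previous = k > 1 ? storage_level.container[row.index-1] :
                           storage_level.container[last(sub_df.index)]
        @constraint(model, storage_level.container[row.index] == previous)
    end
end
```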
7 changes: 1 addition & 6 deletions src/create-model.jl
@@ -63,8 +63,6 @@ function create_model(
# Unpacking dataframes
@timeit to "unpacking dataframes" begin
df_units_on_and_outflows = dataframes[:units_on_and_outflows]
df_storage_intra_rp_balance_grouped =
DataFrames.groupby(dataframes[:storage_level_intra_rp], [:asset, :rep_period, :year])
df_storage_inter_rp_balance_grouped =
DataFrames.groupby(dataframes[:storage_level_inter_rp], [:asset, :year])
end
@@ -83,8 +81,6 @@
@timeit to "add_storage_variables!" add_storage_variables!(model, graph, sets, variables)

# TODO: This should disappear after the changes on add_expressions_to_dataframe! and storing the solution
storage_level_intra_rp =
model[:storage_level_intra_rp] = variables[:storage_level_intra_rp].container
storage_level_inter_rp =
model[:storage_level_inter_rp] = variables[:storage_level_inter_rp].container

@@ -167,14 +163,13 @@

@timeit to "add_storage_constraints!" add_storage_constraints!(
model,
variables,
graph,
dataframes,
accumulated_energy_capacity,
incoming_flow_lowest_storage_resolution_intra_rp,
outgoing_flow_lowest_storage_resolution_intra_rp,
df_storage_intra_rp_balance_grouped,
df_storage_inter_rp_balance_grouped,
storage_level_intra_rp,
storage_level_inter_rp,
incoming_flow_storage_inter_rp_balance,
outgoing_flow_storage_inter_rp_balance,
31 changes: 16 additions & 15 deletions src/io.jl
@@ -527,21 +527,22 @@ function save_solution_to_file(output_folder, graph, dataframes, solution)
# )
# output_table |> CSV.write(output_file)

output_file = joinpath(output_folder, "storage-level-intra-rp.csv")
output_table = DataFrames.select(
dataframes[:storage_level_intra_rp],
:asset,
:rep_period,
:timesteps_block => :timestep,
)
output_table.value = solution.storage_level_intra_rp
if !isempty(output_table.asset)
output_table = DataFrames.combine(DataFrames.groupby(output_table, :asset)) do subgroup
_check_initial_storage_level!(subgroup, graph)
_interpolate_storage_level!(subgroup, :timestep)
end
end
output_table |> CSV.write(output_file)
# TODO: Fix output of storage_level_intra_rp
# output_file = joinpath(output_folder, "storage-level-intra-rp.csv")
# output_table = DataFrames.select(
# dataframes[:storage_level_intra_rp],
# :asset,
# :rep_period,
# :timesteps_block => :timestep,
# )
# output_table.value = solution.storage_level_intra_rp
# if !isempty(output_table.asset)
# output_table = DataFrames.combine(DataFrames.groupby(output_table, :asset)) do subgroup
# _check_initial_storage_level!(subgroup, graph)
# _interpolate_storage_level!(subgroup, :timestep)
# end
# end
# output_table |> CSV.write(output_file)

output_file = joinpath(output_folder, "storage-level-inter-rp.csv")
output_table =
19 changes: 15 additions & 4 deletions src/model-preparation.jl
@@ -246,7 +246,14 @@ function add_expression_terms_intra_rp_constraints!(
end
# Sum the corresponding flows from the workspace
for row in eachrow(sub_df)
row[case.col_name] = agg(@view workspace[row.timesteps_block])
# TODO: This is a hack to handle constraint tables that still have timesteps_block
# In particular, storage_level_intra_rp
if haskey(row, :timesteps_block)
row[case.col_name] = agg(@view workspace[row.timesteps_block])
else
row[case.col_name] =
agg(@view workspace[row.time_block_start:row.time_block_end])
end
if conditions_to_add_min_outgoing_flow_duration
row[:min_outgoing_flow_duration] = outgoing_flow_durations
end
@@ -474,8 +481,12 @@ function add_expressions_to_dataframe!(
use_highest_resolution = false,
multiply_by_duration = true,
)
# TODO: storage_level_intra_rp is serving as constraint indices
# This should be fixed when:
# - the constraint is separate from the variable
# - the incoming and outgoing flows are stored outside the DF
add_expression_terms_intra_rp_constraints!(
dataframes[:storage_level_intra_rp],
variables[:storage_level_intra_rp].indices,
variables[:flow],
expression_workspace,
representative_periods,
@@ -571,10 +582,10 @@
model[:outgoing_flow_lowest_resolution] = dataframes[:lowest].outgoing_flow
incoming_flow_lowest_storage_resolution_intra_rp =
model[:incoming_flow_lowest_storage_resolution_intra_rp] =
dataframes[:storage_level_intra_rp].incoming_flow
variables[:storage_level_intra_rp].indices.incoming_flow
outgoing_flow_lowest_storage_resolution_intra_rp =
model[:outgoing_flow_lowest_storage_resolution_intra_rp] =
dataframes[:storage_level_intra_rp].outgoing_flow
variables[:storage_level_intra_rp].indices.outgoing_flow
incoming_flow_highest_in_out_resolution =
model[:incoming_flow_highest_in_out_resolution] =
dataframes[:highest_in_out].incoming_flow
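The shim added in `add_expression_terms_intra_rp_constraints!` can be pictured in isolation: rows from the old DataFrame path still carry a `timesteps_block` range, while the DuckDB-built indices expose explicit `time_block_start`/`time_block_end` columns. The following is a minimal sketch under those assumptions; the workspace, the tables, and the `block_range` helper are illustrative, not package API.

```julia
using DataFrames

# Stand-ins for the per-timestep expression workspace and the aggregation function.
workspace = collect(1.0:10.0)
agg = sum

# Old-style rows (range column) vs. new-style rows (explicit start/end columns).
old_style = DataFrame(timesteps_block = [1:3, 4:6])
new_style = DataFrame(time_block_start = [1, 4], time_block_end = [3, 6])

# Illustrative helper: pick whichever representation the row carries.
block_range(row) =
    haskey(row, :timesteps_block) ? row.timesteps_block :
    (row.time_block_start:row.time_block_end)

for df in (old_style, new_style), row in eachrow(df)
    println(agg(@view workspace[block_range(row)]))  # prints 6.0 then 15.0 for both tables
end
```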
15 changes: 8 additions & 7 deletions src/solve-model.jl
@@ -42,11 +42,12 @@ function solve_model!(
graph[a].investment_integer_storage_energy[y] ? round(Int, value) : value
end

for row in eachrow(energy_problem.dataframes[:storage_level_intra_rp])
a, rp, timesteps_block, value =
row.asset, row.rep_period, row.timesteps_block, row.solution
graph[a].storage_level_intra_rp[(rp, timesteps_block)] = value
end
# TODO: fix this
# for row in eachrow(energy_problem.dataframes[:storage_level_intra_rp])
# a, rp, timesteps_block, value =
# row.asset, row.rep_period, row.timesteps_block, row.solution
# graph[a].storage_level_intra_rp[(rp, timesteps_block)] = value
# end

for row in eachrow(energy_problem.dataframes[:storage_level_inter_rp])
a, pb, value = row.asset, row.periods_block, row.solution
@@ -102,7 +103,7 @@ function solve_model!(dataframes, model, args...; kwargs...)

# TODO: fix this later
# dataframes[:flow].solution = solution.flow
dataframes[:storage_level_intra_rp].solution = solution.storage_level_intra_rp
# dataframes[:storage_level_intra_rp].solution = solution.storage_level_intra_rp
dataframes[:storage_level_inter_rp].solution = solution.storage_level_inter_rp
dataframes[:max_energy_inter_rp].solution = solution.max_energy_inter_rp
dataframes[:min_energy_inter_rp].solution = solution.min_energy_inter_rp
@@ -196,7 +197,7 @@ function solve_model(
Dict(k => JuMP.value(v) for (k, v) in variables[:assets_investment].lookup),
Dict(k => JuMP.value(v) for (k, v) in variables[:assets_investment_energy].lookup),
Dict(k => JuMP.value(v) for (k, v) in variables[:flows_investment].lookup),
JuMP.value.(model[:storage_level_intra_rp]),
JuMP.value.(variables[:storage_level_intra_rp].container),
JuMP.value.(model[:storage_level_inter_rp]),
JuMP.value.(model[:max_energy_inter_rp]),
JuMP.value.(model[:min_energy_inter_rp]),
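A sketch of the assumed solution-extraction pattern after this change: values are read by broadcasting `JuMP.value` over the variable's `container` and can then be paired with its `indices` table, roughly what the now-commented `dataframes[:storage_level_intra_rp].solution` assignment used to provide. Everything below (the tiny model, HiGHS as solver) is illustrative only.

```julia
using DataFrames, JuMP, HiGHS

model = Model(HiGHS.Optimizer)
set_silent(model)

# Illustrative indices table with one row per storage-level variable.
indices = DataFrame(
    index = 1:2,
    asset = ["battery", "battery"],
    time_block_start = [1, 4],
    time_block_end = [3, 6],
)
container = [@variable(model, lower_bound = 0) for _ in eachrow(indices)]
@constraint(model, [i in 1:2], container[i] >= i)
@objective(model, Min, sum(container))
optimize!(model)

# Read the solution from the container and pair it with the indices table.
solution = copy(indices)
solution.value = JuMP.value.(container)
```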
14 changes: 7 additions & 7 deletions src/time-resolution.jl
@@ -36,13 +36,13 @@ function compute_constraints_partitions(graph, representative_periods, years)
strategy = :lowest,
asset_filter = (a, y) -> graph[a].type in ["conversion", "producer"],
),
(
name = :storage_level_intra_rp,
partitions = _all,
strategy = :lowest,
asset_filter = (a, y) ->
graph[a].type == "storage" && !get(graph[a].is_seasonal, y, false),
),
# (
# name = :storage_level_intra_rp,
# partitions = _all,
# strategy = :lowest,
# asset_filter = (a, y) ->
# graph[a].type == "storage" && !get(graph[a].is_seasonal, y, false),
# ),
(
name = :lowest_in_out,
partitions = _allflows,
97 changes: 96 additions & 1 deletion src/variables/create.jl
@@ -4,7 +4,6 @@ export compute_variables_indices
# The signature should be something like `...(connection; assets_data="t_assets_data", ...)`
function compute_variables_indices(connection, dataframes)
variables = Dict(
:storage_level_intra_rp => TulipaVariable(dataframes[:storage_level_intra_rp]),
:storage_level_inter_rp => TulipaVariable(dataframes[:storage_level_inter_rp]),
:is_charging => TulipaVariable(dataframes[:lowest_in_out]),
)
@@ -49,6 +48,102 @@ function compute_variables_indices(connection, dataframes)
) |> DataFrame,
)

# TODO: Move this to some decent place
# This table is the union of all assets, incoming flows and outgoing flows
DuckDB.execute(
connection,
"CREATE OR REPLACE TEMP TABLE t_union AS
SELECT asset, year, rep_period, time_block_start, time_block_end
FROM asset_time_resolution
UNION
SELECT from_asset as asset, year, rep_period, time_block_start, time_block_end
FROM flow_time_resolution
UNION
SELECT to_asset as asset, year, rep_period, time_block_start, time_block_end
FROM flow_time_resolution
ORDER BY asset, year, rep_period, time_block_start, time_block_end DESC
",
)
# The logic:
# - t_union has groups in order, then s:e ordered by s increasing and e decreasing
# - in a group (asset, year, rep_period) we can take the first range then
# - continue in the sequence selecting the next largest e
DuckDB.execute(
connection,
"CREATE OR REPLACE TABLE storage_level_intra_rp(
asset STRING,
year INT,
rep_period INT,
time_block_start INT,
time_block_end INT
)",
)
appender = DuckDB.Appender(connection, "storage_level_intra_rp")
function _append_helper(appender, group, s, e)
for x in group
DuckDB.append(appender, x)
end
DuckDB.append(appender, s)
DuckDB.append(appender, e)
DuckDB.end_row(appender)
end
s = 0
e_candidate = 0
current_group = ("", 0, 0)
@timeit to "append storage_level_intra_rp rows" for row in DuckDB.query(
connection,
"SELECT * FROM t_union
LEFT JOIN assets_data ON
t_union.asset=assets_data.name
AND t_union.year=assets_data.year
LEFT JOIN graph_assets_data ON
t_union.asset=graph_assets_data.name
WHERE
graph_assets_data.type='storage'
AND assets_data.is_seasonal=false",
)
if (row.asset, row.year, row.rep_period) != current_group
# New group, create the last entry
# Except for the initial case and when it was already added
if s != 0 && s <= e_candidate
_append_helper(appender, current_group, s, e_candidate)
end
# Start of a new group
current_group = (row.asset, row.year, row.rep_period)
e_candidate = row.time_block_end
s = 1
end
if row.time_block_start > s
# Since it's ordered, we ran out of candidates, so this marks the beginning of a new section
# Then, let's append and update
_append_helper(appender, current_group, s, e_candidate)
s = e_candidate + 1
e_candidate = row.time_block_end
else
# This row has a candidate
e_candidate = max(e_candidate, row.time_block_end)
end
end

# Add the last entry
if s > 0 && s <= e_candidate # Being safe
_append_helper(appender, current_group, s, e_candidate)
end
DuckDB.close(appender)
variables[:storage_level_intra_rp] = TulipaVariable(
DuckDB.query(
connection,
"CREATE OR REPLACE TEMP SEQUENCE id START 1;
SELECT
nextval('id') as index,
asset,
year,
rep_period,
time_block_start,
time_block_end
FROM storage_level_intra_rp",
) |> DataFrame,
)
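The appender loop above implements the rule stated in the comment block: within a `(asset, year, rep_period)` group, take the first range, keep extending the candidate end while ranges still start at or before `s`, and close a block once a range starts past `s`. Below is a small in-memory sketch of the same idea in plain Julia; `lowest_resolution_blocks` is an illustrative helper (not part of the package) and assumes the ranges come from valid partitions starting at 1.

```julia
# Illustrative only: mirrors the appender loop for a single (asset, year, rep_period) group.
function lowest_resolution_blocks(blocks::Vector{UnitRange{Int}})
    # t_union delivers rows ordered by start ascending, then end descending.
    sorted = sort(blocks; by = b -> (first(b), -last(b)))
    result = UnitRange{Int}[]
    s, e_candidate = 1, 0
    for b in sorted
        if first(b) > s
            # No remaining range starts at or before s: close the current block.
            push!(result, s:e_candidate)
            s = e_candidate + 1
            e_candidate = last(b)
        else
            # This range can still extend the candidate end of the current block.
            e_candidate = max(e_candidate, last(b))
        end
    end
    if s <= e_candidate  # close the final block, as the guarded append above does
        push!(result, s:e_candidate)
    end
    return result
end

# Asset blocks 1:6, 7:12 combined with flow blocks 1:3, 4:6, 7:9, 10:12 merge to [1:6, 7:12]:
lowest_resolution_blocks([1:6, 7:12, 1:3, 4:6, 7:9, 10:12])
```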

variables[:flows_investment] = TulipaVariable(DuckDB.query(
connection,
"SELECT