Commit d93b27b2 authored by Juha Kiviluoma's avatar Juha Kiviluoma
Browse files

Merge branch 'dev' into show_resdemand

parents 678a0202 575cc61e
......@@ -14,7 +14,7 @@ solvelink = %Solvelink.Loadlibrary% // Solvelink controls how the problem is pa
threads = -1 // How many cores the solver can use: 0 = all cores; negative values = all cores - n
$ifi not '%debug%' == 'yes'
$ife not %debug%>1
solprint = Silent // Controls solution file outputs - debug mode will be more verbose
;
$title Backbone
$ontext
Backbone - chronological energy systems model
Copyright (C) 2016 - 2018 VTT Technical Research Centre of Finland
Copyright (C) 2016 - 2019 VTT Technical Research Centre of Finland
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
......@@ -21,22 +21,23 @@ Created by:
Juha Kiviluoma
Erkka Rinne
Topi Rasku
Niina Helisto
Niina Helistö
- Based on Stochastic Model Predictive Control method [1].
- Enables multiple different models (m) to be implemented by changing
the temporal structure of the model.
the temporal structure of the model. (MULTI-MODEL RUNS TO BE IMPLEMENTED)
- Time steps (t) can vary in length.
- Short term forecast stochasticity (f) and longer term statistical uncertainty (s).
- Can handle ramp based dispatch in addition to energy blocks.
- Can handle ramp based dispatch in addition to energy blocks. (TO BE IMPLEMENTED)
GAMS command line arguments
--debug=[yes|no]
Switch on/off debugging mode. In debug mode, writes debug.gdx
with all symbols as well as a gdx file for each solution containing
model parameters, variables and equations.
--debug=[0|1|2]
Set level of debugging information. Default is 0, in which case no extra information is
saved or displayed. At level 1, file 'debug.gdx' containing all GAMS symbols
is written at the end of execution. At level 2, debug information is written
for each solve separately.
--diag=[yes|no]
Switch on/off diagnostics. Writes some additional diagnostic results in
......@@ -46,13 +47,9 @@ GAMS command line arguments
Do not solve the model, just do preliminary calculations.
For testing purposes.
--<name of model parameter>=<value>
Set model parameter value. See file inc/setting_sets.gms for available
parameters.
--<name of model feature>=[yes|no]
Switch model features on/off. See file inc/setting_sets.gms for available
features.
--penalty=<value>
Changes the value of the penalty cost. Default penalty value is 1e9
if not provided.
--input_dir=<path>
Directory to read input from. Defaults to './input'.
......@@ -70,6 +67,9 @@ References
==========================================================================
$offtext
* Set default debugging level
$if not set debug $setglobal debug 0
* Default values for input and output dir
$if not set input_dir $setglobal input_dir 'input'
$if not set output_dir $setglobal output_dir 'output'
......@@ -133,7 +133,7 @@ $iftheni.dummy not %dummy% == 'yes'
$$include 'inc/3f_afterSolve.gms' // Post-processing variables after the solve
$$include 'inc/4a_outputVariant.gms' // Store results from the loop
$endif.dummy
$iftheni.debug '%debug%' == 'yes'
$ifthene.debug %debug%>1
putclose gdx;
put_utility 'gdxout' / '%output_dir%/' mSolve.tl:0 '-' tSolve.tl:0 '.gdx';
execute_unload
......@@ -159,7 +159,7 @@ execute_unload '%output_dir%/results.gdx',
$$include 'defOutput/resultSymbols.inc'
;
*$ifi '%debug%' == 'yes' execute_unload 'output/debug.gdx';
$ife %debug%>0
execute_unload '%output_dir%/debug.gdx';
if(errorcount > 0, abort errorcount);
......
......@@ -2,6 +2,14 @@
All notable changes to this project will be documented in this file.
## [Unreleased]
### Changed
- Suppress output from SCENRED2
## [1.0.3] - 2019-02-05
### Fixed
- Only selects forecasts with positive probability for the solve
## [1.0.2] - 2019-02-04
### Added
- New model setting `dataLength` to set the length of time series data before it is
  recycled. A warning is issued if this is not defined, and the value is then calculated automatically from data.
......@@ -15,6 +23,9 @@ All notable changes to this project will be documented in this file.
### Changed
- Automatic calculation of parameter `dt_circular` takes into account time steps
only from `t000001` onwards.
- Debug mode yes/no changed to debug levels 0, 1 or 2. With higher level produces
  more information. Default is 0, in which case no extra files are written (not even *debug.gdx*).
Set debug level with command line parameter `--debug=LEVEL`.
### Fixed
- Calculation of parameter `df_central`
......@@ -25,4 +36,6 @@ All notable changes to this project will be documented in this file.
### Changed
- Major updates to data structures etc.
[Unreleased]: https://gitlab.vtt.fi/backbone/backbone/compare/v1.0...dev
[Unreleased]: https://gitlab.vtt.fi/backbone/backbone/compare/v1.0.3...dev
[1.0.3]: https://gitlab.vtt.fi/backbone/backbone/compare/v1.0.2...v1.0.3
[1.0.2]: https://gitlab.vtt.fi/backbone/backbone/compare/v1.0...v1.0.2
......@@ -33,6 +33,7 @@ Model building /
* q_startuptype
* q_onlineLimit
* q_onlineMinUptime
* q_onlineCyclic
* q_minDown
* q_genRamp
* q_genRampChange
......
......@@ -35,6 +35,7 @@ Model invest /
q_offlineAfterShutDown
q_onlineLimit
q_onlineMinUptime
q_onlineCyclic
q_genRamp
q_rampUpLimit
q_rampDownLimit
......
......@@ -35,6 +35,7 @@ Model schedule /
q_onlineOnStartUp
q_offlineAfterShutDown
q_onlineMinUptime
* q_onlineCyclic
q_genRamp
q_rampUpLimit
q_rampDownLimit
......
......@@ -114,8 +114,8 @@ if (mType('schedule'),
mTimeseries_loop_read('schedule', 'ts_reserveDemand') = no;
mTimeseries_loop_read('schedule', 'ts_unit') = no;
mTimeseries_loop_read('schedule', 'ts_effUnit') = no;
mTimeseries_loop_read('schedule', 'ts_effGroupUnit') = no;
* mTimeseries_loop_read('schedule', 'ts_effUnit') = no; // THESE ARE CURRENTLY DISABLED, ENABLE AT OWN RISK
* mTimeseries_loop_read('schedule', 'ts_effGroupUnit') = no; // THESE ARE CURRENTLY DISABLED, ENABLE AT OWN RISK
mTimeseries_loop_read('schedule', 'ts_influx') = no;
mTimeseries_loop_read('schedule', 'ts_cf') = no;
mTimeseries_loop_read('schedule', 'ts_reserveDemand') = no;
......
......@@ -57,11 +57,13 @@ p_uStartup
p_u_maxOutputInLastRunUpInterval
p_u_runUpTimeIntervals
dt_toStartup
p_ut_runUp
p_uCounter_runUpMin
p_uCounter_runUpMax
p_u_maxOutputInFirstShutdownInterval
p_u_shutdownTimeIntervals
dt_toShutdown
p_ut_shutdown
p_uCounter_shutdownMin
p_uCounter_shutdownMax
* Variables
v_obj
......@@ -101,6 +103,7 @@ v_invest_MIP
q_onlineOnStartUp
q_offlineAfterShutDown
q_onlineMinUptime
q_onlineCyclic
q_genRamp
q_rampUpLimit
q_rampDownLimit
......
......@@ -140,12 +140,12 @@ Sets
// Other Features
feature "Set of optional model features" /
findStorageStart "Solve for optimal storage start levels"
* findStorageStart "Solve for optimal storage start levels" // NOT IMPLEMENTED
storageValue "Use storage value instead of fixed control"
storageEnd "Expected storage end levels greater than starting levels"
addOn "Use StoSSch as a storage add-on to a larger model"
extraRes "Use extra tertiary reserves for error in elec. load during time step"
rampSched "Use power based scheduling"
* storageEnd "Expected storage end levels greater than starting levels" // NOT IMPLEMENTED
* addOn "Use StoSSch as a storage add-on to a larger model" // NOT IMPLEMENTED
* extraRes "Use extra tertiary reserves for error in elec. load during time step" // NOT IMPLEMENTED
* rampSched "Use power based scheduling" // PARTIALLY IMPLEMENTED
scenRed "Reduce number of long-term scenarios using GAMS SCENRED2"
/
......@@ -185,18 +185,8 @@ Parameter params(*) /
$if exist 'params.inc' $include 'params.inc'
/;
// Activate model features if found
Set active(mType, feature) "Set membership tells active model features" /
$if exist 'features.inc' $include 'features.inc'
/;
// Parse command line options and store values for features
$if set findStorageStart active('findStorageStart') = %findStorageStart%;
$if set storageValue active('storageValue') = %storageValue%;
$if set storageEnd active('storageEnd') = %storageEnd%;
$if set addOn active('addOn') = %addOn%;
$if set extraRes active('extraRes') = %extraRes%;
$if set rampSched active('rampSched') = %rampSched%;
// Features
Set active(mType, feature) "Set membership tells active model features";
* =============================================================================
* --- Parameter Set Definitions -----------------------------------------------
......@@ -313,6 +303,13 @@ param_unit "Set of possible data parameters for units" /
lastStepNotAggregated "Last time step when the unit is not yet aggregated - calculated in inputsLoop.gms for units that have aggregation"
/
param_eff "Parameters used for unit efficiency approximations" /
lb "Minimum load of the unit"
op "Maximum load of the unit, or the operating point of the SOS2 variable in the piecewise linear heat rate approximation (lambda)"
section "Operational heat rate of the unit, or the SOS2 variable in the piecewise linear heat rate approximation (lambda)"
slope "Heat rate parameter representing no-load fuel consumption"
/
param_fuel "Parameters for fuels" /
main "Main fuel of the unit - unless input fuels defined as grids"
startup "Startup fuel of the unit, if exists. Can be the same as main fuel - consumption using startupFuelCons"
......
......@@ -48,6 +48,7 @@ Sets
unittype "Unit technology types"
unit_investLP(unit) "Units with continuous investments allowed"
unit_investMIP(unit) "Units with integer investments allowed"
unit_timeseries(unit) "Units with time series enabled"
* --- Nodes -------------------------------------------------------------------
node_spill(node) "Nodes that can spill; used to remove v_spill variables where not relevant"
......@@ -115,7 +116,8 @@ Sets
modelSolves(mType, t) "when different models are to be solved"
f_solve(f) "forecasts in the model to be solved next"
t_latestForecast(t) "t for the latest forecast that is available"
gnss_bound(grid, node, s, s) "Bound the samples so that the state at the last interval of the first sample equals the state at the first interval of the second sample"
gnss_bound(grid, node, s, s) "Bound the samples so that the node state at the last interval of the first sample equals the state at the first interval of the second sample"
uss_bound(unit, s, s) "Bound the samples so that the unit online state at the last interval of the first sample equals the state at the first interval of the second sample"
s_parallel(s) "Samples which are treated as parallel"
s_active(s) "Samples with non-zero probability in the current model solve"
ss(s, s) "Previous sample of sample"
......@@ -143,7 +145,9 @@ Sets
effLevelGroupUnit(effLevel, effSelector, unit) "What efficiency selectors are in use for each unit at each efficiency representation level"
effGroupSelectorUnit(effSelector, unit, effSelector) "Group name for efficiency selector set, e.g. Lambda02 contains Lambda01 and Lambda02"
mSettingsReservesInUse(mType, *, up_down) "Reserves that are used in each model type"
unitCounter(unit, counter) "Counter subset used for restricting excessive looping over the counter set when defining unit startup/shutdown/online time restrictions"
unitCounter(unit, counter) "Counter used for restricting excessive looping over the counter set when defining unit startup/shutdown/online time restrictions"
runUpCounter(unit, counter) "Counter used for unit run-up intervals"
shutdownCounter(unit, counter) "Counter used for unit shutdown intervals"
* --- Sets used for grouping of units, transfer links, nodes, etc. ------------
group "A group of units, transfer links, nodes, etc."
......
......@@ -60,25 +60,29 @@ Parameters
p_fuelEmission(fuel, emission) "Fuel emission content"
p_uFuel(unit, param_fuel, fuel, param_unitFuel) "Parameters interacting between units and fuels"
p_unitFuelEmissionCost(unit, fuel, emission) "Emission costs for each unit, calculated from input data"
p_effUnit(effSelector, unit, effSelector, *) "Data for piece-wise linear efficiency blocks"
p_effGroupUnit(effSelector, unit, *) "Unit data specific to a efficiency group (e.g. left border of the unit)"
p_effUnit(effSelector, unit, effSelector, param_eff) "Data for piece-wise linear efficiency blocks"
p_effGroupUnit(effSelector, unit, param_eff) "Unit data specific to an efficiency group (e.g. left border of the unit)"
p_uNonoperational(unit, starttype, min_max) "Non-operational time after being shut down before start up"
p_uStartup(unit, starttype, cost_consumption) "Startup cost and fuel consumption"
p_u_maxOutputInLastRunUpInterval(unit) "Maximum output in the last interval for the run-up to min. load (p.u.)"
p_u_maxRampSpeedInLastRunUpInterval(unit) "Maximum ramp speed in the last interval for the run-up to min. load (p.u.)"
p_u_runUpTimeIntervals(unit) "Time steps required for the run-up phase"
p_u_runUpTimeIntervalsCeil(unit) "Ceiling of time steps required for the run-up phase"
p_ut_runUp(unit, t) "Output for the time steps where the unit is being started up to the minimum load (minimum output in the last interval) (p.u.)"
p_uCounter_runUpMin(unit, counter) "Minimum output for the time steps where the unit is being started up to the minimum load (minimum output in the last interval) (p.u.)"
p_uCounter_runUpMax(unit, counter) "Maximum output for the time steps where the unit is being started up to the minimum load (minimum output in the last interval) (p.u.)"
p_u_maxOutputInFirstShutdownInterval(unit) "Maximum output in the first interval for the shutdown from min. load (p.u.)"
p_u_shutdownTimeIntervals(unit) "Time steps required for the shutdown phase"
p_u_shutdownTimeIntervalsCeil(unit) "Ceiling of time steps required for the shutdown phase"
p_ut_shutdown(unit, t) "Output for the time steps where the unit is being shut down from the minimum load (minimum output in the first interval) (p.u.)"
p_u_shutdownTimeIntervalsCeil(unit) "Ceiling of time steps required for the shutdown phase"
p_uCounter_shutdownMin(unit, counter) "Minimum output for the time steps where the unit is being shut down from the minimum load (minimum output in the first interval) (p.u.)"
p_uCounter_shutdownMax(unit, counter) "Maximum output for the time steps where the unit is being shut down from the minimum load (minimum output in the first interval) (p.u.)"
// Time dependent unit & fuel parameters
ts_unit(unit, *, f, t) "Time dependent unit data, where energy type doesn't matter"
ts_effUnit(effSelector, unit, effSelector, *, f, t) "Time dependent data for piece-wise linear efficiency blocks"
ts_effGroupUnit(effSelector, unit, *, f, t) "Time dependent efficiency group unit data"
ts_unit(unit, param_unit, f, t) "Time dependent unit data, where energy type doesn't matter"
ts_effUnit(effSelector, unit, effSelector, param_eff, f, t) "Time dependent data for piece-wise linear efficiency blocks"
ts_effGroupUnit(effSelector, unit, param_eff, f, t) "Time dependent efficiency group unit data"
// Alias used for interval aggregation
ts_unit_(unit, *, f, t)
ts_unit_(unit, param_unit, f, t)
* ts_effUnit_(effSelector, unit, effSelector, param_eff, f, t)
* ts_effGroupUnit_(effSelector, unit, param_eff, f, t)
;
* --- Probability -------------------------------------------------------------
......@@ -105,6 +109,7 @@ Parameters
dt_starttypeUnitCounter(starttype, unit, counter) "Displacement needed to account for starttype constraints (in time steps)"
dt_downtimeUnitCounter(unit, counter) "Displacement needed to account for downtime constraints (in time steps)"
dt_uptimeUnitCounter(unit, counter) "Displacement needed to account for uptime constraints (in time steps)"
dt_trajectory(counter) "Run-up/shutdown trajectory time index displacement"
dt_sampleOffset(*, node, *, s) "Time offset to make periodic time series data (for grid/flow, unit, label) to go into different samples"
// Forecast displacement arrays
......@@ -150,9 +155,9 @@ Parameters
ts_fuelPrice_(fuel, t) "Mean fuel price time during time step (EUR/MWh)"
// Aliases used for updating data in inputsLoop.gms
ts_unit_update(unit, *, f, t)
ts_effUnit_update(effSelector, unit, effSelector, *, f, t)
ts_effGroupUnit_update(effSelector, unit, *, f, t)
ts_unit_update(unit, param_unit, f, t)
ts_effUnit_update(effSelector, unit, effSelector, param_eff, f, t)
ts_effGroupUnit_update(effSelector, unit, param_eff, f, t)
ts_influx_update(grid, node, f, t)
ts_cf_update(flow, node, f, t)
ts_reserveDemand_update(restype, up_down, node, f, t)
......
......@@ -67,6 +67,7 @@ $loaddc gnGroup
$loaddc p_groupPolicy
$loaddc p_groupPolicy3D
$loaddc gnss_bound
$loaddc uss_bound
$gdxin
$ifthen exist '%input_dir%/includeInputData_ext.inc'
......@@ -176,6 +177,9 @@ unitStarttype(unit, starttypeConstrained)${ p_unit(unit, 'startWarmAfterXhours')
}
= yes;
// Units with time series data enabled
unit_timeseries(unit)${ p_unit(unit, 'useTimeseries') }
= yes;
* --- Unit Related Parameters -------------------------------------------------
......
......@@ -63,6 +63,7 @@ equations
q_offlineAfterShutdown(s, unit, f, t) "Unit must be offline after shutting down"
q_onlineLimit(mType, s, unit, f, t) "Number of online units limited for units with startup constraints, minimum down time, or investment possibility"
q_onlineMinUptime(mType, s, unit, f, t) "Number of online units constrained for units with minimum up time"
q_onlineCyclic(unit, s, s, mType) "Cyclic online state bound for the first and the last states of samples"
q_genRamp(mType, s, grid, node, unit, f, t) "Record the ramps of units with ramp restrictions or costs"
q_rampUpLimit(mType, s, grid, node, unit, f, t) "Up ramping limited for units"
q_rampDownLimit(mType, s, grid, node, unit, f, t) "Down ramping limited for units"
......@@ -90,7 +91,7 @@ equations
q_stateUpwardLimit(grid, node, mType, s, f, t) "Limit the commitments of a node with a state variable to the available headrooms"
q_stateDownwardLimit(grid, node, mType, s, f, t) "Limit the commitments of a node with a state variable to the available headrooms"
q_boundStateMaxDiff(grid, node, node, mType, s, f, t) "Node state variables bounded by other nodes (maximum state difference)"
q_boundCyclic(grid, node, s, s, mType) "Cyclic bound for the first and the last states of samples"
q_boundCyclic(grid, node, s, s, mType) "Cyclic node state bound for the first and the last states of samples"
// Policy
q_inertiaMin(group, s, f, t) "Minimum inertia in a group of nodes"
......
......@@ -179,7 +179,7 @@ q_obj ..
) // END sum(gn2n_directional)
) // END sum(t_invest)
$ifthen exist '%input_dir%/2c_additional_objective_terms.gms'
$ifthen.addterms exist '%input_dir%/2c_additional_objective_terms.gms'
$$include '%input_dir%/2c_additional_objective_terms.gms';
$endif
$endif.addterms
;
This diff is collapsed.
......@@ -62,12 +62,14 @@ $offtext
);
// Calculate which samples are treated as parallel and the previous samples
loop(ms_initial(m, s_),
loop(ms(m, s)$(not sameas(s, s_)),
loop(ms_initial(m, s_), // Select the root sample
loop(ms(m, s)$(not sameas(s, s_)), // Select other samples than root
// If two samples share same starting time, treat them as parallel
if(msStart(m, s) = msStart(m, s - 1),
s_parallel(s) = yes;
s_parallel(s - 1) = yes;
);
// Set previous samples for samples
if(msEnd(m, s_) = msStart(m, s), ss(s, s_) = yes);
if(msEnd(m, s - 1) = msStart(m, s), ss(s, s - 1) = yes);
);
......@@ -80,13 +82,16 @@ $offtext
if (not sum(f, mf(m, f)), // unless they have been provided as input
mf(m, f)$(ord(f) <= 1 + mSettings(m, 'forecasts')) = yes; // realization needs one f, therefore 1 + number of forecasts
);
msf(m, s, f)$(ms(m, s) and mf(m, f)) = yes;
msf(m, s_parallel(s), f) = mf_central(m, f); // Parallel samples only have central forecast
// Select the forecasts included in the modes to be solved
f_solve(f)${mf(m,f) and p_mfProbability(m, f)}
= yes;
// Select combinations of models, samples and forecasts to be solved
msf(m, s, f_solve(f))$(ms(m, s) and mf(m, f)) = yes;
msf(m, s_parallel(s), f_solve(f)) = mf_central(m, f); // Parallel samples only have central forecast
// Check the modelSolves for preset patterns for model solve timings
// If not found, then use mSettings to set the model solve timings
if(sum(modelSolves(m, t_full(t)), 1) = 0,
......@@ -115,6 +120,8 @@ $offtext
if(not mInterval(m, 'lastStepInIntervalBlock', counter),
continueLoop = 0;
elseif mod(mInterval(m, 'lastStepInIntervalBlock', counter) - mInterval(m, 'lastStepInIntervalBlock', counter-1), mInterval(m, 'stepsPerInterval', counter)),
put log "!!! Error occurred on interval block ", counter.tl:0 /;
put log "!!! Abort: stepsPerInterval is not evenly divisible within the interval"
abort "stepsPerInterval is not evenly divisible within the interval", m, continueLoop;
else
continueLoop = continueLoop + 1;
......@@ -125,7 +132,7 @@ $offtext
if(mSettings(m, 'dataLength'),
tmp = max(mSettings(m, 'dataLength') + 1, tmp); // 'dataLength' increased by one to account for t000000 in ord(t)
else
put log '!!! mSettings(m, dataLength) is not defined! Calculating dataLength based on ts_influx and ts_node.' /;
put log '!!! Warning: mSettings(m, dataLength) is not defined! Calculating dataLength based on ts_influx and ts_node.' /;
// Calculate the length of the time series data (based on realized forecast)
option clear = tt; // Find the time steps with input time series data (ts_influx and ts_node)
loop(gn(grid, node),
......@@ -152,13 +159,13 @@ dt_circular(t_full(t))${ ord(t) > tmp }
* --- Initialize Unit Efficiency Approximations -------------------------------
* =============================================================================
loop(m,
* --- Unit Aggregation --------------------------------------------------------
unitAggregator_unit(unit, unit_)$sum(effLevel$(mSettingsEff(m, effLevel)), unitUnitEffLevel(unit, unit_, effLevel)) = yes;
// Define unit aggregation sets
// Define unit aggregation sets
unit_aggregator(unit)${ sum(unit_, unitAggregator_unit(unit, unit_)) }
= yes; // Set of aggregator units
unit_aggregated(unit)${ sum(unit_, unitAggregator_unit(unit_, unit)) }
......@@ -167,19 +174,20 @@ loop(m,
unit_noAggregate(unit)$unit_aggregated(unit) = no;
unit_noAggregate(unit)${ sum((unit_, effLevel), unitUnitEffLevel(unit, unit_, effLevel)) } = no;
// Process data for unit aggregations
// Aggregate maxGen as the sum of aggregated maxGen
// Process data for unit aggregations
// Aggregate maxGen as the sum of aggregated maxGen
p_gnu(grid, node, unit_aggregator(unit), 'maxGen')
= sum(unit_$unitAggregator_unit(unit, unit_),
+ p_gnu(grid, node, unit_, 'maxGen')
);
// Aggregate maxCons as the sum of aggregated maxCons
// Aggregate maxCons as the sum of aggregated maxCons
p_gnu(grid, node, unit_aggregator(unit), 'maxCons')
= sum(unit_$unitAggregator_unit(unit, unit_),
+ p_gnu(grid, node, unit_, 'maxCons')
);
* --- Calculate 'lastStepNotAggregated' for aggregated units and aggregator units
* --- Calculate 'lastStepNotAggregated' for aggregated units and aggregator units ---
loop(effLevel$mSettingsEff(m, effLevel),
loop(effLevel_${mSettingsEff(m, effLevel_) and ord(effLevel_) < ord(effLevel)},
p_unit(unit_aggregated(unit), 'lastStepNotAggregated')${ sum(unit_,unitUnitEffLevel(unit_, unit, effLevel)) }
......@@ -190,7 +198,7 @@ loop(m,
);
);
* --- Ensure that efficiency levels extend to the end of the model horizon and do not go beyond ----
* --- Ensure that efficiency levels extend to the end of the model horizon and do not go beyond ---
loop(m,
// First check how many efficiency levels there are and cut levels going beyond the t_horizon
......@@ -282,7 +290,7 @@ loop(effGroupSelectorUnit(effSelector, unit, effSelector_),
// Parameters for direct conversion units without online variables
if(effDirectOff(effSelector),
p_effUnit(effSelector, unit, effSelector, 'lb') = 0; // No min load for the DirectOff approximation
p_effUnit(effSelector, unit, effSelector, 'op') = smax(op, p_unit(unit, op));
p_effUnit(effSelector, unit, effSelector, 'op') = smax(op, p_unit(unit, op)); // Maximum operating point
p_effUnit(effSelector, unit, effSelector, 'slope') = 1 / smax(eff${p_unit(unit, eff)}, p_unit(unit, eff)); // Uses maximum found (nonzero) efficiency.
p_effUnit(effSelector, unit, effSelector, 'section') = 0; // No section for the DirectOff approximation
); // END if(effDirectOff)
......@@ -413,34 +421,33 @@ loop(m,
// Calculate time intervals needed for the run-up phase
tmp = [ p_unit(unit,'op00') / (p_unit(unit, 'rampSpeedToMinLoad') * 60) ] / mSettings(m, 'stepLengthInHours');
p_u_runUpTimeIntervals(unit) = tmp;
p_u_runUpTimeIntervalsCeil(unit) = ceil(p_u_runUpTimeIntervals(unit))
// Calculate output during the run-up phase
loop(t${ord(t)<=p_u_runUpTimeIntervalsCeil(unit)},
p_ut_runUp(unit, t) =
+ p_unit(unit, 'rampSpeedToMinLoad') * (ceil(p_u_runUpTimeIntervals(unit) - ord(t)) + 0.5)
* 60 // Unit conversion from [p.u./min] to [p.u./h]
* mSettings(m, 'stepLengthInHours')
);
// Combine output in the second last interval and the weighted average of rampSpeedToMinLoad and the smallest non-zero maxRampUp
p_u_maxOutputInLastRunUpInterval(unit) =
(
+ p_unit(unit, 'rampSpeedToMinLoad') * (tmp-floor(tmp)) * mSettings(m, 'stepLengthInHours')
+ smin(gnu(grid, node, unit)${p_gnu(grid, node, unit, 'maxRampUp')}, p_gnu(grid, node, unit, 'maxRampUp')) * (ceil(tmp)-tmp) * mSettings(m, 'stepLengthInHours')
+ p_unit(unit, 'rampSpeedToMinLoad')${not sum(gnu(grid, node, unit), p_gnu(grid, node, unit, 'maxRampUp'))} * (ceil(tmp)-tmp) * mSettings(m, 'stepLengthInHours')
)
* 60 // Unit conversion from [p.u./min] to [p.u./h]
+ sum(t${ord(t) = 2}, p_ut_runUp(unit, t));
// Maximum output in the last time interval of the run-up phase can't exceed the maximum capacity
p_u_maxOutputInLastRunUpInterval(unit) = min(p_u_maxOutputInLastRunUpInterval(unit), 1);
// Maximum ramp speed in the last time interval of the run-up phase is equal to maximum output after the time period minus the output on the previous time period in the run-up phase
p_u_maxRampSpeedInLastRunUpInterval(unit) = p_u_maxOutputInLastRunUpInterval(unit) - sum(t$[ord(t) = 2], p_ut_runUp(unit, t));
// Minimum output in the last time interval of the run-up phase equals minimum load
p_ut_runUp(unit, t)${ord(t) = 1} = p_unit(unit,'op00');
p_u_runUpTimeIntervalsCeil(unit) = ceil(p_u_runUpTimeIntervals(unit));
runUpCounter(unit, counter) // Store the required number of run-up intervals for each unit
${ ord(counter) <= p_u_runUpTimeIntervalsCeil(unit) }
= yes;
dt_trajectory(counter)
${ runUpCounter(unit, counter) }
= - ord(counter) + 1; // Runup starts immediately at v_startup
// Calculate minimum output during the run-up phase; partial intervals calculated using weighted averaging with min load
p_uCounter_runUpMin(runUpCounter(unit, counter))
= + p_unit(unit, 'rampSpeedToMinLoad')
* ( + min(ord(counter), p_u_runUpTimeIntervals(unit)) // Location on ramp
- 0.5 * min(p_u_runUpTimeIntervals(unit) - ord(counter) + 1, 1) // Average ramp section
)
* min(p_u_runUpTimeIntervals(unit) - ord(counter) + 1, 1) // Portion of time interval spent ramping
* mSettings(m, 'stepLengthInHours') // Ramp length in hours
* 60 // unit conversion from [p.u./min] to [p.u./h]
+ p_unit(unit, 'op00')${ not runUpCounter(unit, counter+1) } // Time potentially spent at min load during the last run-up interval
* ( p_u_runUpTimeIntervalsCeil(unit) - p_u_runUpTimeIntervals(unit) );
// Maximum output on the last run-up interval can be higher, otherwise the same as minimum.
p_uCounter_runUpMax(runUpCounter(unit, counter))
= p_uCounter_runUpMin(unit, counter);
p_uCounter_runUpMax(runUpCounter(unit, counter))${ not runUpCounter(unit, counter+1) }
= p_uCounter_runUpMax(unit, counter)
+ ( 1 - p_uCounter_runUpMax(unit, counter) )
* ( p_u_runUpTimeIntervalsCeil(unit) - p_u_runUpTimeIntervals(unit) );
); // END loop(unit)
); // END loop(m)
......@@ -452,39 +459,38 @@ loop(m,
// Calculate time intervals needed for the shutdown phase
tmp = [ p_unit(unit,'op00') / (p_unit(unit, 'rampSpeedFromMinLoad') * 60) ] / mSettings(m, 'stepLengthInHours');
p_u_shutdownTimeIntervals(unit) = tmp;
p_u_shutdownTimeIntervalsCeil(unit) = ceil(p_u_shutdownTimeIntervals(unit))
// Calculate output during the shutdown phase
loop(t${ord(t)<=p_u_shutdownTimeIntervalsCeil(unit)},
p_ut_shutdown(unit, t) =
+ p_unit(unit, 'rampSpeedFromMinLoad') * (ceil(p_u_shutdownTimeIntervals(unit) - ord(t) + 1))
* 60 // Unit conversion from [p.u./min] to [p.u./h]
* mSettings(m, 'stepLengthInHours')
);
// Combine output in the second interval and the weighted average of rampSpeedFromMinLoad and the smallest non-zero maxRampDown
p_u_maxOutputInFirstShutdownInterval(unit) =
(
+ p_unit(unit, 'rampSpeedFromMinLoad') * (tmp-floor(tmp)) * mSettings(m, 'stepLengthInHours')
+ smin(gnu(grid, node, unit)${p_gnu(grid, node, unit, 'maxRampDown')}, p_gnu(grid, node, unit, 'maxRampDown')) * (ceil(tmp)-tmp) * mSettings(m, 'stepLengthInHours')
+ p_unit(unit, 'rampSpeedFromMinLoad')${not sum(gnu(grid, node, unit), p_gnu(grid, node, unit, 'maxRampDown'))} * (ceil(tmp)-tmp) * mSettings(m, 'stepLengthInHours')
)
* 60 // Unit conversion from [p.u./min] to [p.u./h]
+ sum(t${ord(t) = 2}, p_ut_shutdown(unit, t));
// Maximum output in the first time interval of the shutdown phase can't exceed the maximum capacity
p_u_maxOutputInFirstShutdownInterval(unit) = min(p_u_maxOutputInFirstShutdownInterval(unit), 1);
// Minimum output in the first time interval of the shutdown phase equals minimum load
p_ut_shutdown(unit, t)${ord(t) = 1} = p_unit(unit,'op00');
p_u_shutdownTimeIntervalsCeil(unit) = ceil(p_u_shutdownTimeIntervals(unit));
shutdownCounter(unit, counter) // Store the required number of shutdown intervals for each unit
${ ord(counter) <= p_u_shutDownTimeIntervalsCeil(unit)}
= yes;
dt_trajectory(counter)
${ shutdownCounter(unit, counter) }
= - ord(counter) + 1; // Shutdown starts immediately at v_shutdown
// Calculate minimum output during the shutdown phase; partial intervals calculated using weighted average with zero load
p_uCounter_shutdownMin(shutdownCounter(unit, counter))
= + p_unit(unit, 'rampSpeedFromMinLoad')
* ( min(p_u_shutdownTimeIntervalsCeil(unit) - ord(counter) + 1, p_u_shutdownTimeIntervals(unit)) // Location on ramp
- 0.5 * min(p_u_shutdownTimeIntervals(unit) - p_u_shutdownTimeIntervalsCeil(unit) + ord(counter), 1) // Average ramp section
)
* min(p_u_shutdownTimeIntervals(unit) - p_u_shutdownTimeIntervalsCeil(unit) + ord(counter), 1) // Portion of time interval spent ramping
* mSettings(m, 'stepLengthInHours') // Ramp length in hours
* 60 // unit conversion from [p.u./min] to [p.u./h]
+ p_unit(unit, 'op00')${ not shutdownCounter(unit, counter-1) } // Time potentially spent at min load on the first shutdown interval
* ( p_u_shutdownTimeIntervalsCeil(unit) - p_u_shutdownTimeIntervals(unit) );
// Maximum output on the first shutdown interval can be higher, otherwise the same as minimum.
p_uCounter_shutdownMax(shutdownCounter(unit, counter))
= p_uCounter_shutdownMin(unit, counter);
p_uCounter_shutdownMax(shutdownCounter(unit, counter))${ not shutdownCounter(unit, counter-1) }
= p_uCounter_shutdownMax(unit, counter)
+ ( 1 - p_uCounter_shutdownMax(unit, counter) )
* ( p_u_shutdownTimeIntervalsCeil(unit) - p_u_shutdownTimeIntervals(unit) );
) // END loop(unit)
); // END loop(unit)
); // END loop(m)
* --- Unit Startup and Shutdown Counters --------------------------------------
// Initialize unitCounter
Option clear = unitCounter;
* --- Unit Starttype, Uptime and Downtime Counters ----------------------------
loop(m,
// Loop over units with online approximations in the model
......@@ -568,7 +574,7 @@ loop(m,
loop(fuel,
// Determine the time steps where the prices change
Option clear = tt;
tt(t_full(t))${ ts_fuelPriceChange(fuel ,t) }
tt(t)${ ts_fuelPriceChange(fuel ,t) }
= yes;
ts_fuelPrice(fuel, t_full(t)) = sum(tt(t_)${ ord(t_) <= ord(t) }, ts_fuelPriceChange(fuel, t_));
); // END loop(fuel)
......@@ -637,5 +643,12 @@ loop(m, // Not ideal, but multi-model functionality is not yet implemented
abort "There are insufficient effLevels in the effLevelGroupUnit data for all the defined mSettingsEff!";
); // END if(smax)
* --- Check if time intervals are aggregated before 't_trajectoryHorizon' -----
if (mInterval(m, 'lastStepInIntervalBlock', 'c000') < mSettings(m, 't_trajectoryHorizon')
OR (mInterval(m, 'stepsPerInterval', 'c000') > 1 and mSettings(m, 't_trajectoryHorizon') > 0),
put log '!!! Warning: Trajectories used on aggregated time steps! This could result in significant distortion of the trajectories.';
); // END if()
); // END loop(m)
......@@ -20,7 +20,7 @@ $offtext
* =============================================================================
// This is only done if debug mode is not specifically enabled
$iftheni.debug NOT '%debug%' == 'yes'
$ifthene.debug not %debug%>0
* --- Variables ---------------------------------------------------------------