Commit 74e1ba2b authored by Juha Kiviluoma's avatar Juha Kiviluoma
Browse files

Merge remote-tracking branch 'origin/dev' into dev_BB_in_Spine_VabiSys

# Conflicts:
#	.spinetoolbox/items/bb_parameters/filter_config.json
#	.spinetoolbox/items/bb_sets/filter_config.json
#	.spinetoolbox/project.json
#	spineToolbox.json
#	tools/bb1.3_to_spinedb.json
#	tools/bb_base_sets.json
parents f15055d3 a2bdcf63
......@@ -18,7 +18,6 @@ EfficiencyPiecewise.xlsx
# Data files
// Solution gap: the first one reached will end iteration
optca = 0 // Absolute gap between the found solution and the best possible solution
optcr = 0.0004 // Relative gap between the found solution and the best possible solution
solvelink = %Solvelink.Loadlibrary% // Solvelink controls how the problem is passed from GAMS to the solver. Loadlibrary constant means that the model is passed in core without the use of temporary files.
* profile = 8 // Profile will show the execution speed of statements at the defined depth within loops.
* bratio = 0.25 // How large share of the candidate elements need to be found for advanced basis in LP problems. Default 0.25.
* solveopt = merge // How solution values are stored after multiple solves. Default merge.
* savepoint = 1 // NOTE! Savepoint is controlled by Backbone model options.
threads = -1 // How many cores the solver can use: 0 = all cores; negative values = all cores - n
$ife not %debug%>1
solprint = Silent // Controls solution file outputs - debug mode will be more verbose
* =============================================================================
* --- Save investments results in file to be used in child setups -
* =============================================================================
* Output file streams
file f_changes /'output\'/;
f_changes.lw = 26; // Field width of set label output, default in GAMS is 12, increase as needed = 500; // Number of characters that may be placed on a single row of the page, default in GAMS is 255, increase as needed
put f_changes
* Do not allow investments in the child setups
put "p_unit('",, "', 'maxUnitCount') = 0;"/;
* Update the number of subunits in the child setups (rounded here to the nearest integer)
tmp = round(r_invest(unit), 0)
put "p_unit('",, "', 'unitCount') = p_unit('",, "', 'unitCount') + ", tmp, ";"/;
* Update capacity values in the child setups
loop(gnu(grid, node, unit)${r_invest(unit)},
tmp = round(r_invest(unit), 0) * p_gnu(grid, node, unit, 'unitSize');
put "p_gnu('",, "', '",, "', '",, "', 'capacity') = p_gnu('",, "', '",, "', '",, "', 'capacity') + ", tmp, ";"/;);
* Example updates for storage units (commented out at the moment, use names etc. that work in your case)
*p_gnBoundaryPropertiesForStates('battery_grid', 'battery_node', 'upwardLimit', 'useConstant') = 1;
*p_gnBoundaryPropertiesForStates('battery_grid', 'battery_node', 'upwardLimit', 'multiplier') = 1;
*p_gnBoundaryPropertiesForStates('battery_grid', 'battery_node', 'upwardLimit', 'constant')
* = p_gnu('battery_grid', 'battery_node', 'battery_charge', 'upperLimitCapacityRatio') * p_gnu('battery_grid', 'battery_node', 'battery_charge', 'capacity');
*p_gnu('battery_grid', 'battery_node', 'battery_charge', 'upperLimitCapacityRatio') = 0;
*uGroup('battery_charge', 'battery_online_group1') = yes;
*uGroup('battery_discharge', 'battery_online_group1') = yes;
*p_groupPolicy('battery_online_group1', 'constrainedOnlineTotalMax') = p_unit('battery_charge', 'unitCount');
*p_groupPolicy3D('battery_online_group1', 'constrainedOnlineMultiplier', 'battery_charge') = 1;
*p_groupPolicy3D('battery_online_group1', 'constrainedOnlineMultiplier', 'battery_discharge') = 1;
* Do not allow investments in the child setups (commented out at the moment)
*loop(gn2n_directional(grid, node, node_),
* put "p_gnn('",, "', '",, "', '",, "', 'transferCapMax') = 0;"/;
* put "p_gnn('",, "', '",, "', '",, "', 'transferCapMax') = 0;"/;
* Update transmission capacity in the child setups
loop(gn2n_directional(grid, node, node_)${sum(t_invest, r_investTransfer(grid, node, node_, t_invest))},
tmp = sum(t_invest, r_investTransfer(grid, node, node_, t_invest));
put "p_gnn('",, "', '",, "', '",, "', 'transferCap') = p_gnn('",, "', '",, "', '",, "', 'transferCap') + ", tmp, ";"/;
put "p_gnn('",, "', '",, "', '",, "', 'transferCap') = p_gnn('",, "', '",, "', '",, "', 'transferCap') + ", tmp, ";"/;
# Changelog
All notable changes to this project will be documented in this file.
## [Unreleased]
### Added
- Dynamic generation portfolios aka pathway modelling aka multi-year simulations with discounted costs enabled
### Changed
- Static inertia requirement can be fulfilled by both rotational inertia of machines and certain reserve products
## [1.2.1] - 2019-11-26
### Fixed
- Fixed a possible division by zero in the calculation of r_gnuUtilizationRate
- Updated *1e_scenChanges.gms* to match the current naming of sets and parameters
### Changed
- Changed variable O&M costs from p_unit(unit, 'omCosts') to p_gnu(grid, node, unit, 'vomCosts')
## [1.2] - 2019-11-12
### Added
- Dynamic inertia requirements based on loss of unit and loss of export/import (ROCOF constraints)
- N-1 reserve requirement for transfer links
- A separate parameter to tell whether units can provide offline reserve (non-spinning reserve)
- Maximum share of reserve provision from a group of units
- All input files, including *inputData.gdx*, are optional
- Enabling different combinations of LP and MIP online and invest variables
- Separate availability parameter for output units in the capacity margin constraint
- Parameter `gn_forecasts(*, node, timeseries)` to tell which nodes and timeseries use forecasts
### Changed
- Reserve requirements are now based on groups (previously node based)
- Changed the v_startup (and v_shutdown) variables into integers to improve the performance of online approximations
- Updated tool definitions for Sceleton Titan and Spine Toolbox
- The program will now stop looping in case of execution errors
- Scenario reduction is done based on total available energy
- Maintain original scenario labels after reduction
- Clear time series data from dropped samples after scenario reduction
### Fixed
- Removed hard-coded `elec grids` from *setVariableLimits* and *rampSched files*
- Cyclic bounds between different samples were not working correctly (#97)
- Time series smoothing not working at all (#100)
- Fix a number of compilation warnings
- Limiting the provision of online reserve based on the online variable
- Sample probability bug from scenario reduction (probability of single scenario above one)
## 1.1.5 - 2020-11-28
### Fixed
- Long-term scenario data when using only one scenario
- Bug with scenario smoothing which caused wrong values on later than first solve
## 1.1.4 - 2019-11-02
### Fixed
- Sample probability bug from scenario reduction
## 1.1.3 - 2019-10-24
### Changed
- Scenario reduction is done based on total available energy
## 1.1.2 - 2019-10-23
### Changed
- Maintain original scenario labels after reduction
## [1.1] - 2019-04-17
### Added
- New model setting 't_perfectForesight' tells the number of time steps (from
the beginning of current solve) for which realized data is used instead of
forecasts. This value cannot exceed current forecast length, however. Setting
the value lower than 't_jump' has no effect.
- Automated the calculation of sample start and end times if using long-term
scenarios. Also setting number of scenarios to one, instructs the model to use
central forecast for the long-term.
- Speedup for model dimension calculation (set `msft` etc.)
- Support long time intervals in the first block
- Possibility to limit `v_online` to zero according to time series
- Output for reserve transfer results
- Reserve provision limits with investments
- Constrain the set of units to which ramp equations are applied
- Piecewise linear heat rate curves
- Checks for reserves
- Allow to set certain value for `v_gen` at 't000000'
### Changed
- Removed some old command line arguments
- Removed obsolete 'emissionIntensity' fuel parameter
### Fixed
- Unit ramps during start-up and shutdown
- Refreshing forecast data in *inputsLoop*
- Aggregated groups that were not in use were included in the model
- `mst_end` not found for the last sample
- Start-up not working for units without start costs or start fuel consumption
- *periodicInit* will fail with multiple model definitions
- Reserves should not be allowed to be locked when the interval is greater than
smallest interval in use
- Start-up phase and aggregated time steps do not work together
- In SOS2 unit cannot exceed the generation of `p_ut_runUp`
- Startup cost calculation
- Efficiency presentations
- `p_uNonoperational` not fully correct
## [1.0.6] - 2019-03-27
### Fixed
- Major bug in state variable reserve equations
- Scenario smoothing algorithm
### Changed
- Speedup for timeseries calculations
### Added
- New model setting `mSettings(mType, 'onlyExistingForecasts') = 0|1` to control
the reading of forecasts. Set to 1 to only read forecast data that exists in
the file. Note that zeros need to be saved as Eps when using this.
- Proper stochastic programming for the long-term scenarios period. Possible also
to create a stochastic tree from the original data.
- Clickable link to *sr.log* in the process window in case of SCENRED2 error
- New diagnostic parameter for timeseries scenarios `d_ts_scenarios`
## [1.0.5] - 2019-02-14
### Fixed
- Probabilities were not updated after using scenario reduction
### Added
- Enable long-term samples that extend several years by using planning horizon
which is longer than one scenario (e.g. 3 years). Note: Cannot use all data for
samples as last years need to be reserved for the planning horizon.
## [1.0.4] - 2019-02-11
### Fixed
- Severe bug in setting node state level limits
### Changed
- Suppress output from SCENRED2
## [1.0.3] - 2019-02-05
### Fixed
- Only selects forecasts with positive probability for the solve
## [1.0.2] - 2019-02-04
### Added
- New model setting `dataLength` to set the length of time series data before it is
recycled. Warn if this is not defined and automatically calculated from data.
- Command line arguments '--input_dir=<path>' and '--output_dir=<path>' to set
input and output directories, respectively.
- Added sample dimension to most variables and equations (excl. investments).
  Samples can now be used as long-term scenario alternatives (for e.g. hydro scheduling)
- Number of parallel samples can be reduced using SCENRED2. Activate with active('scenRed')
and set parameters in modelsInit.
### Changed
- Automatic calculation of parameter `dt_circular` takes into account time steps
only from `t000001` onwards.
- Debug mode yes/no changed to debug levels 0, 1 or 2. With higher level produces
more information. Default is 0, when no extra files are written (not even *debug.gdx*).
Set debug level with command line parameter `--debug=LEVEL`.
### Fixed
- Calculation of parameter `df_central`
- Readability of some displayed messages
## 1.0 - 2018-09-12
### Changed
- Major updates to data structures etc.
This diff is collapsed.
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.
0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.
"The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.
An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.
A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.
The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.
1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.
2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified version:
a) under this License, provided that you make a good faith effort to
ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or
b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.
3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from
a header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:
a) Give prominent notice with each copy of the object code that the
Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the object code with a copy of the GNU GPL and this license
4. Combined Works.
You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:
a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the Combined Work with a copy of the GNU GPL and this license
c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.
d) Do one of the following:
0) Convey the Minimal Corresponding Source under the terms of this
License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.
1) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (a) uses at run time
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked Version.
e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the
Application with a modified version of the Linked Version. (If
you use option 4d0, the Installation Information must accompany
the Minimal Corresponding Source and Corresponding Application
Code. If you use option 4d1, you must provide the Installation
Information in the manner specified by section 6 of the GNU GPL
for conveying Corresponding Source.)
5. Combined Libraries.
You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:
a) Accompany the combined library with a copy of the same work based
on the Library, uncombined with any other library facilities,
conveyed under the terms of this License.
b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.
6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the Library.
1. After moving the project, the URL in the tool specification should be updated to point to the current main folder directory (xx/BB_in_Spine) \
2. the directory slash must use "/" \
3. the VabiSys inputData.gdx is in v1.3 converted from a v1.2 one by the tools/inputGDX_from_v1.2_to_v1.3.gms
\ No newline at end of file
# Backbone
Backbone is a generic energy network optimization tool written in [GAMS]( It has been designed to be highly adaptable in different dimensions: temporal, spatial, technology representation and market design. The model can represent stochastics with a [model predictive control method](, with short-term forecasts and longer-term statistical uncertainties. Backbone can support multiple different models (e.g. investment or unit commitment) due to the modifiable temporal structure and varying lengths of the time steps.
If you use Backbone in a published work, please cite the [following publication](, which describes the Backbone energy systems modelling framework.
## Getting Started
Make sure that you have [Git]( version control system and a Git interface, such as [TortoiseGit]( or [SourceTree](, installed on your computer.
You also need to have [GAMS]( version 24.0 or later installed.
In order to get a copy of the Backbone project, you need to clone it using Git. Copy and paste the URL of the original Backbone repository and select the directory where you want Backbone to be cloned. The URL of the original Backbone repository is
You should now have *Backbone.gms*, a few additional files and five subdirectories in the directory where you cloned Backbone.
Small example input datasets are provided online in the [wiki](
## Model File Structure
Backbone has been designed with a modular structure, making it easier to change even large portions of the model if necessary. The various gms-files of the model are described briefly below, in the order of their execution when running Backbone.
* Backbone.gms - The heart of the model, containing instructions on how the rest of the files are read and compiled. The following files are currently named with an index corresponding to their turn in the Backbone compilation order.
* 1a_definitions.gms - Contains important definitions regarding the models used, such as possible model features and parameters.
* 1b_sets.gms - Contains the set definitions required by the models.
* 1c_parameters.gms - Contains the parameter definitions used by the models.
* 1d_results.gms - Contains definitions for the model results.
* 1e_inputs.gms - Contains instructions on how to load input data, as well as forms a lot of helpful sets based on said data, for example in order to facilitate writing the constraints.
* 1e_scenChanges.gms - Inside input.gms - reads additional changes for scenarios (Sceleton Titan can use these)
* 2a_variables.gms - Contains variable definitions used by the models.
* 2b_eqDeclarations.gms - Contains equation declarations for the models.
* 2c_objective.gms - Contains the objective function definition.
* 2d_constraints.gms - Contains definitions for constraint equations.
* *Model Definition Files* - Contains GAMS definitions for different models, essentially lists the equations (constraints) that apply. Current files include *schedule.gms*, *building.gms* and *invest.gms*.
* 3a_periodicInit.gms - Initializes various data and sets for the solve loop.
* 3b_periodicLoop.gms - Contains instructions for the forecast-interval structure of the desired model.
* 3c_inputsLoop.gms - Contains instructions for updating the forecast data, optional forecast improvements, aggregating time series data for the time intervals, and other input data processing.
* 3d_setVariableLimits.gms - Defines the variable boundaries for each solve.
* 3e_solve.gms - Contains the GAMS solve command for using the solver.
* 3f_afterSolve.gms - Fixes some variable values after solve.
* 4a_outputVariant.gms - Contains instructions for storing desired results during the solve loop.
* 4b_outputInvariant.gms - Calculates further results post-solve.
* 4c_outputQuickFile.gms
Most of these files are under *\inc* in the Backbone folder, except for the model definition files being housed under *\defModels*. Other than the abovementioned files, a few key input files are required for Backbone to work. These are assumed to be found under *\input* and are briefly described below.
* inputData.gdx - Contains most of the input data about the system to be modelled.
* 1_options.gms - Contains options to control the solver.
* - Contains definitions for the time, forecast and sample index ranges.
* modelsInit.gms - Contains model parameters for the solve (or a link to a template under *\defModels* to be used). Useful for any additional GAMS scripting.
Backbone folder contains template files *1_options_temp.gms*, **, and *modelsInit_temp.gms* to provide examples of the input format. These files can be copied into *\input* and renamed to *1_options.gms*, **, and *modelsInit.gms*.
## When Simply Using Backbone
When starting to use Backbone, there is no immediate need to understand every single file that makes up the model. The files below list the most important files to understand, if one’s aim is simply to use Backbone for modelling/simulation purposes, without the need to modify the way the model works.
* **1a_definitions.gms**: Lists all the possible model settings, as well as all the different parameters that Backbone understands. Also lists some auxiliary sets that are required for the model structure, but don’t hold any intuitive meaning.
* **1e_inputs.gms**: Imports the input data into Backbone, and thus contains a list of the sets and parameters that need to be included in the “InputData.gdx” input file. Also contains rules for generating all sorts of auxiliary sets based on the input data that are used throughout the model files. Contains a few data integrity checks as well, but these could/should be expanded upon in the future.
* **1b_sets.gms and 1c_parameters.gms**: Understanding of the required dimensions of the input sets and parameters is necessary in order to create working input files.
* **Model Initialization Files**: E.g. *scheduleInit_temp.gms* that define the rules for the optimization model.
## Authors
* Juha Kiviluoma
* Erkka Rinne
* Topi Rasku
* Niina Helisto
* Dana Kirchem
* Ran Li
* Ciara O'Dwyer
## License
This program is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License along with this program. If not, see <>.
......@@ -140,6 +140,9 @@ if (mType('schedule'),
mf_realization('schedule', 'f00') = yes;
mf_central('schedule', f) = no;
mf_central('schedule', 'f02') = yes;
// Define special forecast label that holds scenario data
//mf_scenario('schedule', 'scen') = yes;
// Define forecast probabilities (weights)
p_mfProbability('schedule', f) = 0;
......@@ -46,9 +46,9 @@ p_gnn
......@@ -19,6 +19,9 @@ $offtext
* --- Results Symbols Included in Output --------------------------------------
* =============================================================================
* --- Symbols related to the simulation setup ---------------------------------
t_realized = t
* --- Cost Result Symbols -----------------------------------------------------
// Total Objective Function
......@@ -27,6 +27,7 @@ Sets
* --- Energy generation and consumption ---------------------------------------
unit "Set of generators, storages and loads"
unittype "Unit technology types"
unit_flow(unit) "Unit that depend directly on variable energy flows (RoR, solar PV, etc.)"
unit_commodity(unit) "Units using an exogenous commodity with a price"
unit_fail(unit) "Units that might fail"
......@@ -41,13 +42,12 @@ Sets
unit_noSlope(unit) "Units without piecewise linear efficiency constraints"
unitAggregator_unit(unit, unit) "Aggregate unit linked to aggregated units"
unitUnitEffLevel(unit, unit, EffLevel) "Aggregator unit linked to aggreted units with a definition when to start the aggregation"
flowUnit(flow, *) "Units or storages linked to a certain energy flow time series"
unitUnittype(unit, *) "Link generation technologies to types"
flowUnit(flow, unit) "Units linked to a certain energy flow time series"
unitUnittype(unit, unittype) "Link generation technologies to types"
unitStarttype(unit, starttype) "Units with special startup properties"
un_commodity(unit, node) "Units linked with commodities"
un_commodity_in(unit, node) "Units linked with input commodities"
un_commodity_out(unit, node) "Units linked with output commodities"
unittype "Unit technology types"
unit_investLP(unit) "Units with continuous investments allowed"
unit_investMIP(unit) "Units with integer investments allowed"
unit_timeseries(unit) "Units with time series enabled"
......@@ -96,6 +96,7 @@ Sets
t_current(t) "Set of time steps within the current solve horizon"
t_active(t) "Set of active t:s within the current solve horizon, including necessary history"
t_invest(t) "Time steps when investments can be made"
t_realized(t) "Set of realized time steps in the simulation"
tt(t) "Temporary subset for time steps used for calculations"
tt_(t) "Another temporary subset for time steps used for calculations"
tt_block(counter, t) "Temporary time step subset for storing the time interval blocks"
......@@ -157,7 +158,7 @@ $if defined scenario
effGroupSelector(effSelector, effSelector) "Efficiency selectors included in efficiency groups, e.g. Lambda02 contains Lambda01 and Lambda02."
effLevelGroupUnit(effLevel, effSelector, unit) "What efficiency selectors are in use for each unit at each efficiency representation level"
effGroupSelectorUnit(effSelector, unit, effSelector) "Group name for efficiency selector set, e.g. Lambda02 contains Lambda01 and Lambda02"
mSettingsReservesInUse(mType, *, up_down) "Reserves that are used in each model type"
mSettingsReservesInUse(mType, restype, up_down) "Reserves that are used in each model type"
unitCounter(unit, counter) "Counter used for restricting excessive looping over the counter set when defining unit startup/shutdown/online time restrictions"
runUpCounter(unit, counter) "Counter used for unit run-up intervals"
shutdownCounter(unit, counter) "Counter used for unit shutdown intervals"
......@@ -62,9 +62,9 @@ Parameters
p_gnuReserves(grid, node, unit, restype, param_policy) "Reserve provision data for units"
p_gnnReserves(grid, node, node, restype, up_down) "Reserve provision data for node node connections"
p_gnuRes2Res(grid, node, unit, restype, up_down, restype) "The first type of reserve can be used also in the second reserve category (with a possible multiplier)"
p_gnPolicy(grid, node, param_policy, *) "Policy data for grid, node"
p_groupPolicy(group, param_policy) "Two-dimensional policy data for groups"
p_groupPolicy3D(group, param_policy, *) "Three-dimensional policy data for groups"
p_groupPolicyUnit(group, param_policy, unit) "Three-dimensional policy data for groups and units"
p_groupPolicyEmission(group, param_policy, emission) "Three-dimensional policy data for groups and emissions"
p_price(node, param_price) "Commodity price parameters"
p_nEmission(node, emission) "Emission content (kg/MWh)"
p_uStartupfuel(unit, node, param_unitStartupfuel) "Parameters for startup fuels"
......@@ -52,7 +52,6 @@ $ifthen exist '%input_dir%/inputData.gdx'
$$loaddc p_gnuRes2Res
$$loaddc ts_reserveDemand
$$loaddc p_gnBoundaryPropertiesForStates
$$loaddc p_gnPolicy
$$loaddc p_uStartupfuel
$$loaddc flowUnit
$$loaddc emission
......@@ -73,7 +72,8 @@ $ifthen exist '%input_dir%/inputData.gdx'
$$loaddc gnGroup
$$loaddc sGroup
$$loaddc p_groupPolicy
$$loaddc p_groupPolicy3D
$$loaddc p_groupPolicyUnit
$$loaddc p_groupPolicyEmission
$$loaddc gnss_bound
$$loaddc uss_bound