Compare commits

..

1 Commit

Author SHA1 Message Date
1afd71b97b Make test/ a standalone project 2023-05-19 15:27:54 -05:00
164 changed files with 1160 additions and 30973 deletions

View File

@@ -10,7 +10,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
version: ['1.10', '1.12']
version: ['1.6', '1.7', '1.8', '1.9']
os:
- ubuntu-latest
arch:

6
.gitignore vendored
View File

@@ -1,4 +1,3 @@
*-off.md
*.bak
*.gz
*.ipynb
@@ -20,7 +19,6 @@
.apdisk
.com.apple.timemachine.donotpresent
.fseventsd
.idea
.ipy*
.vscode
Icon
@@ -34,10 +32,6 @@ benchmark/tables
benchmark/tmp.json
build
docs/_build
docs/src/tutorials/customizing.md
docs/src/tutorials/lmp.md
docs/src/tutorials/market.md
docs/src/tutorials/usage.md
instances/**/*.json
instances/_source
local

View File

@@ -1,27 +0,0 @@
{
"creators": [
{
"orcid": "0000-0002-5022-9802",
"affiliation": "Argonne National Laboratory",
"name": "Santos Xavier, Alinson"
},
{
"affiliation": "University of Florida",
"name": "Kazachkov, Aleksandr M."
},
{
"affiliation": "Technische Universität Berlin",
"name": "Yurdakul, Ogün"
},
{
"affiliation": "Purdue University",
"name": "He, Jun"
},
{
"affiliation": "Argonne National Laboratory",
"name": "Qiu, Feng"
}
],
"title": "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment",
"description": "<b>UnitCommitment.jl</b> (UC.jl) is an optimization package for the Security-Constrained Unit Commitment Problem (SCUC), a fundamental optimization problem in power systems used, for example, to clear the day-ahead electricity markets. The package provides benchmark instances for the problem and Julia/JuMP implementations of state-of-the-art mixed-integer programming formulations."
}

View File

@@ -11,22 +11,6 @@ All notable changes to this project will be documented in this file.
[semver]: https://semver.org/spec/v2.0.0.html
[pkjjl]: https://pkgdocs.julialang.org/v1/compatibility/#compat-pre-1.0
## [0.4.1] - 2025-11-05
### Fixed
- Fix multi-threading issues in Julia 1.12
### Changed
- The package now requires Julia 1.10 or newer
## [0.4.0] - 2024-05-21
### Added
- Add support for two-stage stochastic problems
- Add support for day-ahead and real-time market clearing simulation
- Add time decomposition methods
- Add scenario decomposition methods (progressive hedging)
- Add support for energy storage units
- Rewrite documentation with runnable examples
## [0.3.0] - 2022-07-18
### Added
- Add support for multiple reserve products and zonal reserves.

View File

@@ -2,10 +2,10 @@
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
VERSION := 0.4
VERSION := 0.3
docs:
cd docs; julia --project=. -e 'include("make.jl"); make()'; cd ..
cd docs; julia --project=. make.jl; cd ..
rsync -avP --delete-after docs/build/ ../docs/$(VERSION)/
.PHONY: docs

View File

@@ -2,7 +2,7 @@ name = "UnitCommitment"
uuid = "64606440-39ea-11e9-0f29-3303a1d3d877"
authors = ["Santos Xavier, Alinson <axavier@anl.gov>"]
repo = "https://github.com/ANL-CEEESA/UnitCommitment.jl"
version = "0.4.1"
version = "0.3.0"
[deps]
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
@@ -18,8 +18,6 @@ PackageCompiler = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
TimerOutputs = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
MPI = "da04e1cc-30fd-572f-bb4f-1f8673147195"
[compat]
DataStructures = "0.18"
@@ -28,7 +26,5 @@ GZip = "0.5"
JSON = "0.21"
JuMP = "1"
MathOptInterface = "1"
MPI = "0.20"
PackageCompiler = "1"
julia = "1.10"
TimerOutputs = "0.5"
julia = "1"

View File

@@ -87,7 +87,11 @@ UnitCommitment.write("/tmp/output.json", solution)
## Documentation
See official documentation at: https://anl-ceeesa.github.io/UnitCommitment.jl/
1. [Usage](https://anl-ceeesa.github.io/UnitCommitment.jl/0.3/usage/)
2. [Data Format](https://anl-ceeesa.github.io/UnitCommitment.jl/0.3/format/)
3. [Instances](https://anl-ceeesa.github.io/UnitCommitment.jl/0.3/instances/)
4. [JuMP Model](https://anl-ceeesa.github.io/UnitCommitment.jl/0.3/model/)
5. [API Reference](https://anl-ceeesa.github.io/UnitCommitment.jl/0.3/api/)
## Authors
* **Alinson S. Xavier** (Argonne National Laboratory)
@@ -108,7 +112,7 @@ See official documentation at: https://anl-ceeesa.github.io/UnitCommitment.jl/
If you use UnitCommitment.jl in your research (instances, models or algorithms), we kindly request that you cite the package as follows:
* **Alinson S. Xavier, Aleksandr M. Kazachkov, Ogün Yurdakul, Jun He, Feng Qiu**. "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment (Version 0.4)". Zenodo (2024). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874).
* **Alinson S. Xavier, Aleksandr M. Kazachkov, Ogün Yurdakul, Feng Qiu**. "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment (Version 0.3)". Zenodo (2022). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874).
If you use the instances, we additionally request that you cite the original sources, as described in the documentation.
@@ -116,7 +120,7 @@ If you use the instances, we additionally request that you cite the original sou
```text
UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment
Copyright © 2020-2024, UChicago Argonne, LLC. All Rights Reserved.
Copyright © 2020-2022, UChicago Argonne, LLC. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted
provided that the following conditions are met:

View File

@@ -1,10 +1,5 @@
[deps]
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
Glob = "c27321d9-0574-5035-807b-f59d2c89b15c"
HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
Literate = "98b081ad-f1c9-55d3-8b20-4c87d4299306"
MPI = "da04e1cc-30fd-572f-bb4f-1f8673147195"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"
UnitCommitment = "64606440-39ea-11e9-0f29-3303a1d3d877"

File diff suppressed because it is too large

View File

@@ -1,495 +0,0 @@
{
"Parameters": {
"Version": "0.3",
"Time horizon (h)": 4
},
"Generators": {
"g1": {
"Bus": "b1",
"Production cost curve (MW)": [
100,
110,
130,
135
],
"Production cost curve ($)": [
1400,
1600,
2200,
2400
],
"Startup delays (h)": [
1,
2,
3
],
"Startup costs ($)": [
1000.0,
1500.0,
2000.0
],
"Initial status (h)": -100,
"Initial power (MW)": 0
},
"g2": {
"Bus": "b2",
"Production cost curve (MW)": [
0,
47,
94,
140
],
"Production cost curve ($)": [
0,
2256.00,
4733.37,
7395.39
],
"Startup delays (h)": [
1,
4
],
"Startup costs ($)": [
3000.0,
4000.0
],
"Ramp up limit (MW)": 98.0,
"Ramp down limit (MW)": 98.0,
"Startup limit (MW)": 98.0,
"Shutdown limit (MW)": 98.0,
"Minimum uptime (h)": 4,
"Minimum downtime (h)": 4,
"Maximum daily energy (MWh)": null,
"Maximum daily starts": null,
"Initial status (h)": -8,
"Initial power (MW)": 0,
"Reserve eligibility": [
"r1"
]
},
"g3": {
"Bus": "b3",
"Production cost curve (MW)": [
0,
33,
66,
100
],
"Production cost curve ($)": [
0,
1113.75,
2369.07,
3891.54
],
"Startup delays (h)": [
1,
4,
8
],
"Startup costs ($)": [
1000.0,
2000.0,
3000.0
],
"Ramp up limit (MW)": 70.0,
"Ramp down limit (MW)": 70.0,
"Startup limit (MW)": 70.0,
"Shutdown limit (MW)": 70.0,
"Must run?": true,
"Minimum uptime (h)": 1,
"Minimum downtime (h)": 1,
"Maximum daily energy (MWh)": null,
"Maximum daily starts": null,
"Initial status (h)": -6,
"Initial power (MW)": 0,
"Reserve eligibility": [
"r1"
]
},
"g4": {
"Bus": "b6",
"Production cost curve (MW)": [
33,
66,
100
],
"Production cost curve ($)": [
1113.75,
2369.07,
3891.54
],
"Initial status (h)": -100,
"Initial power (MW)": 0,
"Reserve eligibility": [
"r1"
]
},
"g5": {
"Bus": "b8",
"Production cost curve (MW)": [
33,
66,
100
],
"Production cost curve ($)": [
1113.75,
2369.07,
3891.54
],
"Initial status (h)": -100,
"Initial power (MW)": 0,
"Reserve eligibility": [
"r1"
]
},
"g6": {
"Bus": "b8",
"Production cost curve (MW)": [
100
],
"Production cost curve ($)": [
10000.00
],
"Initial status (h)": -100,
"Initial power (MW)": 0,
"Reserve eligibility": [
"r1"
]
}
},
"Buses": {
"b1": {
"Load (MW)": 0.0
},
"b2": {
"Load (MW)": [
26.01527,
24.46212,
23.29725,
22.90897
]
},
"b3": {
"Load (MW)": [
112.93263,
106.19039,
101.1337,
99.44814
]
},
"b4": {
"Load (MW)": [
57.30552,
53.88429,
51.31838,
50.46307
]
},
"b5": {
"Load (MW)": [
9.11134,
8.56738,
8.15941,
8.02342
]
},
"b6": {
"Load (MW)": [
13.42723,
12.62561,
12.02439,
11.82398
]
},
"b7": {
"Load (MW)": 0.0
},
"b8": {
"Load (MW)": 0.0
},
"b9": {
"Load (MW)": [
35.36638,
33.25495,
31.67138,
31.14353
]
},
"b10": {
"Load (MW)": [
10.78974,
10.14558,
9.66246,
9.50141
]
},
"b11": {
"Load (MW)": [
4.19601,
3.9455,
3.75762,
3.69499
]
},
"b12": {
"Load (MW)": [
7.31305,
6.87645,
6.549,
6.43985
]
},
"b13": {
"Load (MW)": [
16.18461,
15.21837,
14.49368,
14.25212
]
},
"b14": {
"Load (MW)": [
17.86302,
16.79657,
15.99673,
15.73012
]
}
},
"Transmission lines": {
"l1": {
"Source bus": "b1",
"Target bus": "b2",
"Reactance (ohms)": 0.05917000000000001,
"Susceptance (S)": 29.496860773945063,
"Normal flow limit (MW)": 300.0,
"Emergency flow limit (MW)": 400.0,
"Flow limit penalty ($/MW)": 1000.0
},
"l2": {
"Source bus": "b1",
"Target bus": "b5",
"Reactance (ohms)": 0.22304000000000002,
"Susceptance (S)": 7.825184953346168
},
"l3": {
"Source bus": "b2",
"Target bus": "b3",
"Reactance (ohms)": 0.19797,
"Susceptance (S)": 8.816129979261149
},
"l4": {
"Source bus": "b2",
"Target bus": "b4",
"Reactance (ohms)": 0.17632,
"Susceptance (S)": 9.898645939169292
},
"l5": {
"Source bus": "b2",
"Target bus": "b5",
"Reactance (ohms)": 0.17388,
"Susceptance (S)": 10.037550333530765
},
"l6": {
"Source bus": "b3",
"Target bus": "b4",
"Reactance (ohms)": 0.17103,
"Susceptance (S)": 10.204813494675376
},
"l7": {
"Source bus": "b4",
"Target bus": "b5",
"Reactance (ohms)": 0.04211,
"Susceptance (S)": 41.44690695783257
},
"l8": {
"Source bus": "b4",
"Target bus": "b7",
"Reactance (ohms)": 0.20911999999999997,
"Susceptance (S)": 8.346065665619404
},
"l9": {
"Source bus": "b4",
"Target bus": "b9",
"Reactance (ohms)": 0.55618,
"Susceptance (S)": 3.1380654680037567
},
"l10": {
"Source bus": "b5",
"Target bus": "b6",
"Reactance (ohms)": 0.25201999999999997,
"Susceptance (S)": 6.92536009838239
},
"l11": {
"Source bus": "b6",
"Target bus": "b11",
"Reactance (ohms)": 0.1989,
"Susceptance (S)": 8.774908255376218
},
"l12": {
"Source bus": "b6",
"Target bus": "b12",
"Reactance (ohms)": 0.25581,
"Susceptance (S)": 6.8227561549365925
},
"l13": {
"Source bus": "b6",
"Target bus": "b13",
"Reactance (ohms)": 0.13027,
"Susceptance (S)": 13.397783465067395
},
"l14": {
"Source bus": "b7",
"Target bus": "b8",
"Reactance (ohms)": 0.17615,
"Susceptance (S)": 9.908198989465395
},
"l15": {
"Source bus": "b7",
"Target bus": "b9",
"Reactance (ohms)": 0.11001,
"Susceptance (S)": 15.865187273832648
},
"l16": {
"Source bus": "b9",
"Target bus": "b10",
"Reactance (ohms)": 0.0845,
"Susceptance (S)": 20.65478404727017
},
"l17": {
"Source bus": "b9",
"Target bus": "b14",
"Reactance (ohms)": 0.27038,
"Susceptance (S)": 6.4550974628091184
},
"l18": {
"Source bus": "b10",
"Target bus": "b11",
"Reactance (ohms)": 0.19207,
"Susceptance (S)": 9.08694357262628
},
"l19": {
"Source bus": "b12",
"Target bus": "b13",
"Reactance (ohms)": 0.19988,
"Susceptance (S)": 8.73188539120637
},
"l20": {
"Source bus": "b13",
"Target bus": "b14",
"Reactance (ohms)": 0.34802,
"Susceptance (S)": 5.0150257226433235
}
},
"Contingencies": {
"c1": {
"Affected lines": [
"l1"
]
},
"c2": {
"Affected lines": [
"l2"
]
},
"c3": {
"Affected lines": [
"l3"
]
},
"c4": {
"Affected lines": [
"l4"
]
},
"c5": {
"Affected lines": [
"l5"
]
},
"c6": {
"Affected lines": [
"l6"
]
},
"c7": {
"Affected lines": [
"l7"
]
},
"c8": {
"Affected lines": [
"l8"
]
},
"c9": {
"Affected lines": [
"l9"
]
},
"c10": {
"Affected lines": [
"l10"
]
},
"c11": {
"Affected lines": [
"l11"
]
},
"c12": {
"Affected lines": [
"l12"
]
},
"c13": {
"Affected lines": [
"l13"
]
},
"c15": {
"Affected lines": [
"l15"
]
},
"c16": {
"Affected lines": [
"l16"
]
},
"c17": {
"Affected lines": [
"l17"
]
},
"c18": {
"Affected lines": [
"l18"
]
},
"c19": {
"Affected lines": [
"l19"
]
},
"c20": {
"Affected lines": [
"l20"
]
}
},
"Price-sensitive loads": {
"ps1": {
"Bus": "b3",
"Revenue ($/MW)": 100.0,
"Demand (MW)": 50.0
}
},
"Reserves": {
"r1": {
"Type": "Spinning",
"Amount (MW)": 100.0,
"Shortfall penalty ($/MW)": 1000.0
}
}
}

View File

@@ -1,495 +0,0 @@
{
"Parameters": {
"Version": "0.3",
"Time horizon (h)": 4
},
"Generators": {
"g1": {
"Bus": "b1",
"Production cost curve (MW)": [
100,
110,
130,
135
],
"Production cost curve ($)": [
1400,
1600,
2200,
2400
],
"Startup delays (h)": [
1,
2,
3
],
"Startup costs ($)": [
1000.0,
1500.0,
2000.0
],
"Initial status (h)": -100,
"Initial power (MW)": 0
},
"g2": {
"Bus": "b2",
"Production cost curve (MW)": [
0,
47,
94,
140
],
"Production cost curve ($)": [
0,
2256.00,
4733.37,
7395.39
],
"Startup delays (h)": [
1,
4
],
"Startup costs ($)": [
3000.0,
4000.0
],
"Ramp up limit (MW)": 98.0,
"Ramp down limit (MW)": 98.0,
"Startup limit (MW)": 98.0,
"Shutdown limit (MW)": 98.0,
"Minimum uptime (h)": 4,
"Minimum downtime (h)": 4,
"Maximum daily energy (MWh)": null,
"Maximum daily starts": null,
"Initial status (h)": -8,
"Initial power (MW)": 0,
"Reserve eligibility": [
"r1"
]
},
"g3": {
"Bus": "b3",
"Production cost curve (MW)": [
0,
33,
66,
100
],
"Production cost curve ($)": [
0,
1113.75,
2369.07,
3891.54
],
"Startup delays (h)": [
1,
4,
8
],
"Startup costs ($)": [
1000.0,
2000.0,
3000.0
],
"Ramp up limit (MW)": 70.0,
"Ramp down limit (MW)": 70.0,
"Startup limit (MW)": 70.0,
"Shutdown limit (MW)": 70.0,
"Must run?": true,
"Minimum uptime (h)": 1,
"Minimum downtime (h)": 1,
"Maximum daily energy (MWh)": null,
"Maximum daily starts": null,
"Initial status (h)": -6,
"Initial power (MW)": 0,
"Reserve eligibility": [
"r1"
]
},
"g4": {
"Bus": "b6",
"Production cost curve (MW)": [
33,
66,
100
],
"Production cost curve ($)": [
1113.75,
2369.07,
3891.54
],
"Initial status (h)": -100,
"Initial power (MW)": 0,
"Reserve eligibility": [
"r1"
]
},
"g5": {
"Bus": "b8",
"Production cost curve (MW)": [
33,
66,
100
],
"Production cost curve ($)": [
1113.75,
2369.07,
3891.54
],
"Initial status (h)": -100,
"Initial power (MW)": 0,
"Reserve eligibility": [
"r1"
]
},
"g6": {
"Bus": "b8",
"Production cost curve (MW)": [
100
],
"Production cost curve ($)": [
10000.00
],
"Initial status (h)": -100,
"Initial power (MW)": 0,
"Reserve eligibility": [
"r1"
]
}
},
"Buses": {
"b1": {
"Load (MW)": 0.0
},
"b2": {
"Load (MW)": [
26.01527,
24.46212,
23.29725,
22.90897
]
},
"b3": {
"Load (MW)": [
112.93263,
106.19039,
101.1337,
99.44814
]
},
"b4": {
"Load (MW)": [
57.30552,
53.88429,
51.31838,
50.46307
]
},
"b5": {
"Load (MW)": [
9.11134,
8.56738,
8.15941,
8.02342
]
},
"b6": {
"Load (MW)": [
13.42723,
12.62561,
12.02439,
11.82398
]
},
"b7": {
"Load (MW)": 0.0
},
"b8": {
"Load (MW)": 0.0
},
"b9": {
"Load (MW)": [
35.36638,
33.25495,
31.67138,
31.14353
]
},
"b10": {
"Load (MW)": [
10.78974,
10.14558,
9.66246,
9.50141
]
},
"b11": {
"Load (MW)": [
4.19601,
3.9455,
3.75762,
3.69499
]
},
"b12": {
"Load (MW)": [
7.31305,
6.87645,
6.549,
6.43985
]
},
"b13": {
"Load (MW)": [
16.18461,
15.21837,
14.49368,
14.25212
]
},
"b14": {
"Load (MW)": [
17.86302,
16.79657,
15.99673,
15.73012
]
}
},
"Transmission lines": {
"l1": {
"Source bus": "b1",
"Target bus": "b2",
"Reactance (ohms)": 0.05917000000000001,
"Susceptance (S)": 29.496860773945063,
"Normal flow limit (MW)": 300.0,
"Emergency flow limit (MW)": 400.0,
"Flow limit penalty ($/MW)": 1000.0
},
"l2": {
"Source bus": "b1",
"Target bus": "b5",
"Reactance (ohms)": 0.22304000000000002,
"Susceptance (S)": 7.825184953346168
},
"l3": {
"Source bus": "b2",
"Target bus": "b3",
"Reactance (ohms)": 0.19797,
"Susceptance (S)": 8.816129979261149
},
"l4": {
"Source bus": "b2",
"Target bus": "b4",
"Reactance (ohms)": 0.17632,
"Susceptance (S)": 9.898645939169292
},
"l5": {
"Source bus": "b2",
"Target bus": "b5",
"Reactance (ohms)": 0.17388,
"Susceptance (S)": 10.037550333530765
},
"l6": {
"Source bus": "b3",
"Target bus": "b4",
"Reactance (ohms)": 0.17103,
"Susceptance (S)": 10.204813494675376
},
"l7": {
"Source bus": "b4",
"Target bus": "b5",
"Reactance (ohms)": 0.04211,
"Susceptance (S)": 41.44690695783257
},
"l8": {
"Source bus": "b4",
"Target bus": "b7",
"Reactance (ohms)": 0.20911999999999997,
"Susceptance (S)": 8.346065665619404
},
"l9": {
"Source bus": "b4",
"Target bus": "b9",
"Reactance (ohms)": 0.55618,
"Susceptance (S)": 3.1380654680037567
},
"l10": {
"Source bus": "b5",
"Target bus": "b6",
"Reactance (ohms)": 0.25201999999999997,
"Susceptance (S)": 6.92536009838239
},
"l11": {
"Source bus": "b6",
"Target bus": "b11",
"Reactance (ohms)": 0.1989,
"Susceptance (S)": 8.774908255376218
},
"l12": {
"Source bus": "b6",
"Target bus": "b12",
"Reactance (ohms)": 0.25581,
"Susceptance (S)": 6.8227561549365925
},
"l13": {
"Source bus": "b6",
"Target bus": "b13",
"Reactance (ohms)": 0.13027,
"Susceptance (S)": 13.397783465067395
},
"l14": {
"Source bus": "b7",
"Target bus": "b8",
"Reactance (ohms)": 0.17615,
"Susceptance (S)": 9.908198989465395
},
"l15": {
"Source bus": "b7",
"Target bus": "b9",
"Reactance (ohms)": 0.11001,
"Susceptance (S)": 15.865187273832648
},
"l16": {
"Source bus": "b9",
"Target bus": "b10",
"Reactance (ohms)": 0.0845,
"Susceptance (S)": 20.65478404727017
},
"l17": {
"Source bus": "b9",
"Target bus": "b14",
"Reactance (ohms)": 0.27038,
"Susceptance (S)": 6.4550974628091184
},
"l18": {
"Source bus": "b10",
"Target bus": "b11",
"Reactance (ohms)": 0.19207,
"Susceptance (S)": 9.08694357262628
},
"l19": {
"Source bus": "b12",
"Target bus": "b13",
"Reactance (ohms)": 0.19988,
"Susceptance (S)": 8.73188539120637
},
"l20": {
"Source bus": "b13",
"Target bus": "b14",
"Reactance (ohms)": 0.34802,
"Susceptance (S)": 5.0150257226433235
}
},
"Contingencies": {
"c1": {
"Affected lines": [
"l1"
]
},
"c2": {
"Affected lines": [
"l2"
]
},
"c3": {
"Affected lines": [
"l3"
]
},
"c4": {
"Affected lines": [
"l4"
]
},
"c5": {
"Affected lines": [
"l5"
]
},
"c6": {
"Affected lines": [
"l6"
]
},
"c7": {
"Affected lines": [
"l7"
]
},
"c8": {
"Affected lines": [
"l8"
]
},
"c9": {
"Affected lines": [
"l9"
]
},
"c10": {
"Affected lines": [
"l10"
]
},
"c11": {
"Affected lines": [
"l11"
]
},
"c12": {
"Affected lines": [
"l12"
]
},
"c13": {
"Affected lines": [
"l13"
]
},
"c15": {
"Affected lines": [
"l15"
]
},
"c16": {
"Affected lines": [
"l16"
]
},
"c17": {
"Affected lines": [
"l17"
]
},
"c18": {
"Affected lines": [
"l18"
]
},
"c19": {
"Affected lines": [
"l19"
]
},
"c20": {
"Affected lines": [
"l20"
]
}
},
"Price-sensitive loads": {
"ps1": {
"Bus": "b3",
"Revenue ($/MW)": 100.0,
"Demand (MW)": 50.0
}
},
"Reserves": {
"r1": {
"Type": "Spinning",
"Amount (MW)": 100.0,
"Shortfall penalty ($/MW)": 1000.0
}
}
}

View File

@@ -1,43 +1,16 @@
using Documenter
using UnitCommitment
using JuMP
using Literate
using Documenter, UnitCommitment, JuMP
function make()
literate_sources = [
"src/tutorials/usage.jl",
"src/tutorials/customizing.jl",
"src/tutorials/lmp.jl",
"src/tutorials/market.jl",
]
for src in literate_sources
Literate.markdown(
src,
dirname(src);
documenter = true,
credit = false,
)
end
return makedocs(
sitename = "UnitCommitment.jl",
pages = [
"Home" => "index.md",
"Tutorials" => [
"tutorials/usage.md",
"tutorials/customizing.md",
"tutorials/lmp.md",
"tutorials/market.md",
"tutorials/decomposition.md",
],
"User guide" => [
"guides/problem.md",
"guides/format.md",
"guides/instances.md",
],
"api.md",
],
format = Documenter.HTML(assets = ["assets/custom.css"]),
makedocs(
sitename="UnitCommitment.jl",
pages=[
"Home" => "index.md",
"usage.md",
"format.md",
"instances.md",
"model.md",
"api.md",
],
format = Documenter.HTML(
assets=["assets/custom.css"],
)
end
)

View File

@@ -15,18 +15,17 @@ UnitCommitment.write
## Locational Marginal Prices
### Conventional LMPs
```@docs
UnitCommitment.compute_lmp(::JuMP.Model,::UnitCommitment.ConventionalLMP)
```
### Approximated Extended LMPs
```@docs
UnitCommitment.AELMP
UnitCommitment.compute_lmp(::JuMP.Model,::UnitCommitment.AELMP)
```
## Modify instance
```@docs

View File

@@ -1,6 +1,6 @@
@media screen and (min-width: 1056px) {
#documenter .docs-main {
max-width: 50rem !important;
max-width: 65rem !important;
}
}

330
docs/src/format.md Normal file
View File

@@ -0,0 +1,330 @@
Data Format
===========
Input Data Format
-----------------
Instances are specified by JSON files containing the following main sections:
* [Parameters](#Parameters)
* [Buses](#Buses)
* [Generators](#Generators)
* [Price-sensitive loads](#Price-sensitive-loads)
* [Transmission lines](#Transmission-lines)
* [Reserves](#Reserves)
* [Contingencies](#Contingencies)
Each section is described in detail below. See [case118/2017-01-01.json.gz](https://axavier.org/UnitCommitment.jl/0.3/instances/matpower/case118/2017-01-01.json.gz) for a complete example.
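A file in this format can be loaded directly into Julia. The snippet below is a minimal sketch: the file name is a placeholder, and it assumes that `UnitCommitment.read` accepts a path to a JSON (or gzipped JSON) file following this format.
```julia
using UnitCommitment

# Load an instance written in the format described on this page.
# "my_instance.json" is a placeholder; gzipped files (*.json.gz) are assumed to work as well.
instance = UnitCommitment.read("my_instance.json")
```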
### Parameters
This section describes system-wide parameters, such as the power balance penalty, and optimization parameters, such as the length of the planning horizon and the time step.
| Key | Description | Default | Time series?
| :----------------------------- | :------------------------------------------------ | :------: | :------------:
| `Version` | Version of UnitCommitment.jl this file was written for. Required to ensure that the file remains readable in future versions of the package. If you are following this page to construct the file, this field should equal `0.3`. | Required | N
| `Time horizon (h)` | Length of the planning horizon (in hours). | Required | N
| `Time step (min)` | Length of each time step (in minutes). Must be a divisor of 60 (e.g. 60, 30, 20, 15, etc). | `60` | N
| `Power balance penalty ($/MW)` | Penalty for system-wide shortage or surplus in production (in $/MW). This is charged per time step. For example, if there is a shortage of 1 MW for three time steps, three times this amount will be charged. | `1000.0` | Y
#### Example
```json
{
"Parameters": {
"Version": "0.3",
"Time horizon (h)": 4,
"Power balance penalty ($/MW)": 1000.0
}
}
```
### Buses
This section describes the characteristics of each bus in the system.
| Key | Description | Default | Time series?
| :----------------- | :------------------------------------------------------------ | ------- | :-------------:
| `Load (MW)` | Fixed load connected to the bus (in MW). | Required | Y
#### Example
```json
{
"Buses": {
"b1": {
"Load (MW)": 0.0
},
"b2": {
"Load (MW)": [
26.01527,
24.46212,
23.29725,
22.90897
]
}
}
}
```
### Generators
This section describes all generators in the system. Two types of units can be specified:
- **Thermal units:** Units that produce power by converting heat into electrical energy, such as coal and oil power plants. These units use a more complex model, with binary decision variables, and various constraints to enforce ramp rates and minimum up/down time.
- **Profiled units:** Simplified model for units that do not require the constraints mentioned above, only a maximum and minimum power output for each time period. Typically used for renewables and hydro.
#### Thermal Units
| Key | Description | Default | Time series?
| :------------------------ | :------------------------------------------------| ------- | :-----------:
| `Bus` | Identifier of the bus where this generator is located (string). | Required | N
| `Type` | Type of the generator (string). For thermal generators, this must be `Thermal`. | Required | N
| `Production cost curve (MW)` and `Production cost curve ($)` | Parameters describing the piecewise-linear production costs. See below for more details. | Required | Y
| `Startup costs ($)` and `Startup delays (h)` | Parameters describing how much it costs to start the generator after it has been shut down for a certain amount of time. If `Startup costs ($)` and `Startup delays (h)` are set to `[300.0, 400.0]` and `[1, 4]`, for example, and the generator is shut down at time `00:00` (h:min), then it costs \$300 to start up the generator at any time between `01:00` and `03:59`, and \$400 to start the generator at time `04:00` or any time after that. The number of startup cost points is unlimited, and may be different for each generator. Startup delays must be strictly increasing and the first entry must equal `Minimum downtime (h)`. | `[0.0]` and `[1]` | N
| `Minimum uptime (h)` | Minimum amount of time the generator must stay operational after starting up (in hours). For example, if the generator starts up at time `00:00` (h:min) and `Minimum uptime (h)` is set to 4, then the generator can only shut down at time `04:00`. | `1` | N
| `Minimum downtime (h)` | Minimum amount of time the generator must stay offline after shutting down (in hours). For example, if the generator shuts down at time `00:00` (h:min) and `Minimum downtime (h)` is set to 4, then the generator can only start producing power again at time `04:00`. | `1` | N
| `Ramp up limit (MW)` | Maximum increase in production from one time step to the next (in MW). For example, if the generator is producing 100 MW at time step 1 and if this parameter is set to 40 MW, then the generator will produce at most 140 MW at time step 2. | `+inf` | N
| `Ramp down limit (MW)` | Maximum decrease in production from one time step to the next (in MW). For example, if the generator is producing 100 MW at time step 1 and this parameter is set to 40 MW, then the generator will produce at least 60 MW at time step 2. | `+inf` | N
| `Startup limit (MW)` | Maximum amount of power a generator can produce immediately after starting up (in MW). For example, if `Startup limit (MW)` is set to 100 MW and the unit is off at time step 1, then it may produce at most 100 MW at time step 2.| `+inf` | N
| `Shutdown limit (MW)` | Maximum amount of power a generator can produce immediately before shutting down (in MW). Specifically, the generator can only shut down at time step `t+1` if its production at time step `t` is below this limit. | `+inf` | N
| `Initial status (h)` | If set to a positive number, indicates the amount of time (in hours) the generator has been on at the beginning of the simulation, and if set to a negative number, the amount of time the generator has been off. For example, if `Initial status (h)` is `-2`, this means that the generator was off since `-02:00` (h:min). The simulation starts at time `00:00`. If `Initial status (h)` is `3`, this means that the generator was on since `-03:00`. A value of zero is not acceptable. | Required | N
| `Initial power (MW)` | Amount of power the generator was producing at time step `-1`, immediately before the planning horizon starts. | Required | N
| `Must run?` | If `true`, the generator should be committed, even if that is not economical (Boolean). | `false` | Y
| `Reserve eligibility` | List of reserve products this generator is eligible to provide. By default, the generator is not eligible to provide any reserves. | `[]` | N
| `Commitment status` | List of commitment statuses over the time horizon. At time `t`, if `true`, the generator must be committed at that time period; if `false`, the generator must not be committed at that time period. If `null` at time `t`, the generator's commitment status is then decided by the model. By default, the status is a list of `null` values. | `null` | Y
#### Profiled Units
| Key | Description | Default | Time series?
| :---------------- | :------------------------------------------------ | :------: | :------------:
| `Bus` | Identifier of the bus where this generator is located (string). | Required | N
| `Type` | Type of the generator (string). For profiled generators, this must be `Profiled`. | Required | N
| `Cost ($/MW)` | Cost incurred for serving each MW of power by this generator. | Required | Y
| `Minimum power (MW)` | Minimum amount of power this generator may supply. | `0.0` | Y
| `Maximum power (MW)` | Maximum amount of power this generator may supply. | Required | Y
#### Production costs and limits
Production costs are represented as piecewise-linear curves. Figure 1 shows an example cost curve with three segments, where it costs \$1400, \$1600, \$2200 and \$2400 to generate, respectively, 100, 110, 130 and 135 MW of power. To model this generator, `Production cost curve (MW)` should be set to `[100, 110, 130, 135]`, and `Production cost curve ($)` should be set to `[1400, 1600, 2200, 2400]`.
Note that this curve also specifies the production limits. Specifically, the first point identifies the minimum power output when the unit is operational, while the last point identifies the maximum power output.
```@raw html
<center>
<img src="../assets/cost_curve.png" style="max-width: 500px"/>
<div><b>Figure 1.</b> Piecewise-linear production cost curve.</div>
<br/>
</center>
```
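To make the relationship between the two arrays concrete, the sketch below evaluates the cost of the example generator at an arbitrary output by interpolating between the breakpoints above. This is an illustration only, not code from the package.
```julia
# Breakpoints from Figure 1: convex piecewise-linear curve given as (MW, $) pairs.
mw   = [100.0, 110.0, 130.0, 135.0]
cost = [1400.0, 1600.0, 2200.0, 2400.0]

# Interpolate the production cost at output level p (illustration only).
function production_cost(p, mw, cost)
    (mw[1] <= p <= mw[end]) || error("output outside production limits")
    for k in 1:length(mw)-1
        if p <= mw[k+1]
            slope = (cost[k+1] - cost[k]) / (mw[k+1] - mw[k])
            return cost[k] + slope * (p - mw[k])
        end
    end
end

production_cost(120.0, mw, cost)  # 1900.0
```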
#### Additional remarks:
* For time-dependent production limits or time-dependent production costs, nested arrays may be used. For example, if `Production cost curve (MW)` is set to `[5.0, [10.0, 12.0, 15.0, 20.0]]`, then the unit may generate at most 10, 12, 15 and 20 MW of power during time steps 1, 2, 3 and 4, respectively. The minimum output for all time periods is fixed at 5 MW.
* There is no limit to the number of piecewise-linear segments, and different generators may have a different number of segments.
* If `Production cost curve (MW)` and `Production cost curve ($)` both contain a single element, then the generator must produce exactly that amount of power when operational. To specify that the generator may produce any amount of power up to a certain limit `P`, the parameter `Production cost curve (MW)` should be set to `[0, P]`.
* Production cost curves must be convex.
#### Example
```json
{
"Generators": {
"gen1": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [100.0, 110.0, 130.0, 135.0],
"Production cost curve ($)": [1400.0, 1600.0, 2200.0, 2400.0],
"Startup costs ($)": [300.0, 400.0],
"Startup delays (h)": [1, 4],
"Ramp up limit (MW)": 232.68,
"Ramp down limit (MW)": 232.68,
"Startup limit (MW)": 232.68,
"Shutdown limit (MW)": 232.68,
"Minimum downtime (h)": 4,
"Minimum uptime (h)": 4,
"Initial status (h)": 12,
"Initial power (MW)": 115,
"Must run?": false,
"Reserve eligibility": ["r1"]
},
"gen2": {
"Bus": "b5",
"Type": "Thermal",
"Production cost curve (MW)": [0.0, [10.0, 8.0, 0.0, 3.0]],
"Production cost curve ($)": [0.0, 0.0],
"Initial status (h)": -100,
"Initial power (MW)": 0,
"Reserve eligibility": ["r1", "r2"],
"Commitment status": [true, false, null, true]
},
"gen3": {
"Bus": "b6",
"Type": "Profiled",
"Minimum power (MW)": 10.0,
"Maximum power (MW)": 120.0,
"Cost ($/MW)": 100.0
}
}
}
```
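The startup-cost rule described earlier can be read as a lookup: given how many hours the unit has been offline, the applicable cost is the one attached to the largest startup delay not exceeding that downtime. The sketch below illustrates this using the `[300, 400]` / `[1, 4]` values from `gen1` above; it is not code from the package.
```julia
# Startup tiers from gen1 above.
delays = [1, 4]          # hours offline before each tier becomes applicable
costs  = [300.0, 400.0]  # startup cost ($) of each tier

# Cost of starting a unit that has been offline for `downtime` hours (illustration only).
function startup_cost(downtime, delays, costs)
    k = findlast(d -> d <= downtime, delays)
    k === nothing && error("unit cannot start before the first startup delay has elapsed")
    return costs[k]
end

startup_cost(2, delays, costs)  # 300.0 (offline between 1 h and 4 h)
startup_cost(5, delays, costs)  # 400.0 (offline 4 h or more)
```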
### Price-sensitive loads
This section describes components in the system which may increase or reduce their energy consumption according to the energy prices. Fixed loads (as described in the `buses` section) are always served, regardless of the price, unless there is significant congestion in the system or insufficient production capacity. Price-sensitive loads, on the other hand, are only served if it is economical to do so.
| Key | Description | Default | Time series?
| :---------------- | :------------------------------------------------ | :------: | :------------:
| `Bus` | Bus where the load is located. Multiple price-sensitive loads may be placed at the same bus. | Required | N
| `Revenue ($/MW)` | Revenue obtained for serving each MW of power to this load. | Required | Y
| `Demand (MW)` | Maximum amount of power required by this load. Any amount lower than this may be served. | Required | Y
#### Example
```json
{
"Price-sensitive loads": {
"p1": {
"Bus": "b3",
"Revenue ($/MW)": 23.0,
"Demand (MW)": 50.0
}
}
}
```
### Transmission lines
This section describes the characteristics of the transmission system, such as its topology and the susceptance of each transmission line.
| Key | Description | Default | Time series?
| :--------------------- | :----------------------------------------------- | ------- | :------------:
| `Source bus` | Identifier of the bus where the transmission line originates. | Required | N
| `Target bus` | Identifier of the bus where the transmission line reaches. | Required | N
| `Reactance (ohms)` | Reactance of the transmission line (in ohms). | Required | N
| `Susceptance (S)` | Susceptance of the transmission line (in siemens). | Required | N
| `Normal flow limit (MW)` | Maximum amount of power (in MW) allowed to flow through the line when the system is in its regular, fully-operational state. | `+inf` | Y
| `Emergency flow limit (MW)` | Maximum amount of power (in MW) allowed to flow through the line when the system is in degraded state (for example, after the failure of another transmission line). | `+inf` | Y
| `Flow limit penalty ($/MW)` | Penalty for violating the flow limits of the transmission line (in $/MW). This is charged per time step. For example, if there is a thermal violation of 1 MW for three time steps, then three times this amount will be charged. | `5000.0` | Y
#### Example
```json
{
"Transmission lines": {
"l1": {
"Source bus": "b1",
"Target bus": "b2",
"Reactance (ohms)": 0.05917,
"Susceptance (S)": 29.49686,
"Normal flow limit (MW)": 15000.0,
"Emergency flow limit (MW)": 20000.0,
"Flow limit penalty ($/MW)": 5000.0
}
}
}
```
### Reserves
This section describes the hourly amount of reserves required.
| Key | Description | Default | Time series?
| :-------------------- | :------------------------------------------------- | --------- | :----:
| `Type` | Type of reserve product. Must be either "spinning" or "flexiramp". | Required | N
| `Amount (MW)` | Amount of reserves required. | Required | Y
| `Shortfall penalty ($/MW)` | Penalty for shortage in meeting the reserve requirements (in $/MW). This is charged per time step. A negative value means that the reserve constraints must always be satisfied. | `-1` | Y
#### Example 1
```json
{
"Reserves": {
"r1": {
"Type": "spinning",
"Amount (MW)": [
57.30552,
53.88429,
51.31838,
50.46307
],
"Shortfall penalty ($/MW)": 5.0
},
"r2": {
"Type": "flexiramp",
"Amount (MW)": [
20.31042,
23.65273,
27.41784,
25.34057
]
}
}
}
```
### Contingencies
This section describes credible contingency scenarios in the optimization, such as the loss of a transmission line or generator.
| Key | Description | Default
| :-------------------- | :----------------------------------------------- | ----------
| `Affected generators` | List of generators affected by this contingency. May be omitted if no generators are affected. | `[]`
| `Affected lines` | List of transmission lines affected by this contingency. May be omitted if no lines are affected. | `[]`
#### Example
```json
{
"Contingencies": {
"c1": {
"Affected lines": ["l1", "l2", "l3"],
"Affected generators": ["g1"]
},
"c2": {
"Affected lines": ["l4"]
}
}
}
```
### Additional remarks
#### Time series parameters
Many numerical properties in the JSON file can be specified either as a single floating point number if they are time-independent, or as an array containing exactly `T` elements, if they are time-dependent, where `T` is the number of time steps in the planning horizon. For example, both formats below are valid when `T=3`:
```json
{
"Load (MW)": 800.0,
"Load (MW)": [800.0, 850.0, 730.0]
}
```
The value `T` depends on both `Time horizon (h)` and `Time step (min)`, as the table below illustrates.
Time horizon (h) | Time step (min) | T
:---------------:|:---------------:|:----:
24 | 60 | 24
24 | 15 | 96
24 | 5 | 288
36 | 60 | 36
36 | 15 | 144
36 | 5 | 432
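In other words, `T` is simply the horizon length divided by the time step, as in the one-line sketch below (an illustration, not part of the package API).
```julia
# Number of time steps implied by the planning horizon and time step (illustration only).
num_time_steps(horizon_h, step_min) = Int(horizon_h * 60 / step_min)

num_time_steps(24, 15)  # 96
num_time_steps(36, 5)   # 432
```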
Output Data Format
------------------
The output data format is also JSON-based, but it is not currently documented since we expect it to change significantly in a future version of the package.
Current limitations
-------------------
* Network topology must remain the same for all time periods.
* Only N-1 transmission contingencies are supported. Generator contingencies are not currently supported.
* Time-varying minimum production amounts are not currently compatible with ramp/startup/shutdown limits.
* Flexible ramping products can only be acquired under the `WanHob2016` formulation, which does not support spinning reserves.

View File

@@ -1,380 +0,0 @@
# JSON data format
An instance of the stochastic security-constrained unit commitment (SCUC) problem is composed of multiple scenarios. Each scenario should be described in an individual JSON file containing the main sections below. For deterministic instances, a single scenario file, following the same format below, may also be provided. Fields that are allowed to differ among scenarios are marked as "uncertain". Fields that are allowed to be time-dependent are marked as "time series".
- [Parameters](#Parameters)
- [Buses](#Buses)
- [Generators](#Generators)
- [Storage units](#Storage-units)
- [Price-sensitive loads](#Price-sensitive-loads)
- [Transmission lines](#Transmission-lines)
- [Reserves](#Reserves)
- [Contingencies](#Contingencies)
Each section is described in detail below. See [case118/2017-01-01.json.gz](https://axavier.org/UnitCommitment.jl/0.4/instances/matpower/case118/2017-01-01.json.gz) for a complete example.
### Parameters
This section describes system-wide parameters, such as the power balance penalty, and optimization parameters, such as the length of the planning horizon and the time step.
| Key | Description | Default | Time series? | Uncertain? |
| :----------------------------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------: | :----------: | :--------: |
| `Version` | Version of UnitCommitment.jl this file was written for. Required to ensure that the file remains readable in future versions of the package. If you are following this page to construct the file, this field should equal `0.4`. | Required | No | No |
| `Time horizon (min)` or `Time horizon (h)` | Length of the planning horizon (in minutes or hours). Either `Time horizon (min)` or `Time horizon (h)` is required, but not both. | Required | No | No |
| `Time step (min)` | Length of each time step (in minutes). Must be a divisor of 60 (e.g. 60, 30, 20, 15, etc). | `60` | No | No |
| `Power balance penalty ($/MW)` | Penalty for system-wide shortage or surplus in production (in $/MW). This is charged per time step. For example, if there is a shortage of 1 MW for three time steps, three times this amount will be charged. | `1000.0` | No | Yes |
| `Scenario name` | Name of the scenario. | `"s1"` | No | --- |
| `Scenario weight` | Weight of the scenario. The scenario weight can be any positive real number, that is, it does not have to be between zero and one. The package normalizes the weights to ensure that the probability of all scenarios sum up to one. | 1.0 | No | --- |
#### Example
```json
{
"Parameters": {
"Version": "0.4",
"Time horizon (h)": 4,
"Power balance penalty ($/MW)": 1000.0,
"Scenario name": "s1",
"Scenario weight": 0.5
}
}
```
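As noted in the table above, scenario weights do not need to sum to one; the package normalizes them so that the scenario probabilities do. The normalization amounts to dividing each weight by the total, as in this small illustration (not the package's own code):
```julia
# Scenario weights as given in the input files; they need not sum to one.
weights = Dict("s1" => 0.5, "s2" => 1.5)

# Normalized probabilities, which sum to one (illustration of the stated behavior).
total = sum(values(weights))
probabilities = Dict(name => w / total for (name, w) in weights)
# Dict("s1" => 0.25, "s2" => 0.75)
```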
### Buses
This section describes the characteristics of each bus in the system.
| Key | Description | Default | Time series? | Uncertain? |
| :---------- | :--------------------------------------- | -------- | :----------: | :--------: |
| `Load (MW)` | Fixed load connected to the bus (in MW). | Required | Yes | Yes |
#### Example
```json
{
"Buses": {
"b1": {
"Load (MW)": 0.0
},
"b2": {
"Load (MW)": [26.01527, 24.46212, 23.29725, 22.90897]
}
}
}
```
### Generators
This section describes all generators in the system. Two types of units can be specified:
- **Thermal units:** Units that produce power by converting heat into electrical energy, such as coal and oil power plants. These units use a more complex model, with binary decision variables, and various constraints to enforce ramp rates and minimum up/down time.
- **Profiled units:** Simplified model for units that do not require the constraints mentioned above, only a maximum and minimum power output for each time period. Typically used for renewables and hydro.
#### Thermal Units
| Key | Description | Default | Time series? | Uncertain? |
| :----------------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------- | :----------: | :--------: |
| `Bus` | Identifier of the bus where this generator is located (string). | Required | No | Yes |
| `Type` | Type of the generator (string). For thermal generators, this must be `Thermal`. | Required | No | No |
| `Production cost curve (MW)` and `Production cost curve ($)` | Parameters describing the piecewise-linear production costs. See below for more details. | Required | Yes | Yes |
| `Startup costs ($)` and `Startup delays (h)` | Parameters describing how much it costs to start the generator after it has been shut down for a certain amount of time. If `Startup costs ($)` and `Startup delays (h)` are set to `[300.0, 400.0]` and `[1, 4]`, for example, and the generator is shut down at time `00:00` (h:min), then it costs \$300 to start up the generator at any time between `01:00` and `03:59`, and \$400 to start the generator at time `04:00` or any time after that. The number of startup cost points is unlimited, and may be different for each generator. Startup delays must be strictly increasing and the first entry must equal `Minimum downtime (h)`. | `[0.0]` and `[1]` | No | Yes |
| `Minimum uptime (h)` | Minimum amount of time the generator must stay operational after starting up (in hours). For example, if the generator starts up at time `00:00` (h:min) and `Minimum uptime (h)` is set to 4, then the generator can only shut down at time `04:00`. | `1` | No | Yes |
| `Minimum downtime (h)` | Minimum amount of time the generator must stay offline after shutting down (in hours). For example, if the generator shuts down at time `00:00` (h:min) and `Minimum downtime (h)` is set to 4, then the generator can only start producing power again at time `04:00`. | `1` | No | Yes |
| `Ramp up limit (MW)` | Maximum increase in production from one time step to the next (in MW). For example, if the generator is producing 100 MW at time step 1 and if this parameter is set to 40 MW, then the generator will produce at most 140 MW at time step 2. | `+inf` | No | Yes |
| `Ramp down limit (MW)` | Maximum decrease in production from one time step to the next (in MW). For example, if the generator is producing 100 MW at time step 1 and this parameter is set to 40 MW, then the generator will produce at least 60 MW at time step 2. | `+inf` | No | Yes |
| `Startup limit (MW)` | Maximum amount of power a generator can produce immediately after starting up (in MW). For example, if `Startup limit (MW)` is set to 100 MW and the unit is off at time step 1, then it may produce at most 100 MW at time step 2. | `+inf` | No | Yes |
| `Shutdown limit (MW)` | Maximum amount of power a generator can produce immediately before shutting down (in MW). Specifically, the generator can only shut down at time step `t+1` if its production at time step `t` is below this limit. | `+inf` | No | Yes |
| `Initial status (h)` | If set to a positive number, indicates the amount of time (in hours) the generator has been on at the beginning of the simulation, and if set to a negative number, the amount of time the generator has been off. For example, if `Initial status (h)` is `-2`, this means that the generator was off since `-02:00` (h:min). The simulation starts at time `00:00`. If `Initial status (h)` is `3`, this means that the generator was on since `-03:00`. A value of zero is not acceptable. | Required | No | No |
| `Initial power (MW)` | Amount of power the generator was producing at time step `-1`, immediately before the planning horizon starts. | Required | No | No |
| `Must run?` | If `true`, the generator should be committed, even if that is not economical (Boolean). | `false` | Yes | Yes |
| `Reserve eligibility` | List of reserve products this generator is eligible to provide. By default, the generator is not eligible to provide any reserves. | `[]` | No | Yes |
| `Commitment status` | List of commitment statuses over the time horizon. At time `t`, if `true`, the generator must be committed at that time period; if `false`, the generator must not be committed at that time period. If `null` at time `t`, the generator's commitment status is then decided by the model. By default, the status is a list of `null` values. | `null` | Yes | Yes |
#### Profiled Units
| Key | Description | Default | Time series? | Uncertain? |
| :------------------- | :-------------------------------------------------------------------------------- | :------: | :----------: | :--------: |
| `Bus` | Identifier of the bus where this generator is located (string). | Required | No | Yes |
| `Type` | Type of the generator (string). For profiled generators, this must be `Profiled`. | Required | No | No |
| `Cost ($/MW)` | Cost incurred for serving each MW of power by this generator. | Required | Yes | Yes |
| `Minimum power (MW)` | Minimum amount of power this generator may supply. | `0.0` | Yes | Yes |
| `Maximum power (MW)` | Maximum amount of power this generator may supply. | Required | Yes | Yes |
#### Production costs and limits
Production costs are represented as piecewise-linear curves. Figure 1 shows an example cost curve with three segments, where it costs \$1400, \$1600, \$2200 and \$2400 to generate, respectively, 100, 110, 130 and 135 MW of power. To model this generator, `Production cost curve (MW)` should be set to `[100, 110, 130, 135]`, and `Production cost curve ($)` should be set to `[1400, 1600, 2200, 2400]`.
Note that this curve also specifies the production limits. Specifically, the first point identifies the minimum power output when the unit is operational, while the last point identifies the maximum power output.
```@raw html
<center>
<img src="../../assets/cost_curve.png" style="max-width: 500px"/>
<div><b>Figure 1.</b> Piecewise-linear production cost curve.</div>
<br/>
</center>
```
#### Additional remarks:
- For time-dependent production limits or time-dependent production costs, nested arrays may be used. For example, if `Production cost curve (MW)` is set to `[5.0, [10.0, 12.0, 15.0, 20.0]]`, then the unit may generate at most 10, 12, 15 and 20 MW of power during time steps 1, 2, 3 and 4, respectively. The minimum output for all time periods is fixed at 5 MW.
- There is no limit to the number of piecewise-linear segments, and different generators may have a different number of segments.
- If `Production cost curve (MW)` and `Production cost curve ($)` both contain a single element, then the generator must produce exactly that amount of power when operational. To specify that the generator may produce any amount of power up to a certain limit `P`, the parameter `Production cost curve (MW)` should be set to `[0, P]`.
- Production cost curves must be convex.
#### Example
```json
{
"Generators": {
"gen1": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [100.0, 110.0, 130.0, 135.0],
"Production cost curve ($)": [1400.0, 1600.0, 2200.0, 2400.0],
"Startup costs ($)": [300.0, 400.0],
"Startup delays (h)": [1, 4],
"Ramp up limit (MW)": 232.68,
"Ramp down limit (MW)": 232.68,
"Startup limit (MW)": 232.68,
"Shutdown limit (MW)": 232.68,
"Minimum downtime (h)": 4,
"Minimum uptime (h)": 4,
"Initial status (h)": 12,
"Initial power (MW)": 115,
"Must run?": false,
"Reserve eligibility": ["r1"]
},
"gen2": {
"Bus": "b5",
"Type": "Thermal",
"Production cost curve (MW)": [0.0, [10.0, 8.0, 0.0, 3.0]],
"Production cost curve ($)": [0.0, 0.0],
"Initial status (h)": -100,
"Initial power (MW)": 0,
"Reserve eligibility": ["r1", "r2"],
"Commitment status": [true, false, null, true]
},
"gen3": {
"Bus": "b6",
"Type": "Profiled",
"Minimum power (MW)": 10.0,
"Maximum power (MW)": 120.0,
"Cost ($/MW)": 100.0
}
}
}
```
### Storage units
This section describes energy storage units in the system, which can charge and discharge power. Storage units consume power while charging and generate power while discharging.
| Key | Description | Default | Time series? | Uncertain? |
| :-------------------------------------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------- | :-------------------: | :----------: | :--------: |
| `Bus` | Bus where the storage unit is located. Multiple storage units may be placed at the same bus. | Required | No | Yes |
| `Minimum level (MWh)` | Minimum energy level this storage unit may contain. | `0.0` | Yes | Yes |
| `Maximum level (MWh)` | Maximum energy level this storage unit may contain. | Required | Yes | Yes |
| `Allow simultaneous charging and discharging` | If `false`, the storage unit is not allowed to charge and discharge at the same time (Boolean). | `true` | Yes | Yes |
| `Charge cost ($/MW)` | Cost incurred for charging each MW of power into this storage unit. | Required | Yes | Yes |
| `Discharge cost ($/MW)` | Cost incurred for discharging each MW of power from this storage unit. | Required | Yes | Yes |
| `Charge efficiency` | Efficiency rate to charge power into this storage unit. This value must be greater than or equal to `0.0`, and less than or equal to `1.0`. | `1.0` | Yes | Yes |
| `Discharge efficiency` | Efficiency rate to discharge power from this storage unit. This value must be greater than or equal to `0.0`, and less than or equal to `1.0`. | `1.0` | Yes | Yes |
| `Loss factor` | The energy dissipation rate of this storage unit. This value must be greater than or equal to `0.0`, and less than or equal to `1.0`. | `0.0` | Yes | Yes |
| `Minimum charge rate (MW)` | Minimum rate (in MW) at which this storage unit may charge. | `0.0` | Yes | Yes |
| `Maximum charge rate (MW)` | Maximum rate (in MW) at which this storage unit may charge. | Required | Yes | Yes |
| `Minimum discharge rate (MW)` | Minimum rate (in MW) at which this storage unit may discharge. | `0.0` | Yes | Yes |
| `Maximum discharge rate (MW)` | Maximum rate (in MW) at which this storage unit may discharge. | Required | Yes | Yes |
| `Initial level (MWh)` | Amount of energy stored in this storage unit at time step `-1`, immediately before the planning horizon starts. | `0.0` | No | Yes |
| `Last period minimum level (MWh)` | Minimum energy level this storage unit may contain in the last time step. By default, this value is the same as the last value of `Minimum level (MWh)`. | `Minimum level (MWh)` | No | Yes |
| `Last period maximum level (MWh)` | Maximum energy level this storage unit may contain in the last time step. By default, this value is the same as the last value of `Maximum level (MWh)`. | `Maximum level (MWh)` | No | Yes |
#### Example
```json
{
"Storage units": {
"su1": {
"Bus": "b2",
"Maximum level (MWh)": 100.0,
"Charge cost ($/MW)": 2.0,
"Discharge cost ($/MW)": 2.5,
"Maximum charge rate (MW)": 10.0,
"Maximum discharge rate (MW)": 8.0
},
"su2": {
"Bus": "b2",
"Minimum level (MWh)": 10.0,
"Maximum level (MWh)": 100.0,
"Allow simultaneous charging and discharging": false,
"Charge cost ($/MW)": 3.0,
"Discharge cost ($/MW)": 3.5,
"Charge efficiency": 0.8,
"Discharge efficiency": 0.85,
"Loss factor": 0.01,
"Minimum charge rate (MW)": 5.0,
"Maximum charge rate (MW)": 10.0,
"Minimum discharge rate (MW)": 2.0,
"Maximum discharge rate (MW)": 10.0,
"Initial level (MWh)": 70.0,
"Last period minimum level (MWh)": 80.0,
"Last period maximum level (MWh)": 85.0
},
"su3": {
"Bus": "b9",
"Minimum level (MWh)": [10.0, 11.0, 12.0, 13.0],
"Maximum level (MWh)": [100.0, 110.0, 120.0, 130.0],
"Allow simultaneous charging and discharging": [false, false, true, true],
"Charge cost ($/MW)": [2.0, 2.1, 2.2, 2.3],
"Discharge cost ($/MW)": [1.0, 1.1, 1.2, 1.3],
"Charge efficiency": [0.8, 0.81, 0.82, 0.82],
"Discharge efficiency": [0.85, 0.86, 0.87, 0.88],
"Loss factor": [0.01, 0.01, 0.02, 0.02],
"Minimum charge rate (MW)": [5.0, 5.1, 5.2, 5.3],
"Maximum charge rate (MW)": [10.0, 10.1, 10.2, 10.3],
"Minimum discharge rate (MW)": [4.0, 4.1, 4.2, 4.3],
"Maximum discharge rate (MW)": [8.0, 8.1, 8.2, 8.3],
"Initial level (MWh)": 20.0,
"Last period minimum level (MWh)": 21.0,
"Last period maximum level (MWh)": 22.0
}
}
}
```
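The bounds stated in the table above (efficiencies and loss factor within `[0, 1]`, minimum rates not exceeding maximum rates) can be checked with a small validation sketch. This only restates the rules from the table and is not the package's own validation code; `su` is assumed to be a dictionary parsed from one entry of the `Storage units` section, with scalar or per-time-step values.
```julia
# Check the stated bounds for one storage unit (illustration only, not package code).
# Values may be scalars or arrays, so comparisons are broadcast.
function check_storage_unit(su)
    for key in ("Charge efficiency", "Discharge efficiency", "Loss factor")
        haskey(su, key) && (all(0.0 .<= su[key] .<= 1.0) || error("$key must lie in [0, 1]"))
    end
    all(get(su, "Minimum charge rate (MW)", 0.0) .<= su["Maximum charge rate (MW)"]) ||
        error("minimum charge rate exceeds maximum charge rate")
    all(get(su, "Minimum discharge rate (MW)", 0.0) .<= su["Maximum discharge rate (MW)"]) ||
        error("minimum discharge rate exceeds maximum discharge rate")
    return true
end
```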
### Price-sensitive loads
This section describes components in the system which may increase or reduce their energy consumption according to the energy prices. Fixed loads (as described in the `buses` section) are always served, regardless of the price, unless there is significant congestion in the system or insufficient production capacity. Price-sensitive loads, on the other hand, are only served if it is economical to do so.
| Key | Description | Default | Time series? | Uncertain? |
| :--------------- | :------------------------------------------------------------------------------------------- | :------: | :----------: | :--------: |
| `Bus` | Bus where the load is located. Multiple price-sensitive loads may be placed at the same bus. | Required | No | Yes |
| `Revenue ($/MW)` | Revenue obtained for serving each MW of power to this load. | Required | Yes | Yes |
| `Demand (MW)` | Maximum amount of power required by this load. Any amount lower than this may be served. | Required | Yes | Yes |
#### Example
```json
{
"Price-sensitive loads": {
"p1": {
"Bus": "b3",
"Revenue ($/MW)": 23.0,
"Demand (MW)": 50.0
}
}
}
```
### Transmission lines
This section describes the characteristics of the transmission system, such as its topology and the susceptance of each transmission line.
| Key | Description | Default | Time series? | Uncertain? |
| :-------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------- | :----------: | :--------: |
| `Source bus` | Identifier of the bus where the transmission line originates. | Required | No | Yes |
| `Target bus` | Identifier of the bus where the transmission line reaches. | Required | No | Yes |
| `Susceptance (S)` | Susceptance of the transmission line (in siemens). | Required | No | Yes |
| `Normal flow limit (MW)` | Maximum amount of power (in MW) allowed to flow through the line when the system is in its regular, fully-operational state. | `+inf` | Yes | Yes |
| `Emergency flow limit (MW)` | Maximum amount of power (in MW) allowed to flow through the line when the system is in degraded state (for example, after the failure of another transmission line). | `+inf` | Yes | Yes |
| `Flow limit penalty ($/MW)` | Penalty for violating the flow limits of the transmission line (in $/MW). This is charged per time step. For example, if there is a thermal violation of 1 MW for three time steps, then three times this amount will be charged. | `5000.0` | Yes | Yes |
#### Example
```json
{
"Transmission lines": {
"l1": {
"Source bus": "b1",
"Target bus": "b2",
"Susceptance (S)": 29.49686,
"Normal flow limit (MW)": 15000.0,
"Emergency flow limit (MW)": 20000.0,
"Flow limit penalty ($/MW)": 5000.0
}
}
}
```
### Reserves
This section describes the hourly amount of reserves required.
| Key | Description | Default | Time series? | Uncertain? |
| :------------------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------- | :----------: | :--------: |
| `Type` | Type of reserve product. Must be either "spinning" or "flexiramp". | Required | No | No |
| `Amount (MW)` | Amount of reserves required. | Required | Yes | Yes |
| `Shortfall penalty ($/MW)` | Penalty for shortage in meeting the reserve requirements (in $/MW). This is charged per time step. A negative value means that the reserve constraints must always be satisfied. | `-1` | Yes | Yes |
#### Example 1
```json
{
"Reserves": {
"r1": {
"Type": "spinning",
"Amount (MW)": [57.30552, 53.88429, 51.31838, 50.46307],
"Shortfall penalty ($/MW)": 5.0
},
"r2": {
"Type": "flexiramp",
"Amount (MW)": [20.31042, 23.65273, 27.41784, 25.34057]
}
}
}
```
### Contingencies
This section describes the credible contingency scenarios considered in the optimization, such as the loss of a transmission line or generator.
| Key | Description | Default | Uncertain? |
| :-------------------- | :------------------------------------------------------------------------------------------------ | :-----: | :--------: |
| `Affected generators` | List of generators affected by this contingency. May be omitted if no generators are affected. | `[]` | Yes |
| `Affected lines` | List of transmission lines affected by this contingency. May be omitted if no lines are affected. | `[]` | Yes |
#### Example
```json
{
"Contingencies": {
"c1": {
"Affected lines": ["l1", "l2", "l3"],
"Affected generators": ["g1"]
},
"c2": {
"Affected lines": ["l4"]
}
}
}
```
### Additional remarks
#### Time series parameters
Many numerical properties in the JSON file can be specified either as a single floating point number if they are time-independent, or as an array containing exactly `T` elements, if they are time-dependent, where `T` is the number of time steps in the planning horizon. For example, both formats below are valid when `T=3`:
```json
{
"Load (MW)": 800.0,
"Load (MW)": [800.0, 850.0, 730.0]
}
```
The value `T` depends on both `Time horizon (h)` and `Time step (min)`, as the table below illustrates.
| Time horizon (h) | Time step (min) | T |
| :--------------: | :-------------: | :-: |
| 24 | 60 | 24 |
| 24 | 15 | 96 |
| 24 | 5 | 288 |
| 36 | 60 | 36 |
| 36 | 15 | 144 |
| 36 | 5 | 432 |
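The value of `T` can also be computed directly as `Time horizon (h) × 60 / Time step (min)`. The snippet below is a small illustrative calculation only, not part of the package API:
```julia
# Illustrative only: number of time steps T implied by the two parameters above.
time_horizon_h = 24         # "Time horizon (h)"
time_step_min = 15          # "Time step (min)"
T = time_horizon_h * 60 ÷ time_step_min
println(T)                  # 96, matching the table above
```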
## Current limitations
- Network topology must remain the same for all time periods.
- Only N-1 transmission contingencies are supported. Generator contingencies are not currently supported.
- Time-varying minimum production amounts are not currently compatible with ramp/startup/shutdown limits.
- Flexible ramping products can only be acquired under the `WanHob2016` formulation, which does not support spinning reserves.
- The set of generators must be the same in all scenarios.

View File

@@ -1,289 +0,0 @@
# Benchmark instances
UnitCommitment.jl provides a large collection of benchmark instances collected from the literature and converted to a [common data format](../guides/format.md). In some cases, as indicated below, the original instances have been extended, with realistic parameters, using data-driven methods. If you use these instances in your research, we request that you cite UnitCommitment.jl, as well as the original sources, as listed below. Benchmark instances can be loaded with `UnitCommitment.read_benchmark(name)`, as explained in the [tutorials](../tutorials/usage.md). Instance files can also be [directly downloaded from our website](https://axavier.org/UnitCommitment.jl/0.4/instances/).
!!! warning
The instances included in UC.jl are still under development and may change in the future. If you use these instances in your research, for reproducibility, you should specify what version of UC.jl they came from.
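For example, any instance listed in the tables below can be loaded by passing its name to `read_benchmark`. The snippet below is a minimal sketch, assuming the package is already installed:
```julia
using UnitCommitment

# Load a benchmark instance; the name matches the "Name" column in the tables below.
instance = UnitCommitment.read_benchmark("matpower/case118/2017-01-01")
```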
## MATPOWER
[MATPOWER](https://github.com/MATPOWER/matpower) is an open-source package for solving power flow problems in MATLAB and Octave. It contains a number of power flow test cases, which have been widely used in the power systems literature.
Because most MATPOWER test cases were originally designed for power flow studies, they lack a number of important unit commitment parameters, such as time-varying loads, production cost curves, ramp limits, reserves and initial conditions. The test cases included in UnitCommitment.jl are extended versions of the original MATPOWER test cases, modified as follows:
- **Production cost** curves were generated using a data-driven approach, based on publicly available data. More specifically, machine learning models were trained to predict typical production cost curves, for each day of the year, based on a generator's maximum and minimum power output.
- **Load profiles** were generated using a similar data-driven approach.
- **Ramp-up, ramp-down, startup and shutdown rates** were set to a fixed proportion of the generator's maximum output.
- **Minimum reserves** were set to a fixed proportion of the total demand.
- **Contingencies** were set to include all N-1 transmission line contingencies that do not generate islands or isolated buses. More specifically, there is one contingency for each transmission line, as long as that transmission line is not a bridge in the network graph.
For each MATPOWER test case, UC.jl provides 365 variations (`2017-01-01` to `2017-12-31`) corresponding to different days of the year.
### MATPOWER/UW-PSTCA
A variety of smaller IEEE test cases, [compiled by University of Washington](http://labs.ece.uw.edu/pstca/), corresponding mostly to small portions of the American Electric Power System in the 1960s.
| Name | Buses | Generators | Lines | Contingencies | References |
| ----------------------------- | ----- | ---------- | ----- | ------------- | -------------- |
| `matpower/case14/2017-01-01` | 14 | 5 | 20 | 19 | [MTPWR, PSTCA] |
| `matpower/case30/2017-01-01` | 30 | 6 | 41 | 38 | [MTPWR, PSTCA] |
| `matpower/case57/2017-01-01` | 57 | 7 | 80 | 79 | [MTPWR, PSTCA] |
| `matpower/case118/2017-01-01` | 118 | 54 | 186 | 177 | [MTPWR, PSTCA] |
| `matpower/case300/2017-01-01` | 300 | 69 | 411 | 320 | [MTPWR, PSTCA] |
### MATPOWER/Polish
Test cases based on the Polish 400, 220 and 110 kV networks, originally provided by **Roman Korab** (Politechnika Śląska) and corrected by the MATPOWER team.
| Name | Buses | Generators | Lines | Contingencies | References |
| --------------------------------- | ----- | ---------- | ----- | ------------- | ---------- |
| `matpower/case2383wp/2017-01-01` | 2383 | 323 | 2896 | 2240 | [MTPWR] |
| `matpower/case2736sp/2017-01-01` | 2736 | 289 | 3504 | 3159 | [MTPWR] |
| `matpower/case2737sop/2017-01-01` | 2737 | 267 | 3506 | 3161 | [MTPWR] |
| `matpower/case2746wop/2017-01-01` | 2746 | 443 | 3514 | 3155 | [MTPWR] |
| `matpower/case2746wp/2017-01-01` | 2746 | 457 | 3514 | 3156 | [MTPWR] |
| `matpower/case3012wp/2017-01-01` | 3012 | 496 | 3572 | 2854 | [MTPWR] |
| `matpower/case3120sp/2017-01-01` | 3120 | 483 | 3693 | 2950 | [MTPWR] |
| `matpower/case3375wp/2017-01-01` | 3374 | 590 | 4161 | 3245 | [MTPWR] |
### MATPOWER/PEGASE
Test cases from the [Pan European Grid Advanced Simulation and State Estimation (PEGASE) project](https://cordis.europa.eu/project/id/211407), describing part of the European high voltage transmission network.
| Name | Buses | Generators | Lines | Contingencies | References |
| ------------------------------------- | ----- | ---------- | ----- | ------------- | --------------------------- |
| `matpower/case89pegase/2017-01-01` | 89 | 12 | 210 | 192 | [JoFlMa16, FlPaCa13, MTPWR] |
| `matpower/case1354pegase/2017-01-01` | 1354 | 260 | 1991 | 1288 | [JoFlMa16, FlPaCa13, MTPWR] |
| `matpower/case2869pegase/2017-01-01` | 2869 | 510 | 4582 | 3579 | [JoFlMa16, FlPaCa13, MTPWR] |
| `matpower/case9241pegase/2017-01-01` | 9241 | 1445 | 16049 | 13932 | [JoFlMa16, FlPaCa13, MTPWR] |
| `matpower/case13659pegase/2017-01-01` | 13659 | 4092 | 20467 | 13932 | [JoFlMa16, FlPaCa13, MTPWR] |
### MATPOWER/RTE
Test cases from the R&D Division at [Reseau de Transport d'Electricite](https://www.rte-france.com) representing the size and complexity of the French very high voltage transmission network.
| Name | Buses | Generators | Lines | Contingencies | References |
| --------------------------------- | ----- | ---------- | ----- | ------------- | ----------------- |
| `matpower/case1888rte/2017-01-01` | 1888 | 296 | 2531 | 1484 | [MTPWR, JoFlMa16] |
| `matpower/case1951rte/2017-01-01` | 1951 | 390 | 2596 | 1497 | [MTPWR, JoFlMa16] |
| `matpower/case2848rte/2017-01-01` | 2848 | 544 | 3776 | 2242 | [MTPWR, JoFlMa16] |
| `matpower/case2868rte/2017-01-01` | 2868 | 596 | 3808 | 2260 | [MTPWR, JoFlMa16] |
| `matpower/case6468rte/2017-01-01` | 6468 | 1262 | 9000 | 6094 | [MTPWR, JoFlMa16] |
| `matpower/case6470rte/2017-01-01` | 6470 | 1306 | 9005 | 6085 | [MTPWR, JoFlMa16] |
| `matpower/case6495rte/2017-01-01` | 6495 | 1352 | 9019 | 6060 | [MTPWR, JoFlMa16] |
| `matpower/case6515rte/2017-01-01` | 6515 | 1368 | 9037 | 6063 | [MTPWR, JoFlMa16] |
## PGLIB-UC Instances
[PGLIB-UC](https://github.com/power-grid-lib/pglib-uc) is a benchmark library curated and maintained by the [IEEE PES Task Force on Benchmarks for Validation of Emerging Power System Algorithms](https://power-grid-lib.github.io/). These test cases have been used in [KnOsWa20].
### PGLIB-UC/California
Test cases based on publicly available data from the California ISO. For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
| Name | Buses | Generators | Lines | Contingencies | References |
| ------------------------------------ | ----- | ---------- | ----- | ------------- | ---------- |
| `pglib-uc/ca/2014-09-01_reserves_0` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2014-09-01_reserves_1` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2014-09-01_reserves_3` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2014-09-01_reserves_5` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2014-12-01_reserves_0` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2014-12-01_reserves_1` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2014-12-01_reserves_3` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2014-12-01_reserves_5` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2015-03-01_reserves_0` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2015-03-01_reserves_1` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2015-03-01_reserves_3` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2015-03-01_reserves_5` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2015-06-01_reserves_0` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2015-06-01_reserves_1` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2015-06-01_reserves_3` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/2015-06-01_reserves_5` | 1 | 610 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/Scenario400_reserves_0` | 1 | 611 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/Scenario400_reserves_1` | 1 | 611 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/Scenario400_reserves_3` | 1 | 611 | 0 | 0 | [KnOsWa20] |
| `pglib-uc/ca/Scenario400_reserves_5` | 1 | 611 | 0 | 0 | [KnOsWa20] |
### PGLIB-UC/FERC
Test cases based on a publicly available [unit commitment test case produced by the Federal Energy Regulatory Commission](https://www.ferc.gov/industries-data/electric/power-sales-and-markets/increasing-efficiency-through-improved-software-1). For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
| Name | Buses | Generators | Lines | Contingencies | References |
| ----------------------------- | ----- | ---------- | ----- | ------------- | -------------------- |
| `pglib-uc/ferc/2015-01-01_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-01-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-02-01_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-02-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-03-01_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-03-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-04-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-04-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-05-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-05-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-06-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-06-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-07-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-07-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-08-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-08-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-09-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-09-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-10-01_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-10-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-11-02_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-11-02_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-12-01_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
| `pglib-uc/ferc/2015-12-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] |
### PGLIB-UC/RTS-GMLC
[RTS-GMLC](https://github.com/GridMod/RTS-GMLC) is an updated version of the RTS-96 test system produced by the United States Department of Energy's [Grid Modernization Laboratory Consortium](https://gmlc.doe.gov/). The PGLIB-UC/RTS-GMLC instances are modified versions of the original RTS-GMLC instances, with modified ramp-rates and without a transmission network. For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
| Name | Buses | Generators | Lines | Contingencies | References |
| ------------------------------ | ----- | ---------- | ----- | ------------- | ---------- |
| `pglib-uc/rts_gmlc/2020-01-27` | 1 | 154 | 0 | 0 | [BaBlEh19] |
| `pglib-uc/rts_gmlc/2020-02-09` | 1 | 154 | 0 | 0 | [BaBlEh19] |
| `pglib-uc/rts_gmlc/2020-03-05` | 1 | 154 | 0 | 0 | [BaBlEh19] |
| `pglib-uc/rts_gmlc/2020-04-03` | 1 | 154 | 0 | 0 | [BaBlEh19] |
| `pglib-uc/rts_gmlc/2020-05-05` | 1 | 154 | 0 | 0 | [BaBlEh19] |
| `pglib-uc/rts_gmlc/2020-06-09` | 1 | 154 | 0 | 0 | [BaBlEh19] |
| `pglib-uc/rts_gmlc/2020-07-06` | 1 | 154 | 0 | 0 | [BaBlEh19] |
| `pglib-uc/rts_gmlc/2020-08-12` | 1 | 154 | 0 | 0 | [BaBlEh19] |
| `pglib-uc/rts_gmlc/2020-09-20` | 1 | 154 | 0 | 0 | [BaBlEh19] |
| `pglib-uc/rts_gmlc/2020-10-27` | 1 | 154 | 0 | 0 | [BaBlEh19] |
| `pglib-uc/rts_gmlc/2020-11-25` | 1 | 154 | 0 | 0 | [BaBlEh19] |
| `pglib-uc/rts_gmlc/2020-12-23` | 1 | 154 | 0 | 0 | [BaBlEh19] |
## OR-LIB/UC
[OR-LIB](http://people.brunel.ac.uk/~mastjjb/jeb/info.html) is a collection of test data sets for a variety of operations research problems, including unit commitment. The UC instances in OR-LIB are synthetic instances generated by a [random problem generator](http://groups.di.unipi.it/optimize/Data/UC.html) developed by the [Operations Research Group at University of Pisa](http://groups.di.unipi.it/optimize/). These test cases have been used in [FrGe06] and many other publications.
| Name | Hours | Buses | Generators | Lines | Contingencies | References |
| ------------------- | ----- | ----- | ---------- | ----- | ------------- | --------------- |
| `or-lib/10_0_1_w` | 24 | 1 | 10 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/10_0_2_w` | 24 | 1 | 10 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/10_0_3_w` | 24 | 1 | 10 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/10_0_4_w` | 24 | 1 | 10 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/10_0_5_w` | 24 | 1 | 10 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/20_0_1_w` | 24 | 1 | 20 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/20_0_2_w` | 24 | 1 | 20 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/20_0_3_w` | 24 | 1 | 20 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/20_0_4_w` | 24 | 1 | 20 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/20_0_5_w` | 24 | 1 | 20 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/50_0_1_w` | 24 | 1 | 50 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/50_0_2_w` | 24 | 1 | 50 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/50_0_3_w` | 24 | 1 | 50 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/50_0_4_w` | 24 | 1 | 50 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/50_0_5_w` | 24 | 1 | 50 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/75_0_1_w` | 24 | 1 | 75 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/75_0_2_w` | 24 | 1 | 75 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/75_0_3_w` | 24 | 1 | 75 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/75_0_4_w` | 24 | 1 | 75 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/75_0_5_w` | 24 | 1 | 75 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/100_0_1_w` | 24 | 1 | 100 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/100_0_2_w` | 24 | 1 | 100 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/100_0_3_w` | 24 | 1 | 100 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/100_0_4_w` | 24 | 1 | 100 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/100_0_5_w` | 24 | 1 | 100 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/150_0_1_w` | 24 | 1 | 150 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/150_0_2_w` | 24 | 1 | 150 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/150_0_3_w` | 24 | 1 | 150 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/150_0_4_w` | 24 | 1 | 150 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/150_0_5_w` | 24 | 1 | 150 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_10_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_11_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_12_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_1_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_2_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_3_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_4_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_5_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_6_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_7_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_8_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
| `or-lib/200_0_9_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] |
## Tejada19
Test cases used in [TeLuSa19]. These instances are similar to OR-LIB/UC, in the sense that they use the same random problem generator, but are much larger.
| Name | Hours | Buses | Generators | Lines | Contingencies | References |
| ----------------------- | ----- | ----- | ---------- | ----- | ------------- | ---------- |
| `tejada19/UC_24h_214g` | 24 | 1 | 214 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_250g` | 24 | 1 | 250 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_290g` | 24 | 1 | 290 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_480g` | 24 | 1 | 480 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_505g` | 24 | 1 | 505 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_623g` | 24 | 1 | 623 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_647g` | 24 | 1 | 647 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_836g` | 24 | 1 | 836 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_850g` | 24 | 1 | 850 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_918g` | 24 | 1 | 918 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_931g` | 24 | 1 | 931 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_940g` | 24 | 1 | 940 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_957g` | 24 | 1 | 957 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_959g` | 24 | 1 | 959 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1069g` | 24 | 1 | 1069 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1130g` | 24 | 1 | 1130 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1376g` | 24 | 1 | 1376 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1393g` | 24 | 1 | 1393 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1577g` | 24 | 1 | 1577 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1615g` | 24 | 1 | 1615 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1632g` | 24 | 1 | 1632 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1768g` | 24 | 1 | 1768 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1804g` | 24 | 1 | 1804 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1820g` | 24 | 1 | 1820 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1823g` | 24 | 1 | 1823 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_24h_1888g` | 24 | 1 | 1888 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_36g` | 168 | 1 | 36 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_38g` | 168 | 1 | 38 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_40g` | 168 | 1 | 40 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_53g` | 168 | 1 | 53 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_58g` | 168 | 1 | 58 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_59g` | 168 | 1 | 59 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_72g` | 168 | 1 | 72 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_84g` | 168 | 1 | 84 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_86g` | 168 | 1 | 86 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_88g` | 168 | 1 | 88 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_93g` | 168 | 1 | 93 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_105g` | 168 | 1 | 105 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_110g` | 168 | 1 | 110 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_125g` | 168 | 1 | 125 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_130g` | 168 | 1 | 130 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_131g` | 168 | 1 | 131 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_140g` | 168 | 1 | 140 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_165g` | 168 | 1 | 165 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_175g` | 168 | 1 | 175 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_179g` | 168 | 1 | 179 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_188g` | 168 | 1 | 188 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_192g` | 168 | 1 | 192 | 0 | 0 | [TeLuSa19] |
| `tejada19/UC_168h_199g` | 168 | 1 | 199 | 0 | 0 | [TeLuSa19] |
## References
- [UCJL] **Alinson S. Xavier, Aleksandr M. Kazachkov, Ogün Yurdakul, Feng Qiu.** "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment (Version 0.3)". Zenodo (2022). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874)
- [KnOsWa20] **Bernard Knueven, James Ostrowski and Jean-Paul Watson.** "On Mixed-Integer Programming Formulations for the Unit Commitment Problem". INFORMS Journal on Computing (2020). [DOI: 10.1287/ijoc.2019.0944](https://doi.org/10.1287/ijoc.2019.0944)
- [KrHiOn12] **Eric Krall, Michael Higgins and Richard P. O'Neill.** "RTO unit commitment test system." Federal Energy Regulatory Commission. Available at: <https://www.ferc.gov/industries-data/electric/power-sales-and-markets/increasing-efficiency-through-improved-software-1> (Accessed: Nov 14, 2020)
- [BaBlEh19] **Clayton Barrows, Aaron Bloom, Ali Ehlen, Jussi Ikaheimo, Jennie Jorgenson, Dheepak Krishnamurthy, Jessica Lau et al.** "The IEEE Reliability Test System: A Proposed 2019 Update." IEEE Transactions on Power Systems (2019). [DOI: 10.1109/TPWRS.2019.2925557](https://doi.org/10.1109/TPWRS.2019.2925557)
- [JoFlMa16] **C. Josz, S. Fliscounakis, J. Maeght, and P. Panciatici.** "AC Power Flow Data in MATPOWER and QCQP Format: iTesla, RTE Snapshots, and PEGASE". [ArXiv (2016)](https://arxiv.org/abs/1603.01533).
- [FlPaCa13] **S. Fliscounakis, P. Panciatici, F. Capitanescu, and L. Wehenkel.** "Contingency ranking with respect to overloads in very large power systems taking into account uncertainty, preventive and corrective actions", Power Systems, IEEE Trans. on, (28)4:4909-4917, 2013. [DOI: 10.1109/TPWRS.2013.2251015](https://doi.org/10.1109/TPWRS.2013.2251015)
- [MTPWR] **R. D. Zimmerman, C. E. Murillo-Sánchez and R. J. Thomas.** "MATPOWER: Steady-State Operations, Planning, and Analysis Tools for Power Systems Research and Education", IEEE Transactions on Power Systems, vol. 26, no. 1, pp. 12-19, Feb. 2011. [DOI: 10.1109/TPWRS.2010.2051168](https://doi.org/10.1109/TPWRS.2010.2051168)
- [PSTCA] **University of Washington, Dept. of Electrical Engineering.** "Power Systems Test Case Archive". Available at: <http://www.ee.washington.edu/research/pstca/> (Accessed: Nov 14, 2020)
- [ORLIB] **J. E. Beasley.** "OR-Library: distributing test problems by electronic mail", Journal of the Operational Research Society 41(11) (1990). [DOI: 10.2307/2582903](https://doi.org/10.2307/2582903)
- [FrGe06] **A. Frangioni, C. Gentile.** "Solving nonlinear single-unit commitment problems with ramping constraints" Operations Research 54(4), p. 767 - 775, 2006. [DOI: 10.1287/opre.1060.0309](https://doi.org/10.1287/opre.1060.0309)
- [TeLuSa19] **D. A. Tejada-Arango, S. Lumbreras, P. Sanchez-Martin and A. Ramos.** "Which Unit-Commitment Formulation is Best? A Systematic Comparison," IEEE Transactions on Power Systems (2020). [DOI: 10.1109/TPWRS.2019.2962024](https://ieeexplore.ieee.org/document/8941313/).

View File

@@ -1,78 +0,0 @@
JuMP Model
==========
In this page, we describe the JuMP optimization model produced by the function `build_model`. A detailed understanding of this model is not necessary if you are just interested in using the package to solve some standard unit commitment cases, but it may be useful, for example, if you need to solve a slightly different problem, with additional variables and constraints. The notation in this page generally follows [KnOsWa20].
Decision variables
------------------
UC.jl models the security-constrained unit commitment problem as a two-stage stochastic program. In this approach, some of the decision variables are *first-stage decisions*, which are taken before the uncertainty is realized and must therefore be the same across all scenarios, while the remaining variables are *second-stage decisions*, which can attain different values in each scenario. In the current version of the package, all binary variables (which model commitment decisions of thermal units) are first-stage decisions and all continuous variables are second-stage decisions.
!!! note
UC.jl treats deterministic SCUC instances as a special case of the stochastic problem in which there is only one scenario, named `"s1"` by default. To access second-stage decisions, therefore, you must provide this scenario name as the value for `sn`. For example, `model[:prod_above]["s1", g, t]`.
### Generators
In this section, we describe the decision variables associated with the generators, which include both thermal units (e.g., natural gas-fired power plant) and profiled units (e.g., wind turbine).
#### Thermal Units
Name | Description | Unit | Stage
:-----|:-------------|:------: | :------:
`is_on[g,t]` | True if generator `g` is on at time `t`. | Binary | 1
`switch_on[g,t]` | True if generator `g` switches on at time `t`. | Binary | 1
`switch_off[g,t]` | True if generator `g` switches off at time `t`. | Binary | 1
`startup[g,t,s]` | True if generator `g` switches on at time `t` incurring start-up costs from start-up category `s`. | Binary | 1
`prod_above[sn,g,t]` | Amount of power produced by generator `g` above its minimum power output at time `t` in scenario `sn`. For example, if the minimum power of generator `g` is 100 MW and `g` is producing 115 MW of power at time `t` in scenario `sn`, then `prod_above[sn,g,t]` equals `15.0`. | MW | 2
`segprod[sn,g,t,k]` | Amount of power from piecewise linear segment `k` produced by generator `g` at time `t` in scenario `sn`. For example, if cost curve for generator `g` is defined by the points `(100, 1400)`, `(110, 1600)`, `(130, 2200)` and `(135, 2400)`, and if the generator is producing 115 MW of power at time `t` in scenario `sn`, then `segprod[sn,g,t,:]` equals `[10.0, 5.0, 0.0]`.| MW | 2
`reserve[sn,r,g,t]` | Amount of reserve `r` provided by unit `g` at time `t` in scenario `sn`. | MW | 2
!!! warning
The first-stage decision variables of the JuMP model are `is_on[g,t]`, `switch_on[g,t]`, `switch_off[g,t]`, and `startup[g,t,s]`. As such, the dictionaries corresponding to these variables do not include the scenario index in their keys. In contrast, all other variables of the created JuMP model are allowed to obtain a different value in each scenario and are thus modeled as second-stage decision variables. Accordingly, the dictionaries of all second-stage decision variables have the scenario index in their keys. This is true even if the model is created to solve the deterministic SCUC, in which case the default scenario index `s1` is included in the dictionary key.
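The snippet below illustrates these indexing conventions. It is a minimal sketch, assuming a model that has already been built by `build_model` and optimized, and a thermal generator named `g1` (the generator name is hypothetical):
```julia
using JuMP

# First-stage variables: indexed by generator and time only.
value(model[:is_on]["g1", 1])
value(model[:switch_on]["g1", 1])

# Second-stage variables: the scenario index comes first, even for
# deterministic instances, where the default scenario is named "s1".
value(model[:prod_above]["s1", "g1", 1])
```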
#### Profiled Units
Name | Description | Unit | Stage
:-----|:-------------|:------: | :------:
`prod_profiled[sn,g,t]` | Amount of power produced by profiled unit `g` at time `t` in scenario `sn`. | MW | 2
### Buses
Name | Description | Unit | Stage
:-----|:-------------|:------:| :------:
`net_injection[sn,b,t]` | Net injection at bus `b` at time `t` in scenario `sn`. | MW | 2
`curtail[sn,b,t]` | Amount of load curtailed at bus `b` at time `t` in scenario `sn`. | MW | 2
### Price-sensitive loads
Name | Description | Unit | Stage
:-----|:-------------|:------:| :------:
`loads[sn,s,t]` | Amount of power served to price-sensitive load `s` at time `t` in scenario `sn`. | MW | 2
### Transmission lines
Name | Description | Unit | Stage
:-----|:-------------|:------:| :------:
`flow[sn,l,t]` | Power flow on line `l` at time `t` in scenario `sn`. | MW | 2
`overflow[sn,l,t]` | Amount of flow above the limit for line `l` at time `t` in scenario `sn`. | MW | 2
!!! warning
Since transmission and N-1 security constraints are enforced in a lazy way, most of the `flow[sn,l,t]` variables are never added to the model. Accessing `model[:flow][sn,l,t]` without first checking that the variable exists will likely generate an error.
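The snippet below sketches the check suggested above. It assumes that `model[:flow]` behaves like a standard Julia dictionary keyed by `(scenario, line, time)` tuples, that the model has already been optimized, and uses hypothetical scenario, line and time indices:
```julia
using JuMP

key = ("s1", "l1", 1)  # hypothetical (scenario, line, time) index
if haskey(model[:flow], key)
    println(value(model[:flow][key]))
else
    println("flow variable was not added to the model for this index")
end
```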
Objective function
------------------
TODO
Constraints
-----------
TODO

View File

@@ -1,618 +0,0 @@
# Problem definition
The **Security-Constrained Unit Commitment Problem** (SCUC) is formulated in
UC.jl as a two-stage stochastic mixed-integer linear optimization problem that
aims to find the minimum-cost schedule for electricity generation while
satisfying various physical, operational and economic constraints. In its most
basic form, the problem is composed of:
- A set of generators, which produce power, at a given cost;
- A set of loads, which consume power;
- A transmission network, which delivers power from generators to the loads.
In addition to the basic components above, SCUC also includes a wide variety of
additional components, such as _energy storage devices_, _reserves_ and _network
interfaces_, to name a few. On this page, we present a complete definition of
the problem, as modeled in UC.jl. Please note that different sources in the
literature may have significantly different definitions and assumptions.
!!! note
UC.jl treats deterministic SCUC instances as a special case of the stochastic problem in which there is only one scenario, named `"s1"` by default. To access second-stage decisions, therefore, you must provide this scenario name as the value for `s`. For example, `model[:prod_above]["s1", g, t]`.
!!! warning
The problem definition presented in this page is mathematically equivalent to the one solved by UC.jl. However, some constraints (ramping, piecewise-linear costs and start-up costs) have been simplified in this page for clarity. The set of constraints actually enforced by UC.jl better describes the convex hull of the problem and leads to better computational performance, but it is much more complex to describe. For further details, we refer to the package's source code and associated references.
## 1. General modeling assumptions
- **Time discretization:** SCUC is a multi-period problem, with decisions
typically covering a 24-hour or 36-hour time window. UC.jl assumes that this
time window is discretized into time steps of fixed length. The number of time
steps, as well as the duration of each time step, are configurable. In the
equations below, the set of time steps is denoted by $T=\{1,2,\ldots,|T|\}$.
- **Decision under uncertainty:** SCUC is a two-stage stochastic problem. In the
first stage, we must decide the _commitment status_ of all thermal generators.
In the second stage, we determine the remaining decision variables, such as the
power output of all generators, the operation of energy storage devices and
load shedding. Stochasticity is modeled through a discrete number of scenarios
$s \in S$, each with given probability $p(s)$. The goal is to minimize the
expected total cost, as sketched below.
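In abstract form, the problem therefore has the two-stage structure sketched below, where $x$ denotes the first-stage (commitment) decisions, $y_s$ the second-stage (dispatch) decisions in scenario $s$, and $c^{\text{commit}}$ and $c^{\text{dispatch}}_s$ the corresponding cost terms. This is only a schematic summary; the exact objective terms and constraints are listed section by section in the remainder of this page.
```math
\min_{x, y} \;\; c^{\text{commit}}(x) + \sum_{s \in S} p(s) \, c^{\text{dispatch}}_s(x, y_s)
```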
## 2. Thermal generators
A _thermal generator_ is a power generation unit that converts thermal energy,
typically from the combustion of coal, natural gas or oil, into electrical
energy. Scheduling thermal generators is particularly complex due to their
operational characteristics, including minimum up and down times, ramping rates,
and start-up and shutdown limits.
### Important concepts
- **Commitment, power output and startup costs:** Thermal generators can either
be online (on) or offline (off). When a thermal generator is on, it can
produce between a minimum and a maximum amount of power; when it is off, it
cannot produce any power. Switching a generator on incurs a startup cost,
which depends on how long the unit has been offline. More precisely, each
thermal generator $g$ has a number $K^{start}_g$ of startup categories (e.g.,
cold, warm and hot). Each category $k$ has a corresponding startup cost
$Z^{\text{start}}_{gk}$, and is available only if the unit has spent at most
$M^{\text{delay}}_{gk}$ time steps offline.
- **Piecewise-linear production cost curve:** Besides startup costs, thermal
generators also incur production costs based on their power output. The
relationship between production cost and power output is not linear, but a
convex curve, which is simplified using a piecewise-linear approximation. For
this purpose, each thermal generator $g$ has a number $K^{\text{cost}}_g$ of
piecewise-linear segments, and its power output $y^{\text{prod-above}}_{gts}$
is broken down into
$\sum_{k=1}^{K^{\text{cost}}_g} y^{\text{seg-prod}}_{gtks}$, so that
production costs can be more easily calculated.
- **Ramping, minimum up/down:** Due to physical and operational limits, such as
thermal inertia and mechanical stress, thermal generators cannot vary their
power output too dramatically from one time period to the next. Similarly,
thermal generators cannot switch on and off too frequently; after switching on
or off, units must remain at that state for a minimum specified number of time
steps.
- **Startup and shutdown limit:** A thermal generator cannot shut off if its
output power level in the immediately preceding time step is very high (above
a specified value); the unit must first ramp down, over potentially multiple
time steps, and only then shut off. Similarly, the unit cannot produce a very
large amount of power (above a specified limit) immediately after starting up;
it must ramp up over potentially multiple time steps.
- **Initial status:** The optimization process finds optimal commitment status
and power output level for all thermal generators starting at time period 1.
Many constraints, however, require knowledge of previous time periods (0, -1,
-2, ...) which are not part of the optimization model. For this reason, part
of the input data is the initial power output $M^{\text{init-power}}_{g}$ of
unit $g$ (that is, the output at time 0) and the initial status
$M^{\text{init-status}}_{g}$ of unit $g$ (how many time steps it has been
online/offline at time 0). If $M^{\text{init-status}}_{g}$ is positive,
its magnitude indicates how many time periods the unit has been online; if
negative, how many time periods it has been offline.
- **Must-run:** Due to various factors, including reliability considerations,
some units must remain operational regardless of whether it is economical for
them to do so. Must-run constraints are used to enforce such requirements.
### Sets and constants
| Symbol | Unit | Description |
| :------------------------------ | :----- | :----------------------------------------------------------------------------------------- |
| $K^{cost}_g$ | | Number of piecewise linear segments in the production cost curve. |
| $K^{start}_g$ | | Number of startup categories (e.g. cold, warm, hot). |
| $M^{\text{delay}}_{gk}$ | | Delay for startup category $k$. |
| $M^{\text{init-power}}_{g}$ | MW | Initial power output of unit $g$. |
| $M^{\text{init-status}}_{g}$ | | Initial status of unit $g$. |
| $M^{\text{min-up}}_{g}$ | | Minimum amount of time $g$ must stay on after switching on. |
| $M^{\text{must-run}}_{gt}$ | Binary | One if unit $g$ must be on at time $t$. |
| $M^{\text{pmax}}_{gt}$ | MW | Maximum power output at time $t$. |
| $M^{\text{pmin}}_{gt}$ | MW | Minimum power output at time $t$. |
| $M^{\text{ramp-down}}_{g}$ | MW | Ramp down limit. |
| $M^{\text{ramp-up}}_{g}$ | MW | Ramp up limit. |
| $M^{\text{seg-pmax}}_{gtks}$ | MW | Maximum power output for piecewise-linear segment $k$ at time $t$ and scenario $s$. |
| $M^{\text{shutdown-limit}}_{g}$ | MW | Maximum power unit $g$ produces immediately before shutting down |
| $M^{\text{startup-limit}}_{g}$ | MW | Maximum power unit $g$ produces immediately after starting up |
| $R_g$ | | Set of spinning reserves that may be served by $g$. |
| $Z^{\text{pmin}}_{gt}$ | \$ | Cost to keep $g$ operational at time $t$ generating at minimum power. |
| $Z^{\text{pvar}}_{gtks}$ | \$/MW | Cost for unit $g$ to produce 1 MW of power under piecewise-linear segment $k$ at time $t$. |
| $Z^{\text{start}}_{gk}$ | \$ | Cost to start unit $g$ at startup category $k$. |
| $G^\text{therm}$ | | Set of thermal generators. |
### Decision variables
| Symbol | JuMP name | Description | Unit | Stage |
| :---------------------------- | :------------------ | :-------------------------------------------------------------------------------------------- | :----- | :---- |
| $x^{\text{is-on}}_{gt}$ | `is_on[g,t]` | One if generator $g$ is on at time $t$. | Binary | 1 |
| $x^{\text{switch-on}}_{gt}$ | `switch_on[g,t]` | One if generator $g$ switches on at time $t$. | Binary | 1 |
| $x^{\text{switch-off}}_{gt}$ | `switch_off[g,t]` | One if generator $g$ switches off at time $t$. | Binary | 1 |
| $x^{\text{start}}_{gtk}$ | `startup[g,t,s]` | One if generator $g$ starts up at time $t$ under startup category $k$. | Binary | 1 |
| $y^{\text{prod-above}}_{gts}$ | `prod_above[s,g,t]` | Amount of power produced by $g$ at time $t$ in scenario $s$ above the minimum power. | MW | 2 |
| $y^{\text{seg-prod}}_{gtks}$ | `segprod[s,g,t,k]` | Amount of power produced by $g$ at time $t$ in piecewise-linear segment $k$ and scenario $s$. | MW | 2 |
| $y^{\text{res}}_{grts}$ | `reserve[s,r,g,t]` | Amount of spinning reserve $r$ supplied by $g$ at time $t$ in scenario $s$. | MW | 2 |
### Objective function terms
- Production costs:
```math
\sum_{g \in G^\text{therm}} \sum_{t \in T} x^{\text{is-on}}_{gt} Z^{\text{pmin}}_{gt}
+ \sum_{s \in S} p(s) \left[
\sum_{g \in G^\text{therm}} \sum_{t \in T} \sum_{k=1}^{K^{cost}_g}
y^{\text{seg-prod}}_{gtks} Z^{\text{pvar}}_{gtks}
\right]
```
- Start-up costs:
```math
\sum_{g \in G} \sum_{t \in T} \sum_{k=1}^{K^{start}_g} x^{\text{start}}_{gtk} Z^{\text{start}}_{gk}
```
### Constraints
- Some units must remain on, even if it is not economical for them to do so:
```math
x^{\text{is-on}}_{gt} \geq M^{\text{must-run}}_{gt}
```
- After switching on, unit must remain on for some amount of time
(`eq_min_uptime[g,t]`):
```math
\sum_{i=max(1,t-M^{\text{min-up}}_{g}+1)}^t x^{\text{switch-on}}_{gi} \leq x^{\text{is-on}}_{gt}
```
- Same as above, but covering the initial time steps (`eq_min_uptime[g,0]`):
```math
\sum_{i=1}^{min(|T|,M^{\text{min-up}}_{g}-M^{\text{init-status}}_{g})} x^{\text{switch-off}}_{gi} = 0 \; \text{ if } \; M^{\text{init-status}}_{g} > 0
```
- After switching off, unit must remain offline for some amount of time
(`eq_min_downtime[g,t]`):
```math
\sum_{i=max(1,t-M^{\text{min-down}}_{g}+1)}^t x^{\text{switch-off}}_{gi} \leq 1 - x^{\text{is-on}}_{gt}
```
- Same as above, but covering the initial time steps (`eq_min_downtime[g,0]`):
```math
\sum_{i=1}^{min(|T|,M^{\text{min-down}}_{g}+M^{\text{init-status}}_{g})} x^{\text{switch-on}}_{gi} = 0 \; \text{ if } \; M^{\text{init-status}}_{g} < 0
```
- If the unit switches on, it must choose exactly one startup category
(`eq_startup_choose[g,t]`):
```math
x^{\text{switch-on}}_{gt} = \sum_{k=1}^{K^{start}_g} x^{\text{start}}_{gtk}
```
- If the unit has not switched off in the last "delay" time periods, then the
  startup category is forbidden (`eq_startup_restrict[g,t,s]`). The last startup
  category is always allowed. In the equation below, $L^{\text{start}}_{gtk}=1$
  if the category should be allowed based on the initial status.
```math
x^{\text{start}}_{gtk} \leq L^{\text{start}}_{gtk} + \sum_{i=max\left(1,\,t - M^{\text{delay}}_{g,k+1} + 1\right)}^{t - M^{\text{delay}}_{gk}} x^{\text{switch-off}}_{gi}
```
- Link the binary variables together (`eq_binary_link[g,t]`):
```math
\begin{align*}
& x^{\text{is-on}}_{gt} - x^{\text{is-on}}_{g,t-1} = x^{\text{switch-on}}_{gt} - x^{\text{switch-off}}_{gt} & \forall t > 1 \\
\end{align*}
```
- Cannot switch on and off at the same time (`eq_switch_on_off[g,t]`):
```math
x^{\text{switch-on}}_{gt} + x^{\text{switch-off}}_{gt} \leq 1
```
- If the unit is off, it cannot produce power or provide reserves. If it is on,
it must do so within the specified production limits (`eq_prod_limit[s,g,t]`):
```math
y^{\text{prod-above}}_{gts} + \sum_{r \in R_g} y^{\text{res}}_{grts} \leq
(M^{\text{pmax}}_{gt} - M^{\text{pmin}}_{gt}) x^{\text{is-on}}_{gt}
```
- Break down the "production above" variable into smaller "segment production"
variables, to simplify the objective function (`eq_prod_above_def[s,g,t]`):
```math
y^{\text{prod-above}}_{gts} = \sum_{k=1}^{K^{cost}_g} y^{\text{seg-prod}}_{gtks}
```
- Impose upper limit on segment production variables
(`eq_segprod_limit[s,g,t,k]`):
```math
0 \leq y^{\text{seg-prod}}_{gtks} \leq M^{\text{seg-pmax}}_{gtks}
```
- Unit cannot increase its production too quickly (`eq_ramp_up[s,g,t]`):
```math
y^{\text{prod-above}}_{gts} + \sum_{r \in R_g} y^{\text{res}}_{grts} \leq
y^{\text{prod-above}}_{g,t-1,s} + M^{\text{ramp-up}}_{g}
```
- Same as above, for initial time (`eq_ramp_up[s,g,1]`):
```math
y^{\text{prod-above}}_{g,1,s} + \sum_{r \in R_g} y^{\text{res}}_{gr,1,s} \leq
\left(M^{\text{init-power}}_{g} - M^{\text{pmin}}_{gt}\right) + M^{\text{ramp-up}}_{g}
```
- Unit cannot decrease its production too quickly (`eq_ramp_down[s,g,t]`):
```math
y^{\text{prod-above}}_{gts} \geq
y^{\text{prod-above}}_{g,t-1,s} - M^{\text{ramp-down}}_{g}
```
- Same as above, for initial time (`eq_ramp_down[s,g,1]`):
```math
y^{\text{prod-above}}_{g,1,s} \geq
\left(M^{\text{init-power}}_{g} - M^{\text{pmin}}_{gt}\right) - M^{\text{ramp-down}}_{g}
```
- Unit cannot produce excessive amount of power immediately after starting up
(`eq_startup_limit[s,g,t]`):
```math
y^{\text{prod-above}}_{gts} + \sum_{r \in R_g} y^{\text{res}}_{grts} \leq
(M^{\text{pmax}}_{gt} - M^{\text{pmin}}_{gt}) x^{\text{is-on}}_{gt} -
max\left\{0,M^{\text{pmax}}_{gt} - M^{\text{startup-limit}}_{g}\right\}
x^{\text{switch-on}}_{gt}
```
- Unit cannot shut off if it is producing too much power
(`eq_shutdown_limit[s,g,t]`):
```math
y^{\text{prod-above}}_{gts} \leq
(M^{\text{pmax}}_{gt} - M^{\text{pmin}}_{gt}) x^{\text{is-on}}_{gt} -
max\left\{0,M^{\text{pmax}}_{gt} - M^{\text{shutdown-limit}}_{g}\right\}
x^{\text{switch-off}}_{g,t+1}
```
## 3. Profiled generators
A _profiled generator_ is a simplified generator model that can be used to
represent renewable energy resources, including wind, solar and hydro. Unlike
thermal generators, which can be either on or off, profiled generators do not
have status variables; the only optimization decision is on their power output
level, which must remain between minimum and maximum time-varying amounts.
Production cost curves for profiled generators are linear, making them much
simpler to model than thermal units.
### Constants
| Symbol | Unit | Description |
| :---------------------- | :---- | :------------------------------------------------- |
| $M^{\text{pmax}}_{sgt}$ | MW | Maximum power output at time $t$ and scenario $s$. |
| $M^{\text{pmin}}_{sgt}$ | MW | Minimum power output at time $t$ and scenario $s$. |
| $Z^{\text{pvar}}_{sgt}$ | \$/MW | Generation cost at time $t$ and scenario $s$. |
### Decision variables
| Symbol | JuMP name | Unit | Description | Stage |
| :-------------------- | :--------------------- | :--- | :------------------------------------------------------------ | :---- |
| $y^\text{prod}_{sgt}$ | `prod_profiled[s,g,t]` | MW | Amount of power produced by $g$ in time $t$ and scenario $s$. | 2 |
### Objective function terms
- Production cost:
```math
\sum_{s \in S} p(s) \left[
\sum_{t \in T} y^\text{prod}_{sgt} Z^{\text{pvar}}_{sgt}
\right]
```
### Constraints
- Variable bounds:
```math
M^{\text{pmin}}_{sgt} \leq y^\text{prod}_{sgt} \leq M^{\text{pmax}}_{sgt}
```
## 4. Conventional loads
Loads represent the demand for electrical power by consumers and devices
connected to the system. This section describes _conventional_ (or inelastic)
loads, which are not sensitive to changes in electricity prices, and must always
be served. Each bus in the transmission network has exactly one load; multiple
loads in the same bus can be modelled by aggregating them. If there is not
enough production or transmission capacity to serve all loads, some load can be
curtailed, at a penalty.
### Constants
| Symbol | Unit | Description |
| :---------------------- | :---- | :--------------------------------------------------------- |
| $M^\text{load}_{sbt}$   | MW    | Conventional load on bus $b$ at time $t$ and scenario $s$. |
| $Z^\text{curtail}_{st}$ | \$/MW | Load curtailment penalty at time $t$ in scenario $s$. |
### Decision variables
| Symbol | JuMP name | Unit | Description | Stage |
| :----------------------- | :--------------- | :--- | :--------------------------------------------------------------- | :---- |
| $y^\text{curtail}_{sbt}$ | `curtail[s,b,t]` | MW | Amount of load curtailed at bus $b$ in time $t$ and scenario $s$ | 2 |
### Objective function terms
- Load curtailment penalty:
```math
\sum_{s \in S} p(s) \left[
\sum_{b \in B} \sum_{t \in T} y^\text{curtail}_{sbt} Z^\text{curtail}_{st}
\right]
```
### Constraints
- Variable bounds:
```math
0 \leq y^\text{curtail}_{sbt} \leq M^\text{load}_{sbt}
```
## 5. Price-sensitive loads
_Price-sensitive loads_ refer to components in the system which may increase or
reduce their power consumption according to energy prices. Unlike conventional
loads, described above, price-sensitive loads are only served if it is
economical to do so. More specifically, there are no constraints forcing these
loads to be served; instead, there is a term in the objective function rewarding
each MW served. Unlike conventional loads, there may be multiple price-sensitive
loads per bus.
!!! note
Some unit commitment models allow price-sensitive loads to have piecewise-linear convex revenue curves, similar to thermal generators. This can be achieved in UC.jl by adding multiple price-sensitive loads to the bus, one for each piecewise-linear segment, as sketched below.
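For example (a schematic sketch using ad-hoc notation): a revenue curve with $K$ segments, where segment $k$ has marginal revenue $Z_k$ and width $D_k$, can be represented by $K$ separate price-sensitive loads at the same bus, yielding the total revenue term
```math
\sum_{k=1}^{K} Z_k \, y_k, \qquad 0 \leq y_k \leq D_k,
```
where $y_k$ is the amount of power served to the $k$-th of these loads.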
### Sets and constants
| Symbol | Unit | Description |
| :--------------------------- | :---- | :--------------------------------------------------------------- |
| $M^\text{psl-demand}_{spt}$ | MW | Demand of price-sensitive load $p$ at time $t$ and scenario $s$. |
| $Z^\text{psl-revenue}_{spt}$ | \$/MW | Revenue from serving load $p$ at $t$ in scenario $s$. |
| $\text{PSL}$ | | Set of price-sensitive loads. |
### Decision variables
| Symbol | JuMP name | Unit | Description | Stage |
| :------------------- | :------------- | :--- | :------------------------------------------------ | :---- |
| $y^\text{psl}_{spt}$ | `loads[s,p,t]` | MW | Amount served to $p$ in time $t$ and scenario $s$ | 2 |
### Objective function terms
- Revenue from serving price-sensitive loads:
```math
- \sum_{s \in S} p(s) \left[
\sum_{p \in \text{PSL}} \sum_{t \in T} y^\text{psl}_{spt} Z^\text{psl-revenue}_{spt}
\right]
```
### Constraints
- Variable bounds:
```math
0 \leq y^\text{psl}_{spt} \leq M^\text{psl-demand}_{spt}
```
## 6. Energy storage
_Energy storage_ units are able to store energy during periods of low demand,
then release energy back to the grid during periods of high demand. These
devices include _batteries_, _pumped hydroelectric storage_, _compressed air
energy storage_ and _flywheels_. They are becoming increasingly important in the
modern power grid, and can help to enhance grid reliability, efficiency and
integration of renewable energy resources.
### Concepts
- **Min/max energy level and charge rate:** Energy storage units can only store
a limited amount of energy (in MWh). To maintain the operational safety and
longevity of these devices, a minimum energy level may also be imposed. The
rate (in MW) at which these units can charge and discharge is also limited,
due to chemical, physical and operational considerations.
- **Operational costs:** Charging and discharging energy storage units may incur
a cost/revenue. We assume that this cost/revenue is linear in the
charge/discharge rate ($/MW).
- **Efficiency:** Charging an energy storage unit for one hour with an input of
1 MW might not result in an increase of the energy level in the device by
exactly 1 MWh, due to various inefficiencies in the charging process,
including conversion losses and heat generation. For similar reasons,
discharging a storage unit for one hour at 1 MW might reduce the energy level
by more than 1 MWh. Furthermore, even when the unit is not charging or
discharging, some energy level may be gradually lost over time, due to
unwanted chemical reactions, thermal effects or mechanical losses.
- **Myopic effect:** Because the optimization process considers a fixed time
window, there is an inherent bias towards exploiting energy storage units to
their maximum within the window, completely ignoring their operation just
beyond this horizon. For instance, without further constraints, the
optimization algorithm will often ensure that all storage units are fully
discharged at the end of the last time step, which may not be desirable. To
mitigate this myopic effect, a minimum and maximum energy level may be imposed
at the last time step.
- **Simultaneous charging and discharging:** Depending on charge and discharge
costs/revenue, it may make sense mathematically to simultaneously charge and
discharge the storage unit, thus keeping its energy level unchanged while
potentially collecting revenue. Additional binary variables and constraints
are required to prevent this incorrect model behavior.
### Sets and constants
| Symbol | Unit | Description |
| :------------------------------------ | :---- | :---------------------------------------------------------------------------------------------------- |
| $\text{SU}$ | | Set of storage units |
| $Z^\text{charge}_{sut}$ | \$/MW | Linear charge cost/revenue for unit $u$ at time $t$ in scenario $s$. |
| $Z^\text{discharge}_{sut}$ | \$/MW | Linear discharge cost/revenue for unit $u$ at time $t$ in scenario $s$. |
| $M^\text{discharge-max}_{sut}$        | MW    | Maximum discharge rate for unit $u$ at time $t$ in scenario $s$.                                       |
| $M^\text{discharge-min}_{sut}$        | MW    | Minimum discharge rate for unit $u$ at time $t$ in scenario $s$.                                       |
| $M^\text{charge-max}_{sut}$           | MW    | Maximum charge rate for unit $u$ at time $t$ in scenario $s$.                                          |
| $M^\text{charge-min}_{sut}$           | MW    | Minimum charge rate for unit $u$ at time $t$ in scenario $s$.                                          |
| $M^\text{max-end-level}_{su}$ | MWh | Maximum storage level of unit $u$ at the last time step in scenario $s$ |
| $M^\text{min-end-level}_{su}$ | MWh | Minimum storage level of unit $u$ at the last time step in scenario $s$ |
| $\gamma^\text{loss}_{s,u,t}$ | | Self-discharge factor. |
| $\gamma^\text{charge-eff}_{s,u,t}$ | | Charging efficiency factor. |
| $\gamma^\text{discharge-eff}_{s,u,t}$ | | Discharging efficiency factor. |
| $\gamma^\text{time-step}$             |       | Length of a time step, in hours. Should be 1.0 for hourly time steps, 0.5 for 30-minute time steps, etc. |
### Decision variables
| Symbol | JuMP name | Unit | Description | Stage |
| :------------------------------ | :---------------------- | :----- | :----------------------------------------------------------- | :---- |
| $y^\text{level}_{sut}$ | `storage_level[s,u,t]` | MWh | Storage level of unit $u$ at time $t$ in scenario $s$. | 2 |
| $y^\text{charge}_{sut}$ | `charge_rate[s,u,t]` | MW | Charge rate of unit $u$ at time $t$ in scenario $s$. | 2 |
| $y^\text{discharge}_{sut}$ | `discharge_rate[s,u,t]` | MW | Discharge rate of unit $u$ at time $t$ in scenario $s$. | 2 |
| $x^\text{is-charging}_{sut}$ | `is_charging[s,u,t]` | Binary | True if unit $u$ is charging at time $t$ in scenario $s$. | 2 |
| $x^\text{is-discharging}_{sut}$ | `is_discharging[s,u,t]` | Binary | True if unit $u$ is discharging at time $t$ in scenario $s$. | 2 |
### Objective function terms
- Charge and discharge cost/revenue:
```math
\sum_{s \in S} p(s) \left[
\sum_{u \in \text{SU}} \sum_{t \in T} \left(
y^\text{charge}_{sut} Z^\text{charge}_{sut} +
y^\text{discharge}_{sut} Z^\text{discharge}_{sut}
\right)
\right]
```
### Constraints
- Prevent simultaneous charge and discharge
(`eq_simultaneous_charge_and_discharge[s,u,t]`):
```math
x^\text{is-charging}_{sut} + x^\text{is-discharging}_{sut} \leq 1
```
- Limit charge/discharge rate (`eq_min_charge_rate[s,u,t]`,
`eq_max_charge_rate[s,u,t]`, `eq_min_discharge_rate[s,u,t]` and
`eq_max_discharge_rate[s,u,t]`):
```math
\begin{align*}
y^\text{charge}_{sut} \leq x^\text{is-charging}_{sut} M^\text{charge-max}_{sut} \\
y^\text{charge}_{sut} \geq x^\text{is-charging}_{sut} M^\text{charge-min}_{sut} \\
y^\text{discharge}_{sut} \leq x^\text{is-discharging}_{sut} M^\text{discharge-max}_{sut} \\
y^\text{discharge}_{sut} \geq x^\text{is-discharging}_{sut} M^\text{discharge-min}_{sut} \\
\end{align*}
```
- Calculate current storage level (`eq_storage_transition[s,u,t]`):
```math
y^\text{level}_{sut} =
(1 - \gamma^\text{loss}_{s,u,t}) y^\text{level}_{su,t-1} +
\gamma^\text{time-step} \gamma^\text{charge-eff}_{s,u,t} y^\text{charge}_{sut} -
\frac{\gamma^\text{time-step}}{\gamma^\text{discharge-eff}_{s,u,t}} y^\text{discharge}_{sut}
```
- Enforce storage level at last time step (`eq_ending_level[s,u]`):
```math
M^\text{min-end-level}_{su} \leq y^\text{level}_{su,|T|} \leq M^\text{max-end-level}_{su}
```
## 7. Buses and transmission lines
So far, we have described generators, which produce power, loads, which consume
power, and storage units, which store energy for later use. Another important
element is the transmission network, which delivers the power produced by the
generators to the loads and storage units. Mathematically, the network is
represented as a graph $(B,L)$ where $B$ is the set of **buses** and $L$ is the
set of **transmission lines**. Each generator, load and storage unit is located
at a bus. The **net injection** at the bus is the sum of all power injected
minus withdrawn at the bus. To balance production and consumption, we must
enforce that the sum of all net injections over the entire network equals
zero at each time step.
Besides the net balance equations, we must also enforce flow limits on the
transmission lines. Unlike flows in other optimization problems, power flows are
directly determined by net injections and transmission line parameters, and must
follow physical laws. UC.jl uses the DC linearization of AC power flow
equations. Under this linearization, the flow $f_l$ in transmission line $l$ is
given by $\sum_{b \in B} \delta_{bl} n_b$, where $\delta_{bl}$ is a constant
known as _injection shift factor_ (also commonly called _power transfer
distribution factor_), computed from the line parameters, and $n_b$ is the net
injection at bus $b$.
!!! warning
To improve computational performance, power flow variables and constraints are generated on-the-fly, during `UnitCommitment.optimize!`; they are **not** added by `UnitCommitment.build_model`.
### Sets and constants
| Symbol | Unit | Description |
| :------------------------ | :---- | :---------------------------------------------------------- |
| $M^\text{limit}_{slt}$ | MW | Flow limit for line $l$ at time $t$ and scenario $s$. |
| $Z^\text{overflow}_{slt}$ | \$/MW | Overflow penalty for line $l$ at time $t$ and scenario $s$. |
| $L$ | | Set of transmission lines. |
| $B$ | | Set of buses. |
### Decision variables
| Symbol | JuMP name | Unit | Description | Stage |
| :------------------------ | :--------------------- | :--- | :-------------------------------------------------------------------- | :---- |
| $y^\text{flow}_{slt}$ | _(added on-the-fly)_ | MW | Flow in line $l$ at time $t$ and scenario $s$. | 2 |
| $y^\text{inj}_{sbt}$ | `net_injection[s,b,t]` | MW | Total net injection at bus $b$, time $t$ and scenario $s$. | 2 |
| $y^\text{overflow}_{slt}$ | `overflow[s,l,t]` | MW | Amount of flow above limit for line $l$ at time $t$ and scenario $s$. | 2 |
### Objective function terms
- Penalty for exceeding line limits:
```math
\sum_{s \in S} p(s) \left[
\sum_{l \in L} \sum_{t \in T} y^\text{overflow}_{slt} Z^\text{overflow}_{slt}
\right]
```
### Constraints
- Power produced equal power consumed (`eq_power_balance[s,t]`):
```math
\sum_{b \in B} y^\text{inj}_{sbt} = 0
```
- Definition of flow (_enforced on-the-fly_):
```math
y^\text{flow}_{slt} = \sum_{b \in B} \delta_{sbl} y^\text{inj}_{sbt}
```
- Flow limits (_enforced on-the-fly_):
```math
\begin{align*}
y^\text{flow}_{slt} & \leq M^\text{limit}_{slt} + y^\text{overflow}_{slt} \\
-y^\text{flow}_{slt} & \leq M^\text{limit}_{slt} + y^\text{overflow}_{slt}
\end{align*}
```

View File

@@ -1,43 +1,49 @@
# UnitCommitment.jl
**UnitCommitment.jl** (UC.jl) is an optimization package for the Security-Constrained Unit Commitment Problem (SCUC), a fundamental optimization problem in power systems used, for example, to clear the electricity markets. Both deterministic and two-stage stochastic versions of the problem are supported. The package provides benchmark instances for the problem, a flexible and well-documented data format for the problem, as well as Julia/JuMP implementations of state-of-the-art mixed-integer programming formulations and solution methods.
**UnitCommitment.jl** (UC.jl) is a Julia/JuMP optimization package for the Security-Constrained Unit Commitment Problem (SCUC), a fundamental optimization problem in power systems used, for example, to clear the day-ahead electricity markets. The package provides benchmark instances for the problem and Julia/JuMP implementations of state-of-the-art mixed-integer programming formulations.
## Package Components
- **Data Format:** The package proposes an extensible and fully-documented JSON-based data specification format for SCUC, developed in collaboration with Independent System Operators (ISOs), which describes the most important aspects of the problem. The format supports all the most common thermal generator characteristics (including ramping, piecewise-linear production cost curves and time-dependent startup costs), as well as profiled generators, reserves, price-sensitive loads, battery storage, transmission, and contingencies.
- **Benchmark Instances:** The package provides a diverse collection of large-scale benchmark instances collected from the literature, converted into a common data format, and extended using data-driven methods to make them more challenging and realistic.
- **Model Implementation**: The package provides Julia/JuMP implementations of state-of-the-art formulations and solution methods for the deterministic and stochastic SCUC, including multiple ramping formulations ([ArrCon2000](https://doi.org/10.1109/59.871739), [MorLatRam2013](https://doi.org/10.1109/TPWRS.2013.2251373), [DamKucRajAta2016](https://doi.org/10.1007/s10107-015-0919-9), [PanGua2016](https://doi.org/10.1287/opre.2016.1520)), piecewise-linear cost formulations ([Gar1962](https://doi.org/10.1109/AIEEPAS.1962.4501405), [CarArr2006](https://doi.org/10.1109/TPWRS.2006.876672), [KnuOstWat2018](https://doi.org/10.1109/TPWRS.2017.2783850)), contingency screening methods ([XavQiuWanThi2019](https://doi.org/10.1109/TPWRS.2019.2892620)) and decomposition methods. Our goal is to keep these implementations up-to-date as new methods are proposed in the literature.
- **Benchmark Tools:** The package provides automated benchmark scripts to accurately evaluate the performance impact of proposed code changes.
* **Data Format:** The package proposes an extensible and fully-documented JSON-based data specification format for SCUC, developed in collaboration with Independent System Operators (ISOs), which describes the most important aspects of the problem. The format supports all the most common generator characteristics (including ramping, piecewise-linear production cost curves and time-dependent startup costs), as well as operating reserves, price-sensitive loads, transmission networks and contingencies.
* **Benchmark Instances:** The package provides a diverse collection of large-scale benchmark instances collected from the literature, converted into a common data format, and extended using data-driven methods to make them more challenging and realistic.
* **Model Implementation**: The package provides Julia/JuMP implementations of state-of-the-art formulations and solution methods for SCUC, including multiple ramping formulations ([ArrCon2000](https://doi.org/10.1109/59.871739), [MorLatRam2013](https://doi.org/10.1109/TPWRS.2013.2251373), [DamKucRajAta2016](https://doi.org/10.1007/s10107-015-0919-9), [PanGua2016](https://doi.org/10.1287/opre.2016.1520)), multiple piecewise-linear cost formulations ([Gar1962](https://doi.org/10.1109/AIEEPAS.1962.4501405), [CarArr2006](https://doi.org/10.1109/TPWRS.2006.876672), [KnuOstWat2018](https://doi.org/10.1109/TPWRS.2017.2783850)) and contingency screening methods ([XavQiuWanThi2019](https://doi.org/10.1109/TPWRS.2019.2892620)). Our goal is to keep these implementations up-to-date as new methods are proposed in the literature.
* **Benchmark Tools:** The package provides automated benchmark scripts to accurately evaluate the performance impact of proposed code changes.
## Table of Contents
```@contents
Pages = ["usage.md", "format.md", "instances.md", "model.md", "api.md"]
Depth = 3
```
## Authors
- **Alinson S. Xavier** (Argonne National Laboratory)
- **Aleksandr M. Kazachkov** (University of Florida)
- **Ogün Yurdakul** (Technische Universität Berlin)
- **Jun He** (Purdue University)
- **Feng Qiu** (Argonne National Laboratory)
* **Alinson S. Xavier** (Argonne National Laboratory)
* **Aleksandr M. Kazachkov** (University of Florida)
* **Ogün Yurdakul** (Technische Universität Berlin)
* **Jun He** (Purdue University)
* **Feng Qiu** (Argonne National Laboratory)
## Acknowledgments
- We would like to thank **Yonghong Chen** (Midcontinent Independent System Operator), **Feng Pan** (Pacific Northwest National Laboratory) for valuable feedback on early versions of this package.
* We would like to thank **Yonghong Chen** (Midcontinent Independent System Operator), **Feng Pan** (Pacific Northwest National Laboratory) for valuable feedback on early versions of this package.
- Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy under Contract No. DE-AC02-06CH11357
* Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy under Contract No. DE-AC02-06CH11357
- Based upon work supported by the **U.S. Department of Energy Advanced Grid Modeling Program** under Grant DE-OE0000875.
* Based upon work supported by the **U.S. Department of Energy Advanced Grid Modeling Program** under Grant DE-OE0000875.
## Citing
If you use UnitCommitment.jl in your research (instances, models or algorithms), we kindly request that you cite the package as follows:
- **Alinson S. Xavier, Aleksandr M. Kazachkov, Ogün Yurdakul, Jun He, Feng Qiu**, "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment (Version 0.4)". Zenodo (2024). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874).
* **Alinson S. Xavier, Aleksandr M. Kazachkov, Ogün Yurdakul, Feng Qiu**, "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment (Version 0.3)". Zenodo (2022). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874).
If you use the instances, we additionally request that you cite the original sources, as described in the [instances page](guides/instances.md).
If you use the instances, we additionally request that you cite the original sources, as described in the [instances page](instances.md).
## License
```text
UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment
Copyright © 2020-2024, UChicago Argonne, LLC. All Rights Reserved.
Copyright © 2020-2022, UChicago Argonne, LLC. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted
provided that the following conditions are met:

303
docs/src/instances.md Normal file
View File

@@ -0,0 +1,303 @@
Instances
=========
UnitCommitment.jl provides a large collection of benchmark instances collected from the literature and converted to a [common data format](format.md). In some cases, as indicated below, the original instances have been extended, with realistic parameters, using data-driven methods. If you use these instances in your research, we request that you cite UnitCommitment.jl, as well as the original sources, as listed below. Benchmark instances can be loaded with `UnitCommitment.read_benchmark(name)`, as explained in the [usage section](usage.md). Instance files can also be [directly downloaded from our website](https://axavier.org/UnitCommitment.jl/0.3/instances/).
!!! warning
The instances included in UC.jl are still under development and may change in the future. If you use these instances in your research, for reproducibility, you should specify what version of UC.jl they came from.
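For example, the following snippet (a minimal sketch, assuming the Cbc solver is installed) loads one of the MATPOWER instances listed below and solves it:
```julia
using Cbc
using UnitCommitment

# Load a benchmark instance by name
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")

# Build the JuMP model and solve it
model = UnitCommitment.build_model(
    instance=instance,
    optimizer=Cbc.Optimizer,
)
UnitCommitment.optimize!(model)
```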
MATPOWER
--------
[MATPOWER](https://github.com/MATPOWER/matpower) is an open-source package for solving power flow problems in MATLAB and Octave. It contains a number of power flow test cases, which have been widely used in the power systems literature.
Because most MATPOWER test cases were originally designed for power flow studies, they lack a number of important unit commitment parameters, such as time-varying loads, production cost curves, ramp limits, reserves and initial conditions. The test cases included in UnitCommitment.jl are extended versions of the original MATPOWER test cases, modified as follows:
* **Production cost** curves were generated using a data-driven approach, based on publicly available data. More specifically, machine learning models were trained to predict typical production cost curves, for each day of the year, based on a generator's maximum and minimum power output.
* **Load profiles** were generated using a similar data-driven approach.
* **Ramp-up, ramp-down, startup and shutdown rates** were set to a fixed proportion of the generator's maximum output.
* **Minimum reserves** were set to a fixed proportion of the total demand.
* **Contingencies** were set to include all N-1 transmission line contingencies that do not generate islands or isolated buses. More specifically, there is one contingency for each transmission line, as long as that transmission line is not a bridge in the network graph.
For each MATPOWER test case, UC.jl provides 365 variations (`2017-01-01` to `2017-12-31`), corresponding to different days of the year.
### MATPOWER/UW-PSTCA
A variety of smaller IEEE test cases, [compiled by University of Washington](http://labs.ece.uw.edu/pstca/), corresponding mostly to small portions of the American Electric Power System in the 1960s.
| Name | Buses | Generators | Lines | Contingencies | References |
|------|-------|------------|-------|---------------|--------|
| `matpower/case14/2017-01-01` | 14 | 5 | 20 | 19 | [MTPWR, PSTCA]
| `matpower/case30/2017-01-01` | 30 | 6 | 41 | 38 | [MTPWR, PSTCA]
| `matpower/case57/2017-01-01` | 57 | 7 | 80 | 79 | [MTPWR, PSTCA]
| `matpower/case118/2017-01-01` | 118 | 54 | 186 | 177 | [MTPWR, PSTCA]
| `matpower/case300/2017-01-01` | 300 | 69 | 411 | 320 | [MTPWR, PSTCA]
### MATPOWER/Polish
Test cases based on the Polish 400, 220 and 110 kV networks, originally provided by **Roman Korab** (Politechnika Śląska) and corrected by the MATPOWER team.
| Name | Buses | Generators | Lines | Contingencies | References |
|------|-------|------------|-------|---------------|--------|
| `matpower/case2383wp/2017-01-01` | 2383 | 323 | 2896 | 2240 | [MTPWR]
| `matpower/case2736sp/2017-01-01` | 2736 | 289 | 3504 | 3159 | [MTPWR]
| `matpower/case2737sop/2017-01-01` | 2737 | 267 | 3506 | 3161 | [MTPWR]
| `matpower/case2746wop/2017-01-01` | 2746 | 443 | 3514 | 3155 | [MTPWR]
| `matpower/case2746wp/2017-01-01` | 2746 | 457 | 3514 | 3156 | [MTPWR]
| `matpower/case3012wp/2017-01-01` | 3012 | 496 | 3572 | 2854 | [MTPWR]
| `matpower/case3120sp/2017-01-01` | 3120 | 483 | 3693 | 2950 | [MTPWR]
| `matpower/case3375wp/2017-01-01` | 3374 | 590 | 4161 | 3245 | [MTPWR]
### MATPOWER/PEGASE
Test cases from the [Pan European Grid Advanced Simulation and State Estimation (PEGASE) project](https://cordis.europa.eu/project/id/211407), describing part of the European high voltage transmission network.
| Name | Buses | Generators | Lines | Contingencies | References |
|------|-------|------------|-------|---------------|--------|
| `matpower/case89pegase/2017-01-01` | 89 | 12 | 210 | 192 | [JoFlMa16, FlPaCa13, MTPWR]
| `matpower/case1354pegase/2017-01-01` | 1354 | 260 | 1991 | 1288 | [JoFlMa16, FlPaCa13, MTPWR]
| `matpower/case2869pegase/2017-01-01` | 2869 | 510 | 4582 | 3579 | [JoFlMa16, FlPaCa13, MTPWR]
| `matpower/case9241pegase/2017-01-01` | 9241 | 1445 | 16049 | 13932 | [JoFlMa16, FlPaCa13, MTPWR]
| `matpower/case13659pegase/2017-01-01` | 13659 | 4092 | 20467 | 13932 | [JoFlMa16, FlPaCa13, MTPWR]
### MATPOWER/RTE
Test cases from the R&D Division at [Reseau de Transport d'Electricite](https://www.rte-france.com) representing the size and complexity of the French very high voltage transmission network.
| Name | Buses | Generators | Lines | Contingencies | References |
|------|-------|------------|-------|---------------|--------|
| `matpower/case1888rte/2017-01-01` | 1888 | 296 | 2531 | 1484 | [MTPWR, JoFlMa16]
| `matpower/case1951rte/2017-01-01` | 1951 | 390 | 2596 | 1497 | [MTPWR, JoFlMa16]
| `matpower/case2848rte/2017-01-01` | 2848 | 544 | 3776 | 2242 | [MTPWR, JoFlMa16]
| `matpower/case2868rte/2017-01-01` | 2868 | 596 | 3808 | 2260 | [MTPWR, JoFlMa16]
| `matpower/case6468rte/2017-01-01` | 6468 | 1262 | 9000 | 6094 | [MTPWR, JoFlMa16]
| `matpower/case6470rte/2017-01-01` | 6470 | 1306 | 9005 | 6085 | [MTPWR, JoFlMa16]
| `matpower/case6495rte/2017-01-01` | 6495 | 1352 | 9019 | 6060 | [MTPWR, JoFlMa16]
| `matpower/case6515rte/2017-01-01` | 6515 | 1368 | 9037 | 6063 | [MTPWR, JoFlMa16]
PGLIB-UC Instances
------------------
[PGLIB-UC](https://github.com/power-grid-lib/pglib-uc) is a benchmark library curated and maintained by the [IEEE PES Task Force on Benchmarks for Validation of Emerging Power System Algorithms](https://power-grid-lib.github.io/). These test cases have been used in [KnOsWa20].
### PGLIB-UC/California
Test cases based on publicly available data from the California ISO. For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
| Name | Buses | Generators | Lines | Contingencies | References |
|------|-------|------------|-------|---------------|--------|
| `pglib-uc/ca/2014-09-01_reserves_0` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2014-09-01_reserves_1` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2014-09-01_reserves_3` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2014-09-01_reserves_5` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2014-12-01_reserves_0` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2014-12-01_reserves_1` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2014-12-01_reserves_3` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2014-12-01_reserves_5` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2015-03-01_reserves_0` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2015-03-01_reserves_1` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2015-03-01_reserves_3` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2015-03-01_reserves_5` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2015-06-01_reserves_0` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2015-06-01_reserves_1` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2015-06-01_reserves_3` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/2015-06-01_reserves_5` | 1 | 610 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/Scenario400_reserves_0` | 1 | 611 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/Scenario400_reserves_1` | 1 | 611 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/Scenario400_reserves_3` | 1 | 611 | 0 | 0 | [KnOsWa20]
| `pglib-uc/ca/Scenario400_reserves_5` | 1 | 611 | 0 | 0 | [KnOsWa20]
### PGLIB-UC/FERC
Test cases based on a publicly available [unit commitment test case produced by the Federal Energy Regulatory Commission](https://www.ferc.gov/industries-data/electric/power-sales-and-markets/increasing-efficiency-through-improved-software-1). For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
| Name | Buses | Generators | Lines | Contingencies | References |
|------|-------|------------|-------|---------------|--------|
| `pglib-uc/ferc/2015-01-01_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-01-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-02-01_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-02-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-03-01_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-03-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-04-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-04-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-05-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-05-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-06-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-06-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-07-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-07-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-08-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-08-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-09-01_hw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-09-01_lw` | 1 | 979 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-10-01_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-10-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-11-02_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-11-02_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-12-01_hw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
| `pglib-uc/ferc/2015-12-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
### PGLIB-UC/RTS-GMLC
[RTS-GMLC](https://github.com/GridMod/RTS-GMLC) is an updated version of the RTS-96 test system produced by the United States Department of Energy's [Grid Modernization Laboratory Consortium](https://gmlc.doe.gov/). The PGLIB-UC/RTS-GMLC instances are modified versions of the original RTS-GMLC instances, with modified ramp-rates and without a transmission network. For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
| Name | Buses | Generators | Lines | Contingencies | References |
|------|-------|------------|-------|---------------|--------|
| `pglib-uc/rts_gmlc/2020-01-27` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-02-09` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-03-05` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-04-03` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-05-05` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-06-09` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-07-06` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-08-12` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-09-20` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-10-27` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-11-25` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-12-23` | 1 | 154 | 0 | 0 | [BaBlEh19]
OR-LIB/UC
---------
[OR-LIB](http://people.brunel.ac.uk/~mastjjb/jeb/info.html) is a collection of test data sets for a variety of operations research problems, including unit commitment. The UC instances in OR-LIB are synthetic instances generated by a [random problem generator](http://groups.di.unipi.it/optimize/Data/UC.html) developed by the [Operations Research Group at University of Pisa](http://groups.di.unipi.it/optimize/). These test cases have been used in [FrGe06] and many other publications.
| Name | Hours | Buses | Generators | Lines | Contingencies | References |
|------|-------|-------|------------|-------|---------------|------------|
| `or-lib/10_0_1_w` | 24 | 1 | 10 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/10_0_2_w` | 24 | 1 | 10 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/10_0_3_w` | 24 | 1 | 10 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/10_0_4_w` | 24 | 1 | 10 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/10_0_5_w` | 24 | 1 | 10 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/20_0_1_w` | 24 | 1 | 20 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/20_0_2_w` | 24 | 1 | 20 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/20_0_3_w` | 24 | 1 | 20 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/20_0_4_w` | 24 | 1 | 20 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/20_0_5_w` | 24 | 1 | 20 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/50_0_1_w` | 24 | 1 | 50 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/50_0_2_w` | 24 | 1 | 50 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/50_0_3_w` | 24 | 1 | 50 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/50_0_4_w` | 24 | 1 | 50 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/50_0_5_w` | 24 | 1 | 50 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/75_0_1_w` | 24 | 1 | 75 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/75_0_2_w` | 24 | 1 | 75 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/75_0_3_w` | 24 | 1 | 75 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/75_0_4_w` | 24 | 1 | 75 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/75_0_5_w` | 24 | 1 | 75 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/100_0_1_w` | 24 | 1 | 100 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/100_0_2_w` | 24 | 1 | 100 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/100_0_3_w` | 24 | 1 | 100 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/100_0_4_w` | 24 | 1 | 100 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/100_0_5_w` | 24 | 1 | 100 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/150_0_1_w` | 24 | 1 | 150 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/150_0_2_w` | 24 | 1 | 150 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/150_0_3_w` | 24 | 1 | 150 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/150_0_4_w` | 24 | 1 | 150 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/150_0_5_w` | 24 | 1 | 150 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_10_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_11_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_12_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_1_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_2_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_3_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_4_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_5_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_6_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_7_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_8_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_9_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
Tejada19
--------
Test cases used in [TeLuSa19]. These instances are similar to OR-LIB/UC, in the sense that they use the same random problem generator, but are much larger.
| Name | Hours | Buses | Generators | Lines | Contingencies | References |
|------|-------|-------|------------|-------|---------------|------------|
| `tejada19/UC_24h_214g` | 24 | 1 | 214 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_250g` | 24 | 1 | 250 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_290g` | 24 | 1 | 290 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_480g` | 24 | 1 | 480 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_505g` | 24 | 1 | 505 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_623g` | 24 | 1 | 623 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_647g` | 24 | 1 | 647 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_836g` | 24 | 1 | 836 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_850g` | 24 | 1 | 850 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_918g` | 24 | 1 | 918 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_931g` | 24 | 1 | 931 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_940g` | 24 | 1 | 940 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_957g` | 24 | 1 | 957 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_959g` | 24 | 1 | 959 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1069g` | 24 | 1 | 1069 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1130g` | 24 | 1 | 1130 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1376g` | 24 | 1 | 1376 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1393g` | 24 | 1 | 1393 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1577g` | 24 | 1 | 1577 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1615g` | 24 | 1 | 1615 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1632g` | 24 | 1 | 1632 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1768g` | 24 | 1 | 1768 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1804g` | 24 | 1 | 1804 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1820g` | 24 | 1 | 1820 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1823g` | 24 | 1 | 1823 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_24h_1888g` | 24 | 1 | 1888 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_36g` | 168 | 1 | 36 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_38g` | 168 | 1 | 38 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_40g` | 168 | 1 | 40 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_53g` | 168 | 1 | 53 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_58g` | 168 | 1 | 58 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_59g` | 168 | 1 | 59 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_72g` | 168 | 1 | 72 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_84g` | 168 | 1 | 84 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_86g` | 168 | 1 | 86 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_88g` | 168 | 1 | 88 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_93g` | 168 | 1 | 93 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_105g` | 168 | 1 | 105 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_110g` | 168 | 1 | 110 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_125g` | 168 | 1 | 125 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_130g` | 168 | 1 | 130 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_131g` | 168 | 1 | 131 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_140g` | 168 | 1 | 140 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_165g` | 168 | 1 | 165 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_175g` | 168 | 1 | 175 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_179g` | 168 | 1 | 179 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_188g` | 168 | 1 | 188 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_192g` | 168 | 1 | 192 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_199g` | 168 | 1 | 199 | 0 | 0 | [TeLuSa19]
References
----------
* [UCJL] **Alinson S. Xavier, Aleksandr M. Kazachkov, Ogün Yurdakul, Feng Qiu.** "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment (Version 0.3)". Zenodo (2022). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874)
* [KnOsWa20] **Bernard Knueven, James Ostrowski and Jean-Paul Watson.** "On Mixed-Integer Programming Formulations for the Unit Commitment Problem". INFORMS Journal on Computing (2020). [DOI: 10.1287/ijoc.2019.0944](https://doi.org/10.1287/ijoc.2019.0944)
* [KrHiOn12] **Eric Krall, Michael Higgins and Richard P. O'Neill.** "RTO unit commitment test system." Federal Energy Regulatory Commission. Available at: <https://www.ferc.gov/industries-data/electric/power-sales-and-markets/increasing-efficiency-through-improved-software-1> (Accessed: Nov 14, 2020)
* [BaBlEh19] **Clayton Barrows, Aaron Bloom, Ali Ehlen, Jussi Ikaheimo, Jennie Jorgenson, Dheepak Krishnamurthy, Jessica Lau et al.** "The IEEE Reliability Test System: A Proposed 2019 Update." IEEE Transactions on Power Systems (2019). [DOI: 10.1109/TPWRS.2019.2925557](https://doi.org/10.1109/TPWRS.2019.2925557)
* [JoFlMa16] **C. Josz, S. Fliscounakis, J. Maeght, and P. Panciatici.** "AC Power Flow Data in MATPOWER and QCQP Format: iTesla, RTE Snapshots, and PEGASE". [ArXiv (2016)](https://arxiv.org/abs/1603.01533).
* [FlPaCa13] **S. Fliscounakis, P. Panciatici, F. Capitanescu, and L. Wehenkel.** "Contingency ranking with respect to overloads in very large power systems taking into account uncertainty, preventive and corrective actions", Power Systems, IEEE Trans. on, (28)4:4909-4917, 2013. [DOI: 10.1109/TPWRS.2013.2251015](https://doi.org/10.1109/TPWRS.2013.2251015)
* [MTPWR] **R. D. Zimmerman, C. E. Murillo-Sánchez and R. J. Thomas.** "MATPOWER: Steady-state operations, planning, and analysis tools for power systems research and education", IEEE Transactions on Power Systems, vol. 26, no. 1, pp. 12-19, Feb. 2011. [DOI: 10.1109/TPWRS.2010.2051168](https://doi.org/10.1109/TPWRS.2010.2051168)
* [PSTCA] **University of Washington, Dept. of Electrical Engineering.** "Power Systems Test Case Archive". Available at: <http://www.ee.washington.edu/research/pstca/> (Accessed: Nov 14, 2020)
* [ORLIB] **J.E.Beasley.** "OR-Library: distributing test problems by electronic mail", Journal of the Operational Research Society 41(11) (1990). [DOI: 10.2307/2582903](https://doi.org/10.2307/2582903)
* [FrGe06] **A. Frangioni, C. Gentile.** "Solving nonlinear single-unit commitment problems with ramping constraints" Operations Research 54(4), p. 767 - 775, 2006. [DOI: 10.1287/opre.1060.0309](https://doi.org/10.1287/opre.1060.0309)
* [TeLuSa19] **D. A. Tejada-Arango, S. Lumbreras, P. Sanchez-Martin and A. Ramos.** "Which Unit-Commitment Formulation is Best? A Systematic Comparison," in IEEE Transactions on Power Systems. [DOI: 10.1109/TPWRS.2019.2962024](https://ieeexplore.ieee.org/document/8941313/).

200
docs/src/model.md Normal file
View File

@@ -0,0 +1,200 @@
JuMP Model
==========
On this page, we describe the JuMP optimization model produced by the function `UnitCommitment.build_model`. A detailed understanding of this model is not necessary if you are just interested in using the package to solve some standard unit commitment cases, but it may be useful, for example, if you need to solve a slightly different problem, with additional variables and constraints. The notation on this page generally follows [KnOsWa20].
Decision variables
------------------
### Generators
#### Thermal Units
Name | Symbol | Description | Unit
:-----|:--------:|:-------------|:------:
`is_on[g,t]` | $u_{g}(t)$ | True if generator `g` is on at time `t`. | Binary
`switch_on[g,t]` | $v_{g}(t)$ | True if generator `g` switches on at time `t`. | Binary
`switch_off[g,t]` | $w_{g}(t)$ | True if generator `g` switches off at time `t`. | Binary
`prod_above[g,t]` |$p'_{g}(t)$ | Amount of power produced by generator `g` above its minimum power output at time `t`. For example, if the minimum power of generator `g` is 100 MW and `g` is producing 115 MW of power at time `t`, then `prod_above[g,t]` equals `15.0`. | MW
`segprod[g,t,k]` | $p^k_g(t)$ | Amount of power from piecewise linear segment `k` produced by generator `g` at time `t`. For example, if cost curve for generator `g` is defined by the points `(100, 1400)`, `(110, 1600)`, `(130, 2200)` and `(135, 2400)`, and if the generator is producing 115 MW of power at time `t`, then `segprod[g,t,:]` equals `[10.0, 5.0, 0.0]`.| MW
`reserve[r,g,t]` | $r_g(t)$ | Amount of reserve `r` provided by unit `g` at time `t`. | MW
`startup[g,t,s]` | $\delta^s_g(t)$ | True if generator `g` switches on at time `t` incurring start-up costs from start-up category `s`. | Binary
#### Profiled Units
Name | Symbol | Description | Unit
:-----|:------:|:-------------|:------:
`prod_profiled[s,t]` | $p^{\dagger}_{g}(t)$ | Amount of power produced by profiled unit `g` at time `t`. | MW
### Buses
Name | Symbol | Description | Unit
:-----|:------:|:-------------|:------:
`net_injection[b,t]` | $n_b(t)$ | Net injection at bus `b` at time `t`. | MW
`curtail[b,t]` | $s^+_b(t)$ | Amount of load curtailed at bus `b` at time `t` | MW
### Price-sensitive loads
Name | Symbol | Description | Unit
:-----|:------:|:-------------|:------:
`loads[s,t]` | $d_{s}(t)$ | Amount of power served to price-sensitive load `s` at time `t`. | MW
### Transmission lines
Name | Symbol | Description | Unit
:-----|:------:|:-------------|:------:
`flow[l,t]` | $f_l(t)$ | Power flow on line `l` at time `t`. | MW
`overflow[l,t]` | $f^+_l(t)$ | Amount of flow above the limit for line `l` at time `t`. | MW
!!! warning
Since transmission and N-1 security constraints are enforced in a lazy way, most of the `flow[l,t]` variables are never added to the model. Accessing `model[:flow][l,t]` without first checking that the variable exists will likely generate an error.
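One defensive way to query these lazily-created variables is to wrap the access in a `try`/`catch` block, as in the sketch below, which assumes `model` has already been built and optimized as in the script above; the line name `"l1"` is illustrative only.
```julia
# Attempt to read the optimal flow on line "l1" at time 1. If the variable
# was never created during optimization, fall back to `nothing`.
flow_value = try
    value(model[:flow]["l1", 1])
catch
    nothing
end
```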
Objective function
------------------
TODO
Constraints
-----------
TODO
Inspecting and modifying the model
----------------------------------
### Accessing decision variables
After building a model using `UnitCommitment.build_model`, it is possible to obtain a reference to the decision variables by calling `model[:varname][index]`. For example, `model[:is_on]["g1",1]` returns a direct reference to the JuMP variable indicating whether generator named "g1" is on at time 1. The script below illustrates how to build a model, solve it and display the solution without using the function `UnitCommitment.solution`.
```julia
using Cbc
using Printf
using JuMP
using UnitCommitment
# Load benchmark instance
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")
# Build JuMP model
model = UnitCommitment.build_model(
instance=instance,
optimizer=Cbc.Optimizer,
)
# Solve the model
UnitCommitment.optimize!(model)
# Display commitment status
for g in instance.units
for t in 1:instance.time
@printf(
"%-10s %5d %5.1f %5.1f %5.1f\n",
g.name,
t,
value(model[:is_on][g.name, t]),
value(model[:switch_on][g.name, t]),
value(model[:switch_off][g.name, t]),
)
end
end
```
### Fixing variables, modifying objective function and adding constraints
Since we now have a direct reference to the JuMP decision variables, it is possible to fix variables, change the coefficients in the objective function, or even add new constraints to the model before solving it. The script below shows how this can be accomplished. For more information on modifying an existing model, [see the JuMP documentation](https://jump.dev/JuMP.jl/stable/manual/variables/).
```julia
using Cbc
using JuMP
using UnitCommitment
# Load benchmark instance
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")
# Construct JuMP model
model = UnitCommitment.build_model(
instance=instance,
optimizer=Cbc.Optimizer,
)
# Fix a decision variable to 1.0
JuMP.fix(
model[:is_on]["g1",1],
1.0,
force=true,
)
# Change the objective function
JuMP.set_objective_coefficient(
model,
model[:switch_on]["g2",1],
1000.0,
)
# Create a new constraint
@constraint(
model,
model[:is_on]["g3",1] + model[:is_on]["g4",1] <= 1,
)
# Solve the model
UnitCommitment.optimize!(model)
```
### Adding new component to a bus
The following snippet shows how to add a new grid component to a particular bus. For each time step, we create decision variables for the new grid component, add these variables to the objective function, then attach the component to a particular bus by modifying some existing model constraints.
```julia
using Cbc
using JuMP
using UnitCommitment
# Load instance and build base model
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")
model = UnitCommitment.build_model(
instance=instance,
optimizer=Cbc.Optimizer,
)
# Get the number of time steps in the original instance
T = instance.time
# Create decision variables for the new grid component.
# In this example, we assume that the new component can
# inject up to 10 MW of power at each time step, so we
# create new continuous variables 0 ≤ x[t] ≤ 10.
@variable(model, x[1:T], lower_bound=0.0, upper_bound=10.0)
# For each time step
for t in 1:T
# Add production costs to the objective function.
# In this example, we assume a cost of $5/MW.
set_objective_coefficient(model, x[t], 5.0)
# Attach the new component to bus b1, by modifying the
# constraint `eq_net_injection`.
set_normalized_coefficient(
model[:eq_net_injection]["b1", t],
x[t],
1.0,
)
end
# Solve the model
UnitCommitment.optimize!(model)
# Show optimal values for the x variables
@show value.(x)
```
References
----------
* [KnOsWa20] **Bernard Knueven, James Ostrowski and Jean-Paul Watson.** "On Mixed-Integer Programming Formulations for the Unit Commitment Problem". INFORMS Journal on Computing (2020). [DOI: 10.1287/ijoc.2019.0944](https://doi.org/10.1287/ijoc.2019.0944)

View File

@@ -1,122 +0,0 @@
# # Model customization
# In the previous tutorial, we used UnitCommitment.jl to solve benchmark and user-provided instances using a default mathematical formulation for the problem. In this tutorial, we will explore how to customize this formulation.
# !!! warning
# This tutorial is not required for using UnitCommitment.jl, unless you plan to make changes to the problem formulation. On this page, we assume familiarity with the JuMP modeling language. Please see [JuMP's official documentation](https://jump.dev/JuMP.jl/stable/) for resources on getting started with JuMP.
# ## Selecting modeling components
# By default, `UnitCommitment.build_model` uses a formulation that combines modeling components from different publications, and that has been carefully tested, using our own benchmark scripts, to provide good performance across a wide variety of instances. This default formulation is expected to change over time, as new methods are proposed in the literature. You can, however, construct your own formulation, based on the modeling components that you choose, as shown in the next example.
# We start by importing the necessary packages and reading a benchmark instance:
using HiGHS
using JuMP
using UnitCommitment
instance = UnitCommitment.read_benchmark("matpower/case14/2017-01-01");
# Next, instead of calling `UnitCommitment.build_model` with default arguments, we can provide a `UnitCommitment.Formulation` object, which describes what modeling components to use and how they should be configured. For a complete list of modeling components available in UnitCommitment.jl, see the [API docs](../api.md).
# In the example below, we switch to the piecewise-linear cost model defined in [KnuOstWat2018](https://doi.org/10.1109/TPWRS.2017.2783850), as well as the ramping and startup cost formulations defined in [MorLatRam2013](https://doi.org/10.1109/TPWRS.2013.2251373). In addition, we specify custom cutoffs for the shift factors formulation.
model = UnitCommitment.build_model(
instance = instance,
optimizer = HiGHS.Optimizer,
formulation = UnitCommitment.Formulation(
pwl_costs = UnitCommitment.KnuOstWat2018.PwlCosts(),
ramping = UnitCommitment.MorLatRam2013.Ramping(),
startup_costs = UnitCommitment.MorLatRam2013.StartupCosts(),
transmission = UnitCommitment.ShiftFactorsFormulation(
isf_cutoff = 0.008,
lodf_cutoff = 0.003,
),
),
);
# ## Accessing decision variables
# In the previous tutorial, we saw how to access the optimal solution through `UnitCommitment.solution`. While this approach works well for basic usage, it is also possible to get a direct reference to the JuMP decision variables and query their values, as the next example illustrates.
# First, we load a benchmark instance and solve it, as before.
instance = UnitCommitment.read_benchmark("matpower/case14/2017-01-01");
model =
UnitCommitment.build_model(instance = instance, optimizer = HiGHS.Optimizer);
UnitCommitment.optimize!(model)
# At this point, it is possible to obtain a reference to the decision variables by calling `model[:varname][index]`. For example, `model[:is_on]["g1",1]` returns a direct reference to the JuMP variable indicating whether generator named "g1" is on at time 1. For a complete list of the decision variables available, and how they are indexed, see the [problem definition](../guides/problem.md).
@show JuMP.value(model[:is_on]["g1", 1])
# To access second-stage decisions, it is necessary to specify the scenario name. UnitCommitment.jl models deterministic instances as a particular case in which there is a single scenario named "s1", so we need to use this key.
@show JuMP.value(model[:prod_above]["s1", "g1", 1])
# ## Modifying variables and constraints
# When testing variations of the unit commitment problem, it is often necessary to modify the objective function, variables and constraints of the formulation. UnitCommitment.jl makes this process relatively easy. The first step is to construct the standard model using `UnitCommitment.build_model`:
instance = UnitCommitment.read_benchmark("matpower/case14/2017-01-01");
model =
UnitCommitment.build_model(instance = instance, optimizer = HiGHS.Optimizer);
# Now, before calling `UnitCommitment.optimize!`, we can make any desired changes to the formulation. In the previous section, we saw how to obtain a direct reference to the decision variables. It is possible to modify them by using standard JuMP methods. For example, to fix the commitment status of a particular generator, we can use `JuMP.fix`:
JuMP.fix(model[:is_on]["g1", 1], 1.0, force = true)
# To modify the cost coefficient of a particular variable, we can use `JuMP.set_objective_coefficient`:
JuMP.set_objective_coefficient(model, model[:switch_on]["g1", 1], 1000.0)
# It is also possible to make changes to the set of constraints. For example, we can add a custom constraint, using the `JuMP.@constraint` macro:
@constraint(model, model[:is_on]["g3", 1] + model[:is_on]["g4", 1] <= 1,);
# We can also remove an existing model constraint using `JuMP.delete`. See the [problem definition](../guides/problem.md) for a list of constraint names and indices.
JuMP.delete(model, model[:eq_min_uptime]["g1", 1])
# After we are done with all changes, we can call `UnitCommitment.optimize!` and extract the optimal solution:
UnitCommitment.optimize!(model)
@show UnitCommitment.solution(model)
# ## Modeling new grid components
# In this section we demonstrate how to add a new grid component to a particular bus in the network. This is useful, for example, when developing formulations for a new type of generator, energy storage, or any other grid device. We start by reading the instance data and building a standard model:
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")
model =
UnitCommitment.build_model(instance = instance, optimizer = HiGHS.Optimizer);
# Next, we create decision variables for the new grid component. In this example, we assume that the new component can inject up to 10 MW of power at each time step, so we create new continuous variables $0 \leq x_t \leq 10$.
T = instance.time
@variable(model, x[1:T], lower_bound = 0.0, upper_bound = 10.0);
# Next, we add the production costs to the objective function. In this example, we assume a generation cost of \$5/MW:
for t in 1:T
set_objective_coefficient(model, x[t], 5.0)
end
# We then attach the new component to bus `b1` by modifying the net injection constraint (`eq_net_injection`):
for t in 1:T
set_normalized_coefficient(
model[:eq_net_injection]["s1", "b1", t],
x[t],
1.0,
)
end
# Next, we solve the model:
UnitCommitment.optimize!(model)
# We then finally extract the optimal value of the $x$ variables:
@show value.(x)

View File

@@ -1,105 +0,0 @@
# Decomposition methods
## 1. Time decomposition for production cost modeling
Solving unit commitment instances with long time horizons (for example, year-long 8760-hour instances in production cost modeling) requires a substantial amount of computational power. To address this issue, UC.jl provides a time decomposition method, available through the `optimize!` function, which breaks the instance down into multiple overlapping sub-problems, solves them sequentially, and then reassembles the solution.
The `optimize!` function takes five parameters: a unit commitment instance, a `TimeDecomposition` method, an optimizer, and two optional functions, `after_build` and `after_optimize`. It returns a solution dictionary. The `TimeDecomposition` method itself requires four arguments: `time_window`, `time_increment`, `inner_method` (optional), and `formulation` (optional). These define, respectively, the time window of each sub-problem, the time increment used to advance to the next sub-problem, the method used to solve each sub-problem, and the formulation employed. The `after_build` and `after_optimize` functions are invoked after each sub-model is built and optimized, respectively. The `after_build` function must accept two arguments, `model` and `instance`, while `after_optimize` must accept three arguments: `solution`, `model`, and `instance`.
The code snippet below illustrates an example of solving an instance by decomposing the model into multiple 36-hour sub-problems using the `XavQiuWanThi2019` method. Each sub-problem advances 24 hours at a time. The first sub-problem covers time steps 1 to 36, the second covers time steps 25 to 60, the third covers time steps 49 to 84, and so on. The initial power levels and statuses of the second and subsequent sub-problems are set based on the results of the first 24 hours from each of their immediate prior sub-problems. In essence, this approach addresses the complexity of solving a large problem by tackling it in 24-hour intervals, while incorporating an additional 12-hour buffer to mitigate the closing window effect for each sub-problem. Furthermore, the `after_build` function imposes the restriction that `g3` and `g4` cannot be activated simultaneously during the initial time slot of each sub-problem. On the other hand, the `after_optimize` function is invoked to calculate the conventional Locational Marginal Prices (LMPs) for each sub-problem, and subsequently appends the computed values to the `lmps` vector.
!!! warning
    Specifying `TimeDecomposition` as the value of the `inner_method` field of another `TimeDecomposition` causes errors when calling the `optimize!` function, due to the different argument structures of the two `optimize!` methods.
```julia
using UnitCommitment, JuMP, Cbc, HiGHS
import UnitCommitment:
TimeDecomposition,
ConventionalLMP,
XavQiuWanThi2019,
Formulation
# specifying the after_build and after_optimize functions
function after_build(model, instance)
@constraint(
model,
model[:is_on]["g3", 1] + model[:is_on]["g4", 1] <= 1,
)
end
lmps = []
function after_optimize(solution, model, instance)
lmp = UnitCommitment.compute_lmp(
model,
ConventionalLMP(),
optimizer = HiGHS.Optimizer,
)
return push!(lmps, lmp)
end
# assume the instance is given as a 120h problem
instance = UnitCommitment.read("instance.json")
solution = UnitCommitment.optimize!(
instance,
TimeDecomposition(
time_window = 36, # solve 36h problems
time_increment = 24, # advance by 24h each time
inner_method = XavQiuWanThi2019.Method(),
formulation = Formulation(),
),
optimizer = Cbc.Optimizer,
after_build = after_build,
after_optimize = after_optimize,
)
```
## 2. Scenario decomposition with Progressive Hedging for stochastic UC
By default, UC.jl uses the Extensive Form (EF) when solving stochastic instances. This approach involves constructing a single JuMP model that contains data and decision variables for all scenarios. Although EF has optimality guarantees and performs well with small test cases, it can become computationally intractable for large instances or a substantial number of scenarios.
Progressive Hedging (PH) is an alternative (heuristic) solution method provided by UC.jl in which the problem is decomposed into smaller scenario-based subproblems, which are then solved in parallel in separate Julia processes, potentially across multiple machines. Quadratic penalty terms are used to enforce convergence of the first-stage decision variables. The method is closely related to the Alternating Direction Method of Multipliers (ADMM) and can handle larger instances, although it is not guaranteed to converge to the optimal solution. Our implementation of PH relies on the Message Passing Interface (MPI) for communication. We refer to the [MPI.jl documentation](https://github.com/JuliaParallel/MPI.jl) for more details on installing MPI.
The following example shows how to solve SCUC instances using progressive hedging. The script should be saved in a file, say `ph.jl`, and executed using `mpiexec -n <num-scenarios> julia ph.jl`.
```julia
using HiGHS
using MPI
using UnitCommitment
using Glob
# 1. Initialize MPI
MPI.Init()
# 2. Configure progressive hedging method
ph = UnitCommitment.ProgressiveHedging()
# 3. Read problem instance
instance = UnitCommitment.read(["example/s1.json", "example/s2.json"], ph)
# 4. Build JuMP model
model = UnitCommitment.build_model(
instance = instance,
optimizer = HiGHS.Optimizer,
)
# 5. Run the decentralized optimization algorithm
UnitCommitment.optimize!(model, ph)
# 6. Fetch the solution
solution = UnitCommitment.solution(model, ph)
# 7. Close MPI
MPI.Finalize()
```
When using PH, the model can be customized as usual, with different formulations or additional user-provided constraints. Note that `read`, in this case, takes `ph` as an argument. This allows each Julia process to read only the instance files that are relevant to it. Similarly, the `solution` function gathers the optimal solution of each process and returns a combined dictionary.
Each process solves a sub-problem with $\frac{s}{p}$ scenarios, where $s$ is the total number of scenarios and $p$ is the number of MPI processes. For instance, if we have 15 scenario files and 5 processes, then each process will solve a JuMP model that contains data for 3 scenarios. If the total number of scenarios is not divisible by the number of processes, then an error will be thrown.
!!! warning
Currently, PH can handle only equiprobable scenarios. Further, `solution(model, ph)` can only handle cases where only one scenario is modeled in each process.

View File

@@ -1,57 +0,0 @@
# # Locational Marginal Prices
# Locational Marginal Prices (LMPs) refer to the cost of supplying electricity at specific locations of the network. LMPs are crucial for the operation of electricity markets and have many other applications, such as indicating which areas of the network may require additional generation or transmission capacity. UnitCommitment.jl implements two methods for calculating LMPs: Conventional LMPs and Approximate Extended LMPs (AELMPs). In this tutorial, we introduce each method and illustrate their usage.
# ### Conventional LMPs
# Conventional LMPs work by (1) solving the original SCUC problem, (2) fixing all binary variables to their optimal values, and (3) re-solving the resulting linear programming model. In this approach, the LMPs are defined as the values of the dual variables associated with the net injection constraints.
# The first step to use this method is to load and optimize an instance, as explained in previous tutorials:
using UnitCommitment
using HiGHS
instance = UnitCommitment.read_benchmark("matpower/case14/2017-01-01")
model =
UnitCommitment.build_model(instance = instance, optimizer = HiGHS.Optimizer)
UnitCommitment.optimize!(model)
# Next, we call `UnitCommitment.compute_lmp`, as shown below. The function accepts three arguments -- a solved SCUC model, the LMP method, and a linear optimizer -- and it returns a dictionary mapping `(scenario_name, bus_name, time)` to the marginal price.
lmp = UnitCommitment.compute_lmp(
model,
UnitCommitment.ConventionalLMP(),
optimizer = HiGHS.Optimizer,
)
# For example, the following code queries the LMP of bus `b1` in scenario `s1` at time 1:
@show lmp["s1", "b1", 1]
# ### Approximate Extended LMPs
# Approximate Extended LMPs (AELMPs) are an alternative method for calculating locational marginal prices that attempts to minimize uplift payments. The method internally works by modifying the instance data in three ways: (1) it sets the minimum power output of each generator to zero, (2) it averages the start-up cost over the offer blocks for each generator, and (3) it relaxes all integrality constraints. To compute AELMPs, as shown in the example below, we call `compute_lmp` and provide `UnitCommitment.AELMP()` as the second argument.
# This method has two configurable parameters: `allow_offline_participation` and `consider_startup_costs`. If `allow_offline_participation = true`, then offline generators are allowed to participate in the pricing. If instead `allow_offline_participation = false`, offline generators are not allowed and are therefore excluded from the system. A solved UC model is optional if offline participation is allowed, but is required if it is not; the method forces offline participation to be allowed if the UC model supplied by the user is not solved. For the second parameter, if `consider_startup_costs = true`, then start-up costs are integrated and averaged over each unit's production; otherwise the production costs stay the same. By default, both fields are set to `true`.
# !!! warning
# This method is still under active research and has several limitations. The implementation provided in the package is based on MISO Phase I only and supports only fast-start resources. More specifically, the minimum up/down time of all generators must be 1, the initial power of all generators must be 0, and the initial status of all generators must be negative. The method does not support time-varying start-up costs and currently works only for deterministic instances. If offline participation is not allowed, AELMP treats an asset as offline if it is never on throughout all time periods.
instance = UnitCommitment.read_benchmark("test/aelmp_simple")
model =
UnitCommitment.build_model(instance = instance, optimizer = HiGHS.Optimizer)
UnitCommitment.optimize!(model)
lmp = UnitCommitment.compute_lmp(
model,
UnitCommitment.AELMP(
allow_offline_participation = false,
consider_startup_costs = true,
),
optimizer = HiGHS.Optimizer,
)
@show lmp["s1", "B1", 1]

View File

@@ -1,183 +0,0 @@
# # Market Clearing
# In North America, electricity markets are structured around two primary types of markets: the day-ahead (DA) market and the real-time (RT) market. The DA market schedules electricity generation and consumption for the next day, based on forecasts and bids from electricity suppliers and consumers. The RT market, on the other hand, operates continuously throughout the day, addressing the discrepancies between the DA schedule and actual demand, typically every five minutes. UnitCommitment.jl is able to simulate the DA and RT market clearing process. Specifically, the package provides the function `UnitCommitment.solve_market` which performs the following steps:
# 1. Solve the DA market problem.
# 2. Extract commitment status of all generators.
# 3. Solve a sequence of RT market problems, fixing the commitment status of each generator to the corresponding optimal solution of the DA problem.
# To use this function, we need to prepare an instance file corresponding to the DA market problem and multiple instance files corresponding to the RT market problems. The number of required files depends on the time granularity and window. For example, suppose that the DA problem is solved at hourly granularity and has 24 time periods, whereas the RT problems are solved at 5-minute granularity and have a single time period. Then we would need to prepare one file for the DA problem and 288 files $\left(24 \times \frac{60}{5}\right)$ for the RT market problems.
# ## A small example
# For simplicity, in this tutorial we illustrate the usage of `UnitCommitment.solve_market` with a very small example, in which the DA problem has only two time periods. We start by creating the DA instance file:
da_contents = """
{
"Parameters": {
"Version": "0.4",
"Time horizon (h)": 2
},
"Buses": {
"b1": {
"Load (MW)": [200, 400]
}
},
"Generators": {
"g1": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 200],
"Production cost curve (\$)": [0, 1000],
"Initial status (h)": -24,
"Initial power (MW)": 0
},
"g2": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 300],
"Production cost curve (\$)": [0, 3000],
"Initial status (h)": -24,
"Initial power (MW)": 0
}
}
}
""";
open("da.json", "w") do file
return write(file, da_contents)
end;
# Next, we create eight single-period RT market problems, each one with a 15-minute time granularity:
for i in 1:8
rt_contents = """
{
"Parameters": {
"Version": "0.4",
"Time horizon (min)": 15,
"Time step (min)": 15
},
"Buses": {
"b1": {
"Load (MW)": [$(150 + 50 * i)]
}
},
"Generators": {
"g1": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 200],
"Production cost curve (\$)": [0, 1000],
"Initial status (h)": -24,
"Initial power (MW)": 0
},
"g2": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 300],
"Production cost curve (\$)": [0, 3000],
"Initial status (h)": -24,
"Initial power (MW)": 0
}
}
}
"""
open("rt_$i.json", "w") do file
return write(file, rt_contents)
end
end
# Finally, we call `UnitCommitment.solve_market`, providing as arguments (1) the path to the DA problem; (2) a list of paths to the RT problems; (3) the mixed-integer linear optimizer.
using UnitCommitment
using HiGHS
solution = UnitCommitment.solve_market(
"da.json",
[
"rt_1.json",
"rt_2.json",
"rt_3.json",
"rt_4.json",
"rt_5.json",
"rt_6.json",
"rt_7.json",
"rt_8.json",
],
optimizer = HiGHS.Optimizer,
)
# To retrieve the day-ahead market solution, we can query `solution["DA"]`:
@show solution["DA"]
# To retrieve each real-time market solution, we can query `solution["RT"][i]`. Note that LMPs are automatically calculated.
@show solution["RT"][1]
# ## Customizing the model and LMPs
# When using the `solve_market` function it is still possible to customize the problem formulation and the LMP calculation method. In the next example, we use a custom formulation and explicitly specify the LMP method through the `settings` keyword argument:
UnitCommitment.solve_market(
"da.json",
[
"rt_1.json",
"rt_2.json",
"rt_3.json",
"rt_4.json",
"rt_5.json",
"rt_6.json",
"rt_7.json",
"rt_8.json",
],
settings = UnitCommitment.MarketSettings(
lmp_method = UnitCommitment.ConventionalLMP(),
formulation = UnitCommitment.Formulation(
pwl_costs = UnitCommitment.KnuOstWat2018.PwlCosts(),
ramping = UnitCommitment.MorLatRam2013.Ramping(),
startup_costs = UnitCommitment.MorLatRam2013.StartupCosts(),
transmission = UnitCommitment.ShiftFactorsFormulation(
isf_cutoff = 0.008,
lodf_cutoff = 0.003,
),
),
),
optimizer = HiGHS.Optimizer,
)
# It is also possible to add custom variables and constraints to either the DA or RT market problems, through the usage of `after_build_da` and `after_build_rt` callback functions. Similarly, the `after_optimize_da` and `after_optimize_rt` can be used to directly analyze the JuMP models, after they have been optimized:
using JuMP
function after_build_da(model, instance)
@constraint(model, model[:is_on]["g1", 1] <= model[:is_on]["g2", 1])
end
function after_optimize_da(solution, model, instance)
@show value(model[:is_on]["g1", 1])
end
UnitCommitment.solve_market(
"da.json",
[
"rt_1.json",
"rt_2.json",
"rt_3.json",
"rt_4.json",
"rt_5.json",
"rt_6.json",
"rt_7.json",
"rt_8.json",
],
after_build_da = after_build_da,
after_optimize_da = after_optimize_da,
optimizer = HiGHS.Optimizer,
)
# ## Additional considerations
# - UC.jl supports two-stage stochastic DA market problems. In this case, we need one file for each DA market scenario. All RT market problems must be deterministic.
# - UC.jl also supports multi-period RT market problems. Assume, for example, that the DA market problem is an hourly problem with 24 time periods, whereas the RT market problem uses 5-minute granularity with 4 time periods. UC.jl assumes that the first RT file covers period `0:00` to `0:20`, the second covers `0:05` to `0:25` and so on. We therefore still need 288 RT market files. To avoid going beyond the 24-hour period covered by the DA market solution, however, the last few RT market problems must have only 3, 2, and 1 time periods, covering `23:45` to `24:00`, `23:50` to `24:00` and `23:55` to `24:00`, respectively.
# - Some MILP solvers (such as Cbc) have issues handling linear programming problems, which are required for the RT market. In this case, a separate linear programming solver can be provided to `solve_market` using the `lp_optimizer` argument. For example, `solve_market(da_file, rt_files, optimizer=Cbc.Optimizer, lp_optimizer=Clp.Optimizer)`.

View File

@@ -1,211 +0,0 @@
# # Getting started
# ## Installing the package
# UnitCommitment.jl was tested and developed with [Julia 1.10](https://julialang.org/). To install Julia, please follow the [installation guide on the official Julia website](https://julialang.org/downloads/). To install UnitCommitment.jl, run the Julia interpreter, type `]` to open the package manager, then type:
# ```text
# pkg> add UnitCommitment@0.4
# ```
# To solve the optimization models, a mixed-integer linear programming (MILP) solver is also required. Please see the [JuMP installation guide](https://jump.dev/JuMP.jl/stable/installation/) for more instructions on installing a solver. Typical open-source choices are [HiGHS](https://github.com/jump-dev/HiGHS.jl), [Cbc](https://github.com/JuliaOpt/Cbc.jl) and [GLPK](https://github.com/JuliaOpt/GLPK.jl). In the instructions below, HiGHS will be used, but any other MILP solver should also be compatible.
# ## Solving a benchmark instance
# We start this tutorial by illustrating how to use UnitCommitment.jl to solve one of the provided benchmark instances. The package contains a large number of deterministic benchmark instances collected from the literature and converted into a common data format, which can be used to evaluate the performance of different solution methods. See [Instances](../guides/instances.md) for more details. The first step is to import `UnitCommitment` and HiGHS.
using HiGHS
using UnitCommitment
# Next, we use the function `UnitCommitment.read_benchmark` to read the instance.
instance = UnitCommitment.read_benchmark("matpower/case14/2017-01-01");
# Now that we have the instance loaded in memory, we build the JuMP optimization model using `UnitCommitment.build_model`:
model =
UnitCommitment.build_model(instance = instance, optimizer = HiGHS.Optimizer);
# Next, we run the optimization process, with `UnitCommitment.optimize!`:
UnitCommitment.optimize!(model)
# Finally, we extract the optimal solution from the model:
solution = UnitCommitment.solution(model)
# We can then explore the solution using Julia:
@show solution["Thermal production (MW)"]["g1"]
# Or export the entire solution to a JSON file:
UnitCommitment.write("solution.json", solution)
# ## Solving a custom deterministic instance
# In the previous example, we solved a benchmark instance provided by the package. To solve a custom instance, the first step is to create an input file describing the list of elements (generators, loads and transmission lines) in the network. See [Data Format](../guides/format.md) for a complete description of the data format UC.jl expects. To keep this tutorial self-contained, we will create the input JSON file using Julia; however, this step can also be done with a simple text editor. First, we define the contents of the file:
json_contents = """
{
"Parameters": {
"Version": "0.4",
"Time horizon (h)": 4
},
"Buses": {
"b1": {
"Load (MW)": [100, 150, 200, 250]
}
},
"Generators": {
"g1": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 200],
"Production cost curve (\$)": [0, 1000],
"Initial status (h)": -24,
"Initial power (MW)": 0
},
"g2": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 300],
"Production cost curve (\$)": [0, 3000],
"Initial status (h)": -24,
"Initial power (MW)": 0
}
}
}
""";
# Next, we write it to `example.json`.
open("example.json", "w") do file
return write(file, json_contents)
end;
# Now that we have the input file, we can proceed as before, but using `UnitCommitment.read` instead of `UnitCommitment.read_benchmark`:
instance = UnitCommitment.read("example.json");
model =
UnitCommitment.build_model(instance = instance, optimizer = HiGHS.Optimizer);
UnitCommitment.optimize!(model)
# Finally, we extract and display the solution:
solution = UnitCommitment.solution(model)
#
@show solution["Thermal production (MW)"]["g1"]
#
@show solution["Thermal production (MW)"]["g2"]
# ## Solving a custom stochastic instance
# In addition to deterministic test cases, UnitCommitment.jl can also solve two-stage stochastic instances of the problem. In this section, we demonstrate the most simple form, which builds a single (extensive form) model containing information for all scenarios. See [Decomposition](../tutorials/decomposition.md) for more advanced methods.
# First, we need to create one JSON input file for each scenario. Parameters that are allowed to change across scenarios are marked as "uncertain" in the [JSON data format](../guides/format.md) page. It is also possible to specify the name and weight of each scenario, as shown below.
# We start by creating `example_s1.json`, the first scenario file:
json_contents_s1 = """
{
"Parameters": {
"Version": "0.4",
"Time horizon (h)": 4,
"Scenario name": "s1",
"Scenario weight": 3.0
},
"Buses": {
"b1": {
"Load (MW)": [100, 150, 200, 250]
}
},
"Generators": {
"g1": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 200],
"Production cost curve (\$)": [0, 1000],
"Initial status (h)": -24,
"Initial power (MW)": 0
},
"g2": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 300],
"Production cost curve (\$)": [0, 3000],
"Initial status (h)": -24,
"Initial power (MW)": 0
}
}
}
"""
open("example_s1.json", "w") do file
return write(file, json_contents_s1)
end;
# Next, we create `example_s2.json`, the second scenario file:
json_contents_s2 = """
{
"Parameters": {
"Version": "0.4",
"Time horizon (h)": 4,
"Scenario name": "s2",
"Scenario weight": 1.0
},
"Buses": {
"b1": {
"Load (MW)": [200, 300, 400, 500]
}
},
"Generators": {
"g1": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 200],
"Production cost curve (\$)": [0, 1000],
"Initial status (h)": -24,
"Initial power (MW)": 0
},
"g2": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 300],
"Production cost curve (\$)": [0, 3000],
"Initial status (h)": -24,
"Initial power (MW)": 0
}
}
}
""";
open("example_s2.json", "w") do file
return write(file, json_contents_s2)
end;
# Now that we have our two scenario files, we can read them using `UnitCommitment.read`. Note that, instead of a single file, we now provide a list.
instance = UnitCommitment.read(["example_s1.json", "example_s2.json"])
# If we have a large number of scenario files, the [Glob](https://github.com/vtjnash/Glob.jl) package can also be used to avoid having to list them individually:
using Glob
instance = UnitCommitment.read(glob("example_s*.json"))
# Finally, we build the model and optimize as before:
model =
UnitCommitment.build_model(instance = instance, optimizer = HiGHS.Optimizer);
UnitCommitment.optimize!(model)
# The solution to stochastic instances follows a slightly different format, as shown below:
solution = UnitCommitment.solution(model)
# The solution for each scenario can be accessed through `solution[scenario_name]`. For conveniance, this includes both first- and second-stage optimal decisions:
solution["s1"]

View File

@@ -1,74 +0,0 @@
# ## Generating initial conditions
# When creating random unit commitment instances for benchmark purposes, it is often hard to compute, in advance, sensible initial conditions for all thermal generators. Setting initial conditions naively (for example, making all generators initially off and producing no power) can easily cause the instance to become infeasible due to excessive ramping. Initial conditions can also make it hard to modify existing instances. For example, increasing the system load without carefully modifying the initial conditions may make the problem infeasible or unrealistically challenging to solve.
# To help with this issue, UC.jl provides a utility function which can generate feasible initial conditions by solving a single-period optimization problem. To illustrate its usage, we first generate a JSON file without initial conditions:
json_contents = """
{
"Parameters": {
"Version": "0.4",
"Time horizon (h)": 4
},
"Buses": {
"b1": {
"Load (MW)": [100, 150, 200, 250]
}
},
"Generators": {
"g1": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 200],
"Production cost curve (\$)": [0, 1000]
},
"g2": {
"Bus": "b1",
"Type": "Thermal",
"Production cost curve (MW)": [0, 300],
"Production cost curve (\$)": [0, 3000]
}
}
}
""";
open("example_initial.json", "w") do file
return write(file, json_contents)
end;
# Next, we read the instance and generate the initial conditions (in-place):
instance = UnitCommitment.read("example_initial.json")
UnitCommitment.generate_initial_conditions!(instance, HiGHS.Optimizer)
# Finally, we optimize the resulting problem:
model =
UnitCommitment.build_model(instance = instance, optimizer = HiGHS.Optimizer)
UnitCommitment.optimize!(model)
# !!! warning
# The function `generate_initial_conditions!` may return different initial conditions after each call, even if the same instance and the same optimizer is provided. The particular algorithm may also change in a future version of UC.jl. For these reasons, it is recommended that you generate initial conditions exactly once for each instance and store them for later use.
# ## 6. Verifying solutions
# When developing new formulations, it is very easy to introduce subtle errors in the model that result in incorrect solutions. To help avoiding this, UC.jl includes a utility function that verifies if a given solution is feasible, and, if not, prints all the validation errors it found. The implementation of this function is completely independent from the implementation of the optimization model, and therefore can be used to validate it.
# ```jldoctest; output = false
# using JSON
# using UnitCommitment
# # Read instance
# instance = UnitCommitment.read("example/s1.json")
# # Read solution (potentially produced by other packages)
# solution = JSON.parsefile("example/out.json")
# # Validate solution and print validation errors
# UnitCommitment.validate(instance, solution)
# # output
# true
# ```

226
docs/src/usage.md Normal file
View File

@@ -0,0 +1,226 @@
Usage
=====
Installation
------------
UnitCommitment.jl was tested and developed with [Julia 1.7](https://julialang.org/). To install Julia, please follow the [installation guide on the official Julia website](https://julialang.org/downloads/). To install UnitCommitment.jl, run the Julia interpreter, type `]` to open the package manager, then type:
```text
pkg> add UnitCommitment@0.3
```
To test that the package has been correctly installed, run:
```text
pkg> test UnitCommitment
```
If all tests pass, the package should now be ready to be used by any Julia script on the machine.
To solve the optimization models, a mixed-integer linear programming (MILP) solver is also required. Please see the [JuMP installation guide](https://jump.dev/JuMP.jl/stable/installation/) for more instructions on installing a solver. Typical open-source choices are [Cbc](https://github.com/JuliaOpt/Cbc.jl) and [GLPK](https://github.com/JuliaOpt/GLPK.jl). In the instructions below, Cbc will be used, but any other MILP solver listed in JuMP installation guide should also be compatible.
Typical Usage
-------------
### Solving user-provided instances
The first step to use UC.jl is to construct a JSON file describing your unit commitment instance. See [Data Format](format.md) for a complete description of the data format UC.jl expects. The next steps, as shown below, are to: (1) read the instance from file; (2) construct the optimization model; (3) run the optimization; and (4) extract the optimal solution.
```julia
using Cbc
using JSON
using UnitCommitment
# 1. Read instance
instance = UnitCommitment.read("/path/to/input.json")
# 2. Construct optimization model
model = UnitCommitment.build_model(
instance=instance,
optimizer=Cbc.Optimizer,
)
# 3. Solve model
UnitCommitment.optimize!(model)
# 4. Write solution to a file
solution = UnitCommitment.solution(model)
UnitCommitment.write("/path/to/output.json", solution)
```
### Solving benchmark instances
UnitCommitment.jl contains a large number of benchmark instances collected from the literature and converted into a common data format. To solve one of these instances individually, instead of constructing your own, the function `read_benchmark` can be used, as shown below. See [Instances](instances.md) for the complete list of available instances.
```julia
using UnitCommitment
instance = UnitCommitment.read_benchmark("matpower/case3375wp/2017-02-01")
```
## Customizing the formulation
By default, `build_model` uses a formulation that combines modeling components from different publications, and that has been carefully tested, using our own benchmark scripts, to provide good performance across a wide variety of instances. This default formulation is expected to change over time, as new methods are proposed in the literature. You can, however, construct your own formulation, based on the modeling components that you choose, as shown in the next example.
```julia
using Cbc
using UnitCommitment
import UnitCommitment:
Formulation,
KnuOstWat2018,
MorLatRam2013,
ShiftFactorsFormulation
instance = UnitCommitment.read_benchmark(
"matpower/case118/2017-02-01",
)
model = UnitCommitment.build_model(
instance = instance,
optimizer = Cbc.Optimizer,
formulation = Formulation(
pwl_costs = KnuOstWat2018.PwlCosts(),
ramping = MorLatRam2013.Ramping(),
startup_costs = MorLatRam2013.StartupCosts(),
transmission = ShiftFactorsFormulation(
isf_cutoff = 0.005,
lodf_cutoff = 0.001,
),
),
)
```
## Generating initial conditions
When creating random unit commitment instances for benchmark purposes, it is often hard to compute, in advance, sensible initial conditions for all generators. Setting initial conditions naively (for example, making all generators initially off and producing no power) can easily cause the instance to become infeasible due to excessive ramping. Initial conditions can also make it hard to modify existing instances. For example, increasing the system load without carefully modifying the initial conditions may make the problem infeasible or unrealistically challenging to solve.
To help with this issue, UC.jl provides a utility function which can generate feasible initial conditions by solving a single-period optimization problem, as shown below:
```julia
using Cbc
using UnitCommitment
# Read original instance
instance = UnitCommitment.read("instance.json")
# Generate initial conditions (in-place)
UnitCommitment.generate_initial_conditions!(instance, Cbc.Optimizer)
# Construct and solve optimization model
model = UnitCommitment.build_model(
instance=instance,
optimizer=Cbc.Optimizer,
)
UnitCommitment.optimize!(model)
```
!!! warning
The function `generate_initial_conditions!` may return different initial conditions after each call, even if the same instance and the same optimizer is provided. The particular algorithm may also change in a future version of UC.jl. For these reasons, it is recommended that you generate initial conditions exactly once for each instance and store them for later use.
## Verifying solutions
When developing new formulations, it is very easy to introduce subtle errors in the model that result in incorrect solutions. To help with this, UC.jl includes a utility function that verifies if a given solution is feasible, and, if not, prints all the validation errors it found. The implementation of this function is completely independent from the implementation of the optimization model, and therefore can be used to validate it. The function can also be used to verify solutions produced by other optimization packages, as long as they follow the [UC.jl data format](format.md).
```julia
using JSON
using UnitCommitment
# Read instance
instance = UnitCommitment.read("instance.json")
# Read solution (potentially produced by other packages)
solution = JSON.parsefile("solution.json")
# Validate solution and print validation errors
UnitCommitment.validate(instance, solution)
```
## Computing Locational Marginal Prices
Locational marginal prices (LMPs) refer to the cost of supplying electricity at a particular location of the network. Multiple methods for computing LMPs have been proposed in the literature. UnitCommitment.jl implements two commonly-used methods: conventional LMPs and Approximated Extended LMPs (AELMPs). To compute LMPs for a given unit commitment instance, the `compute_lmp` function can be used, as shown in the examples below. The function accepts three arguments -- a solved SCUC model, an LMP method, and a linear optimizer -- and it returns a dictionary mapping `(bus_name, time)` to the marginal price.
!!! warning
Most mixed-integer linear optimizers, such as `HiGHS`, `Gurobi` and `CPLEX` can be used with `compute_lmp`, with the notable exception of `Cbc`, which does not support dual value evaluations. If using `Cbc`, please provide `Clp` as the linear optimizer.
### Conventional LMPs
LMPs are conventionally computed by: (1) solving the SCUC model, (2) fixing all binary variables to their optimal values, and (3) re-solving the resulting linear programming model. In this approach, the LMPs are defined as the dual variables' values associated with the net injection constraints. The example below shows how to compute conventional LMPs for a given unit commitment instance. First, we build and optimize the SCUC model. Then, we call the `compute_lmp` function, providing as the second argument `ConventionalLMP()`.
```julia
using UnitCommitment
using HiGHS
import UnitCommitment: ConventionalLMP
# Read benchmark instance
instance = UnitCommitment.read_benchmark("matpower/case118/2018-01-01")
# Build the model
model = UnitCommitment.build_model(
instance = instance,
optimizer = HiGHS.Optimizer,
)
# Optimize the model
UnitCommitment.optimize!(model)
# Compute the LMPs using the conventional method
lmp = UnitCommitment.compute_lmp(
model,
ConventionalLMP(),
optimizer = HiGHS.Optimizer,
)
# Access the LMPs
# Example: "s1" is the scenario name, "b1" is the bus name, 1 is the first time slot
@show lmp["s1","b1", 1]
```
### Approximate Extended LMPs
Approximate Extended LMPs (AELMPs) are an alternative method to calculate locational marginal prices which attemps to minimize uplift payments. The method internally works by modifying the instance data in three ways: (1) it sets the minimum power output of each generator to zero, (2) it averages the start-up cost over the offer blocks for each generator, and (3) it relaxes all integrality constraints. To compute AELMPs, as shown in the example below, we call `compute_lmp` and provide `AELMP()` as the second argument.
This method has two configurable parameters: `allow_offline_participation` and `consider_startup_costs`. If `allow_offline_participation = true`, then offline generators are allowed to participate in the pricing. If instead `allow_offline_participation = false`, offline generators are not allowed and therefore are excluded from the system. A solved UC model is optional if offline participation is allowed, but is required if not allowed. The method forces offline participation to be allowed if the UC model supplied by the user is not solved. For the second field, If `consider_startup_costs = true`, then start-up costs are integrated and averaged over each unit production; otherwise the production costs stay the same. By default, both fields are set to `true`.
!!! warning
This approximation method is still under active research, and has several limitations. The implementation provided in the package is based on MISO Phase I only. It only supports fast start resources. More specifically, the minimum up/down time of all generators must be 1, the initial power of all generators must be 0, and the initial status of all generators must be negative. The method does not support time-varying start-up costs. The method does not support multiple scenarios. If offline participation is not allowed, AELMPs treats an asset to be offline if it is never on throughout all time periods.
```julia
using UnitCommitment
using HiGHS
import UnitCommitment: AELMP
# Read benchmark instance
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")
# Build the model
model = UnitCommitment.build_model(
instance = instance,
optimizer = HiGHS.Optimizer,
)
# Optimize the model
UnitCommitment.optimize!(model)
# Compute the AELMPs
aelmp = UnitCommitment.compute_lmp(
model,
AELMP(
allow_offline_participation = false,
consider_startup_costs = true
),
optimizer = HiGHS.Optimizer
)
# Access the AELMPs
# Example: "s1" is the scenario name, "b1" is the bus name, 1 is the first time slot
# Note: although scenario is supported, the query still keeps the scenario keys for consistency.
@show aelmp["s1", "b1", 1]
```

View File

@@ -10,7 +10,6 @@ include("instance/structs.jl")
include("model/formulations/base/structs.jl")
include("solution/structs.jl")
include("lmp/structs.jl")
include("market/structs.jl")
include("model/formulations/ArrCon2000/structs.jl")
include("model/formulations/CarArr2006/structs.jl")
@@ -20,9 +19,7 @@ include("model/formulations/KnuOstWat2018/structs.jl")
include("model/formulations/MorLatRam2013/structs.jl")
include("model/formulations/PanGua2016/structs.jl")
include("solution/methods/XavQiuWanThi2019/structs.jl")
include("solution/methods/ProgressiveHedging/structs.jl")
include("model/formulations/WanHob2016/structs.jl")
include("solution/methods/TimeDecomposition/structs.jl")
include("import/egret.jl")
include("instance/read.jl")
@@ -36,7 +33,6 @@ include("model/formulations/base/sensitivity.jl")
include("model/formulations/base/system.jl")
include("model/formulations/base/unit.jl")
include("model/formulations/base/punit.jl")
include("model/formulations/base/storage.jl")
include("model/formulations/CarArr2006/pwlcosts.jl")
include("model/formulations/DamKucRajAta2016/ramp.jl")
include("model/formulations/Gar1962/pwlcosts.jl")
@@ -53,10 +49,6 @@ include("solution/methods/XavQiuWanThi2019/enforce.jl")
include("solution/methods/XavQiuWanThi2019/filter.jl")
include("solution/methods/XavQiuWanThi2019/find.jl")
include("solution/methods/XavQiuWanThi2019/optimize.jl")
include("solution/methods/TimeDecomposition/optimize.jl")
include("solution/methods/ProgressiveHedging/optimize.jl")
include("solution/methods/ProgressiveHedging/read.jl")
include("solution/methods/ProgressiveHedging/solution.jl")
include("solution/optimize.jl")
include("solution/solution.jl")
include("solution/warmstart.jl")
@@ -70,6 +62,5 @@ include("validation/repair.jl")
include("validation/validate.jl")
include("lmp/conventional.jl")
include("lmp/aelmp.jl")
include("market/market.jl")
end

View File

@@ -8,13 +8,13 @@ using DataStructures
using GZip
import Base: getindex, time
const INSTANCES_URL = "https://axavier.org/UnitCommitment.jl/0.4/instances"
const INSTANCES_URL = "https://axavier.org/UnitCommitment.jl/0.3/instances"
"""
read_benchmark(name::AbstractString)::UnitCommitmentInstance
Read one of the benchmark instances included in the package. See
[Instances](guides/instances.md) for the entire list of benchmark instances available.
[Instances](instances.md) for the entire list of benchmark instances available.
# Example
```julia
@@ -136,35 +136,22 @@ function _from_json(json; repair = true)::UnitCommitmentScenario
loads = PriceSensitiveLoad[]
reserves = Reserve[]
profiled_units = ProfiledUnit[]
storage_units = StorageUnit[]
function scalar(x; default = nothing)
x !== nothing || return default
return x
end
time_horizon = json["Parameters"]["Time horizon (min)"]
time_horizon = json["Parameters"]["Time (h)"]
if time_horizon === nothing
time_horizon = json["Parameters"]["Time (h)"]
if time_horizon === nothing
time_horizon = json["Parameters"]["Time horizon (h)"]
end
if time_horizon !== nothing
time_horizon *= 60
end
time_horizon = json["Parameters"]["Time horizon (h)"]
end
time_horizon !== nothing || error("Missing parameter: Time horizon (min)")
isinteger(time_horizon) ||
error("Time horizon must be an integer in minutes")
time_horizon = Int(time_horizon)
time_horizon !== nothing || error("Missing parameter: Time horizon (h)")
time_step = scalar(json["Parameters"]["Time step (min)"], default = 60)
(60 % time_step == 0) ||
error("Time step $time_step is not a divisor of 60")
(time_horizon % time_step == 0) || error(
"Time step $time_step is not a divisor of time horizon $time_horizon",
)
time_multiplier = 60 ÷ time_step
T = time_horizon ÷ time_step
T = time_horizon * time_multiplier
probability = json["Parameters"]["Scenario weight"]
probability !== nothing || (probability = 1)
@@ -197,7 +184,6 @@ function _from_json(json; repair = true)::UnitCommitmentScenario
ThermalUnit[],
PriceSensitiveLoad[],
ProfiledUnit[],
StorageUnit[],
)
name_to_bus[bus_name] = bus
push!(buses, bus)
@@ -354,6 +340,7 @@ function _from_json(json; repair = true)::UnitCommitmentScenario
length(lines) + 1,
name_to_bus[dict["Source bus"]],
name_to_bus[dict["Target bus"]],
scalar(dict["Reactance (ohms)"]),
scalar(dict["Susceptance (S)"]),
timeseries(
dict["Normal flow limit (MW)"],
@@ -406,52 +393,6 @@ function _from_json(json; repair = true)::UnitCommitmentScenario
end
end
# Read storage units
if "Storage units" in keys(json)
for (storage_name, dict) in json["Storage units"]
bus = name_to_bus[dict["Bus"]]
min_level =
timeseries(scalar(dict["Minimum level (MWh)"], default = 0.0))
max_level = timeseries(dict["Maximum level (MWh)"])
storage = StorageUnit(
storage_name,
bus,
min_level,
max_level,
timeseries(
scalar(
dict["Allow simultaneous charging and discharging"],
default = true,
),
),
timeseries(dict["Charge cost (\$/MW)"]),
timeseries(dict["Discharge cost (\$/MW)"]),
timeseries(scalar(dict["Charge efficiency"], default = 1.0)),
timeseries(scalar(dict["Discharge efficiency"], default = 1.0)),
timeseries(scalar(dict["Loss factor"], default = 0.0)),
timeseries(
scalar(dict["Minimum charge rate (MW)"], default = 0.0),
),
timeseries(dict["Maximum charge rate (MW)"]),
timeseries(
scalar(dict["Minimum discharge rate (MW)"], default = 0.0),
),
timeseries(dict["Maximum discharge rate (MW)"]),
scalar(dict["Initial level (MWh)"], default = 0.0),
scalar(
dict["Last period minimum level (MWh)"],
default = min_level[T],
),
scalar(
dict["Last period maximum level (MWh)"],
default = max_level[T],
),
)
push!(bus.storage_units, storage)
push!(storage_units, storage)
end
end
scenario = UnitCommitmentScenario(
name = scenario_name,
probability = probability,
@@ -467,13 +408,10 @@ function _from_json(json; repair = true)::UnitCommitmentScenario
reserves = reserves,
reserves_by_name = name_to_reserve,
time = T,
time_step = time_step,
thermal_units_by_name = Dict(g.name => g for g in thermal_units),
thermal_units = thermal_units,
profiled_units_by_name = Dict(pu.name => pu for pu in profiled_units),
profiled_units = profiled_units,
storage_units_by_name = Dict(su.name => su for su in storage_units),
storage_units = storage_units,
isf = spzeros(Float64, length(lines), length(buses) - 1),
lodf = spzeros(Float64, length(lines), length(lines)),
)

View File

@@ -9,7 +9,6 @@ mutable struct Bus
thermal_units::Vector
price_sensitive_loads::Vector
profiled_units::Vector
storage_units::Vector
end
mutable struct CostSegment
@@ -56,6 +55,7 @@ mutable struct TransmissionLine
offset::Int
source::Bus
target::Bus
reactance::Float64
susceptance::Float64
normal_flow_limit::Vector{Float64}
emergency_flow_limit::Vector{Float64}
@@ -83,26 +83,6 @@ mutable struct ProfiledUnit
cost::Vector{Float64}
end
mutable struct StorageUnit
name::String
bus::Bus
min_level::Vector{Float64}
max_level::Vector{Float64}
simultaneous_charge_and_discharge::Vector{Bool}
charge_cost::Vector{Float64}
discharge_cost::Vector{Float64}
charge_efficiency::Vector{Float64}
discharge_efficiency::Vector{Float64}
loss_factor::Vector{Float64}
min_charge_rate::Vector{Float64}
max_charge_rate::Vector{Float64}
min_discharge_rate::Vector{Float64}
max_discharge_rate::Vector{Float64}
initial_level::Float64
min_ending_level::Float64
max_ending_level::Float64
end
Base.@kwdef mutable struct UnitCommitmentScenario
buses_by_name::Dict{AbstractString,Bus}
buses::Vector{Bus}
@@ -123,10 +103,7 @@ Base.@kwdef mutable struct UnitCommitmentScenario
reserves::Vector{Reserve}
thermal_units_by_name::Dict{AbstractString,ThermalUnit}
thermal_units::Vector{ThermalUnit}
storage_units_by_name::Dict{AbstractString,StorageUnit}
storage_units::Vector{StorageUnit}
time::Int
time_step::Int
end
Base.@kwdef mutable struct UnitCommitmentInstance

View File

@@ -1,219 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
"""
solve_market(
da_path::Union{String, Vector{String}},
rt_paths::Vector{String},
settings::MarketSettings;
optimizer,
lp_optimizer = nothing,
after_build_da = nothing,
after_optimize_da = nothing,
after_build_rt = nothing,
after_optimize_rt = nothing,
)::OrderedDict
Solve the day-ahead and the real-time markets by the means of commitment status mapping.
The method firstly acquires the commitment status outcomes through the resolution of the day-ahead market;
and secondly resolves each real-time market based on the corresponding results obtained previously.
Arguments
---------
- `da_path`:
the data file path of the day-ahead market, can be stochastic.
- `rt_paths`:
the list of data file paths of the real-time markets, must be deterministic for each market.
- `settings`:
the MarketSettings which include the problem formulation, the solving method, and LMP method.
- `optimizer`:
the optimizer for solving the problem.
- `lp_optimizer`:
the linear programming optimizer for solving the LMP problem, defaults to `nothing`.
If not specified by the user, the program uses `optimizer` instead.
- `after_build_da`:
a user-defined function that allows modifying the DA model after building,
must have 2 arguments `model` and `instance` in order.
- `after_optimize_da`:
a user-defined function that allows handling additional steps after optimizing the DA model,
must have 3 arguments `solution`, `model` and `instance` in order.
- `after_build_rt`:
a user-defined function that allows modifying each RT model after building,
must have 2 arguments `model` and `instance` in order.
- `after_optimize_rt`:
a user-defined function that allows handling additional steps after optimizing each RT model,
must have 3 arguments `solution`, `model` and `instance` in order.
Examples
--------
```julia
using UnitCommitment, Cbc, HiGHS
import UnitCommitment:
MarketSettings,
XavQiuWanThi2019,
ConventionalLMP,
Formulation
solution = UnitCommitment.solve_market(
"da_instance.json",
["rt_instance_1.json", "rt_instance_2.json", "rt_instance_3.json"],
MarketSettings(
inner_method = XavQiuWanThi2019.Method(),
lmp_method = ConventionalLMP(),
formulation = Formulation(),
),
optimizer = Cbc.Optimizer,
lp_optimizer = HiGHS.Optimizer,
)
"""
function solve_market(
da_path::Union{String,Vector{String}},
rt_paths::Vector{String};
settings::MarketSettings = MarketSettings(),
optimizer,
lp_optimizer = nothing,
after_build_da = nothing,
after_optimize_da = nothing,
after_build_rt = nothing,
after_optimize_rt = nothing,
)::OrderedDict
# solve da instance as usual
@info "Solving the day-ahead market with file $da_path..."
instance_da = UnitCommitment.read(da_path)
# LP optimizer is optional: if not specified, use optimizer
lp_optimizer = lp_optimizer === nothing ? optimizer : lp_optimizer
# build and optimize the DA market
model_da, solution_da = _build_and_optimize(
instance_da,
settings,
optimizer = optimizer,
lp_optimizer = lp_optimizer,
after_build = after_build_da,
after_optimize = after_optimize_da,
)
# prepare the final solution
solution = OrderedDict()
solution["DA"] = solution_da
solution["RT"] = []
# count the time, sc.time = n-slots, sc.time_step = slot-interval
# sufficient to look at only one scenario
sc = instance_da.scenarios[1]
# max time (min) of the DA market
max_time = sc.time * sc.time_step
# current time increments through the RT market list
current_time = 0
# DA market time slots in (min)
da_time_intervals = [sc.time_step * ts for ts in 1:sc.time]
# get the uc status and set each uc fixed
solution_rt = OrderedDict()
prev_initial_status = OrderedDict()
for rt_path in rt_paths
@info "Solving the real-time market with file $rt_path..."
instance_rt = UnitCommitment.read(rt_path)
# check instance time
sc = instance_rt.scenarios[1]
# check each time slot in the RT model
for ts in 1:sc.time
slot_t_end = current_time + ts * sc.time_step
# ensure this RT's slot time ub never exceeds max time of DA
slot_t_end <= max_time || error(
"The time of the real-time market cannot exceed the time of the day-ahead market.",
)
# get the slot start time to determine commitment status
slot_t_start = slot_t_end - sc.time_step
# find the index of the first DA time slot that covers slot_t_start
da_time_slot = findfirst(ti -> slot_t_start < ti, da_time_intervals)
# update thermal unit commitment status
for g in sc.thermal_units
g.commitment_status[ts] =
value(model_da[:is_on][g.name, da_time_slot]) == 1.0
end
end
# update current time by ONE slot only
current_time += sc.time_step
# set initial status for all generators in all scenarios
if !isempty(solution_rt) && !isempty(prev_initial_status)
for g in sc.thermal_units
g.initial_power =
solution_rt["Thermal production (MW)"][g.name][1]
g.initial_status = UnitCommitment._determine_initial_status(
prev_initial_status[g.name],
[solution_rt["Is on"][g.name][1]],
)
end
end
# build and optimize the RT market
_, solution_rt = _build_and_optimize(
instance_rt,
settings,
optimizer = optimizer,
lp_optimizer = lp_optimizer,
after_build = after_build_rt,
after_optimize = after_optimize_rt,
)
prev_initial_status =
OrderedDict(g.name => g.initial_status for g in sc.thermal_units)
push!(solution["RT"], solution_rt)
end # end of for-loop that checks each RT market
return solution
end
function _build_and_optimize(
instance::UnitCommitmentInstance,
settings::MarketSettings;
optimizer,
lp_optimizer,
after_build = nothing,
after_optimize = nothing,
)::Tuple{JuMP.Model,OrderedDict}
# build model with after build
model = UnitCommitment.build_model(
instance = instance,
optimizer = optimizer,
formulation = settings.formulation,
)
if after_build !== nothing
after_build(model, instance)
end
# optimize model
UnitCommitment.optimize!(model, settings.inner_method)
solution = UnitCommitment.solution(model)
# compute lmp and add to solution
if settings.lmp_method !== nothing
lmp = UnitCommitment.compute_lmp(
model,
settings.lmp_method,
optimizer = lp_optimizer,
)
if length(instance.scenarios) == 1
solution["LMP (\$/MW)"] = lmp
else
for sc in instance.scenarios
solution[sc.name]["LMP (\$/MW)"] = OrderedDict(
key => val for (key, val) in lmp if key[1] == sc.name
)
end
end
end
# run after optimize with solution
if after_optimize !== nothing
after_optimize(solution, model, instance)
end
return model, solution
end

View File

@@ -1,33 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import ..SolutionMethod
import ..PricingMethod
import ..Formulation
"""
struct MarketSettings
inner_method::SolutionMethod = XavQiuWanThi2019.Method()
lmp_method::Union{PricingMethod, Nothing} = ConventionalLMP()
formulation::Formulation = Formulation()
end
Market setting struct, typically used to map a day-ahead market to real-time markets.
Arguments
---------
- `inner_method`:
method to solve each marketing problem.
- `lmp_method`:
a PricingMethod method to calculate the locational marginal prices.
If it is set to `nothing`, the LMPs will not be calculated.
- `formulation`:
problem formulation.
"""
Base.@kwdef struct MarketSettings
inner_method::SolutionMethod = XavQiuWanThi2019.Method()
lmp_method::Union{PricingMethod,Nothing} = ConventionalLMP()
formulation::Formulation = Formulation()
end

View File

@@ -99,9 +99,6 @@ function build_model(;
for pu in sc.profiled_units
_add_profiled_unit!(model, pu, sc)
end
for su in sc.storage_units
_add_storage_unit!(model, su, sc)
end
_add_system_wide_eqs!(model, sc)
end
@objective(model, Min, model[:obj])

View File

@@ -99,7 +99,7 @@ function _add_production_piecewise_linear_eqs!(
add_to_expression!(
model[:obj],
segprod[sc.name, gn, t, k],
sc.probability * g.cost_segments[k].cost[t],
g.cost_segments[k].cost[t],
)
# Also add an explicit upper bound on segprod to make the solver's

View File

@@ -10,8 +10,8 @@ using SparseArrays, Base.Threads, LinearAlgebra, JuMP
Returns a (B-1)xL matrix M, where B is the number of buses and L is the number
of transmission lines. For a given bus b and transmission line l, the entry
M[l.offset, b.offset] indicates the amount of power (in MW) that flows through
transmission line l when 1 MW of power is injected at b and withdrawn from the
slack bus (the bus that has offset zero).
transmission line l when 1 MW of power is injected at the slack bus (the bus
that has offset zero) and withdrawn from b.
"""
function _injection_shift_factors(;
buses::Array{Bus},

View File

@@ -1,125 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
function _add_storage_unit!(
model::JuMP.Model,
su::StorageUnit,
sc::UnitCommitmentScenario,
)::Nothing
# Initialize variables
storage_level = _init(model, :storage_level)
charge_rate = _init(model, :charge_rate)
discharge_rate = _init(model, :discharge_rate)
is_charging = _init(model, :is_charging)
is_discharging = _init(model, :is_discharging)
eq_min_charge_rate = _init(model, :eq_min_charge_rate)
eq_max_charge_rate = _init(model, :eq_max_charge_rate)
eq_min_discharge_rate = _init(model, :eq_min_discharge_rate)
eq_max_discharge_rate = _init(model, :eq_max_discharge_rate)
# Initialize constraints
net_injection = _init(model, :expr_net_injection)
eq_storage_transition = _init(model, :eq_storage_transition)
eq_ending_level = _init(model, :eq_ending_level)
# time in hours
time_step = sc.time_step / 60
for t in 1:model[:instance].time
# Decision variable
storage_level[sc.name, su.name, t] = @variable(
model,
lower_bound = su.min_level[t],
upper_bound = su.max_level[t]
)
charge_rate[sc.name, su.name, t] = @variable(model)
discharge_rate[sc.name, su.name, t] = @variable(model)
is_charging[sc.name, su.name, t] = @variable(model, binary = true)
is_discharging[sc.name, su.name, t] = @variable(model, binary = true)
# Objective function terms
add_to_expression!(
model[:obj],
charge_rate[sc.name, su.name, t],
su.charge_cost[t] * sc.probability,
)
add_to_expression!(
model[:obj],
discharge_rate[sc.name, su.name, t],
su.discharge_cost[t] * sc.probability,
)
# Net injection
add_to_expression!(
net_injection[sc.name, su.bus.name, t],
discharge_rate[sc.name, su.name, t],
1.0,
)
add_to_expression!(
net_injection[sc.name, su.bus.name, t],
charge_rate[sc.name, su.name, t],
-1.0,
)
# Simultaneous charging and discharging
if !su.simultaneous_charge_and_discharge[t]
# Initialize the model dictionary
eq_simultaneous_charge_and_discharge =
_init(model, :eq_simultaneous_charge_and_discharge)
# Constraints
eq_simultaneous_charge_and_discharge[sc.name, su.name, t] =
@constraint(
model,
is_charging[sc.name, su.name, t] +
is_discharging[sc.name, su.name, t] <= 1.0
)
end
# Charge and discharge constraints
eq_min_charge_rate[sc.name, su.name, t] = @constraint(
model,
charge_rate[sc.name, su.name, t] >=
is_charging[sc.name, su.name, t] * su.min_charge_rate[t]
)
eq_max_charge_rate[sc.name, su.name, t] = @constraint(
model,
charge_rate[sc.name, su.name, t] <=
is_charging[sc.name, su.name, t] * su.max_charge_rate[t]
)
eq_min_discharge_rate[sc.name, su.name, t] = @constraint(
model,
discharge_rate[sc.name, su.name, t] >=
is_discharging[sc.name, su.name, t] * su.min_discharge_rate[t]
)
eq_max_discharge_rate[sc.name, su.name, t] = @constraint(
model,
discharge_rate[sc.name, su.name, t] <=
is_discharging[sc.name, su.name, t] * su.max_discharge_rate[t]
)
# Storage energy transition constraint
prev_storage_level =
t == 1 ? su.initial_level : storage_level[sc.name, su.name, t-1]
eq_storage_transition[sc.name, su.name, t] = @constraint(
model,
storage_level[sc.name, su.name, t] ==
(1 - su.loss_factor[t]) * prev_storage_level +
charge_rate[sc.name, su.name, t] *
time_step *
su.charge_efficiency[t] -
discharge_rate[sc.name, su.name, t] * time_step /
su.discharge_efficiency[t]
)
# Storage ending level constraint
if t == sc.time
eq_ending_level[sc.name, su.name] = @constraint(
model,
su.min_ending_level <=
storage_level[sc.name, su.name, t] <=
su.max_ending_level
)
end
end
return
end

View File

@@ -1,230 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using MPI, Printf
using TimerOutputs
import JuMP
const to = TimerOutput()
function optimize!(model::JuMP.Model, method::ProgressiveHedging)::Nothing
mpi = MpiInfo(MPI.COMM_WORLD)
iterations = PHIterationInfo[]
consensus_vars = [var for var in all_variables(model) if is_binary(var)]
nvars = length(consensus_vars)
weights = ones(nvars)
if method.initial_weights !== nothing
weights = copy(method.initial_weights)
end
target = zeros(nvars)
if method.initial_target !== nothing
target = copy(method.initial_target)
end
params = PHSubProblemParams(
ρ = method.ρ,
λ = [method.λ for _ in 1:nvars],
target = target,
)
sp = PHSubProblem(model, model[:obj], consensus_vars, weights)
while true
iteration_time = @elapsed begin
solution = solve_subproblem(sp, params, method.inner_method)
MPI.Barrier(mpi.comm)
global_obj = compute_global_objective(mpi, solution)
target = compute_target(mpi, solution)
update_λ_and_residuals!(solution, params, target)
global_infeas = compute_global_infeasibility(solution, mpi)
global_residual = compute_global_residual(mpi, solution)
if has_numerical_issues(target)
break
end
end
total_elapsed_time =
compute_total_elapsed_time(iteration_time, iterations)
current_iteration = PHIterationInfo(
global_infeas = global_infeas,
global_obj = global_obj,
global_residual = global_residual,
iteration_number = length(iterations) + 1,
iteration_time = iteration_time,
sp_vals = solution.vals,
sp_obj = solution.obj,
target = target,
total_elapsed_time = total_elapsed_time,
)
push!(iterations, current_iteration)
print_progress(mpi, current_iteration, method.print_interval)
if should_stop(mpi, iterations, method.termination)
break
end
end
return
end
function compute_total_elapsed_time(
iteration_time::Float64,
iterations::Array{PHIterationInfo,1},
)::Float64
length(iterations) > 0 ?
current_total_time = last(iterations).total_elapsed_time :
current_total_time = 0
return current_total_time + iteration_time
end
function compute_global_objective(
mpi::MpiInfo,
s::PhSubProblemSolution,
)::Float64
global_obj = MPI.Allreduce(s.obj, MPI.SUM, mpi.comm)
global_obj /= mpi.nprocs
return global_obj
end
function compute_target(mpi::MpiInfo, s::PhSubProblemSolution)::Array{Float64,1}
sp_vals = s.vals
target = MPI.Allreduce(sp_vals, MPI.SUM, mpi.comm)
target = target / mpi.nprocs
return target
end
function compute_global_residual(mpi::MpiInfo, s::PhSubProblemSolution)::Float64
n_vars = length(s.vals)
local_residual_sum = abs.(s.residuals)
global_residual_sum = MPI.Allreduce(local_residual_sum, MPI.SUM, mpi.comm)
return sum(global_residual_sum) / n_vars
end
function compute_global_infeasibility(
solution::PhSubProblemSolution,
mpi::MpiInfo,
)::Float64
local_infeasibility = norm(solution.residuals)
global_infeas = MPI.Allreduce(local_infeasibility, MPI.SUM, mpi.comm)
return global_infeas
end
function solve_subproblem(
sp::PHSubProblem,
params::PHSubProblemParams,
method::SolutionMethod,
)::PhSubProblemSolution
G = length(sp.consensus_vars)
if norm(params.λ) < 1e-3
@objective(sp.mip, Min, sp.obj)
else
@objective(
sp.mip,
Min,
sp.obj +
sum(
sp.weights[g] *
params.λ[g] *
(sp.consensus_vars[g] - params.target[g]) for g in 1:G
) +
(params.ρ / 2) * sum(
sp.weights[g] * (sp.consensus_vars[g] - params.target[g])^2 for
g in 1:G
)
)
end
optimize!(sp.mip, method)
obj = objective_value(sp.mip)
sp_vals = value.(sp.consensus_vars)
return PhSubProblemSolution(obj = obj, vals = sp_vals, residuals = zeros(G))
end
function update_λ_and_residuals!(
solution::PhSubProblemSolution,
params::PHSubProblemParams,
target::Array{Float64,1},
)::Nothing
n_vars = length(solution.vals)
params.target = target
for n in 1:n_vars
solution.residuals[n] = solution.vals[n] - params.target[n]
params.λ[n] += params.ρ * solution.residuals[n]
end
end
function print_header(mpi::MpiInfo)::Nothing
if !mpi.root
return
end
@info "Solving via Progressive Hedging:"
@info @sprintf(
"%8s %20s %20s %14s %8s %8s",
"iter",
"obj",
"infeas",
"consensus",
"time-it",
"time"
)
end
function print_progress(
mpi::MpiInfo,
iteration::PHIterationInfo,
print_interval,
)::Nothing
if !mpi.root
return
end
if iteration.iteration_number % print_interval != 0
return
end
@info @sprintf(
"%8d %20.6e %20.6e %12.2f %% %8.2f %8.2f",
iteration.iteration_number,
iteration.global_obj,
iteration.global_infeas,
iteration.global_residual * 100,
iteration.iteration_time,
iteration.total_elapsed_time
)
end
function has_numerical_issues(target::Array{Float64,1})::Bool
if target == NaN
@warn "Numerical issues detected. Stopping."
return true
end
return false
end
function should_stop(
mpi::MpiInfo,
iterations::Array{PHIterationInfo,1},
termination::PHTermination,
)::Bool
if length(iterations) >= termination.max_iterations
if mpi.root
@info "Iteration limit reached. Stopping."
end
return true
end
if length(iterations) < termination.min_iterations
return false
end
if last(iterations).total_elapsed_time > termination.max_time
if mpi.root
@info "Time limit reached. Stopping."
end
return true
end
curr_it = last(iterations)
prev_it = iterations[length(iterations)-1]
if curr_it.global_infeas < termination.min_feasibility
obj_change = abs(prev_it.global_obj - curr_it.global_obj)
if obj_change < termination.min_improvement
if mpi.root
@info "Feasibility limit reached. Stopping."
end
return true
end
end
return false
end

View File

@@ -1,18 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
function read(
paths::Vector{String},
::ProgressiveHedging,
)::UnitCommitmentInstance
comm = MPI.COMM_WORLD
mpi = MpiInfo(comm)
(length(paths) % mpi.nprocs == 0) || error(
"Number of processes $(mpi.nprocs) is not a divisor of $(length(paths))",
)
bundled_scenarios = length(paths) ÷ mpi.nprocs
sc_num_start = (mpi.rank - 1) * bundled_scenarios + 1
sc_num_end = mpi.rank * bundled_scenarios
return read(paths[sc_num_start:sc_num_end])
end

View File

@@ -1,83 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using MPI, DataStructures
const FIRST_STAGE_VARS = ["Is on", "Switch on", "Switch off"]
function solution(model::JuMP.Model, method::ProgressiveHedging)::OrderedDict
comm = MPI.COMM_WORLD
mpi = MpiInfo(comm)
sp_solution = UnitCommitment.solution(model)
gather_solution = OrderedDict()
for (solution_key, dict) in sp_solution
if solution_key !== "Spinning reserve (MW)" &&
solution_key FIRST_STAGE_VARS
push!(gather_solution, solution_key => OrderedDict())
for (gen_bus_key, values) in dict
global T = length(values)
receive_values =
MPI.UBuffer(Vector{Float64}(undef, T * mpi.nprocs), T)
MPI.Gather!(float.(values), receive_values, comm)
if mpi.root
push!(
gather_solution[solution_key],
gen_bus_key => receive_values.data,
)
end
end
end
end
push!(gather_solution, "Spinning reserve (MW)" => OrderedDict())
for (reserve_type, dict) in sp_solution["Spinning reserve (MW)"]
push!(
gather_solution["Spinning reserve (MW)"],
reserve_type => OrderedDict(),
)
for (gen_key, values) in dict
receive_values =
MPI.UBuffer(Vector{Float64}(undef, T * mpi.nprocs), T)
MPI.Gather!(float.(values), receive_values, comm)
if mpi.root
push!(
gather_solution["Spinning reserve (MW)"][reserve_type],
gen_key => receive_values.data,
)
end
end
end
aggregate_solution = OrderedDict()
if mpi.root
for first_stage_var in FIRST_STAGE_VARS
aggregate_solution[first_stage_var] = OrderedDict()
for gen_key in keys(sp_solution[first_stage_var])
aggregate_solution[first_stage_var][gen_key] =
sp_solution[first_stage_var][gen_key]
end
end
for i in 1:mpi.nprocs
push!(aggregate_solution, "s$i" => OrderedDict())
for (solution_key, solution_dict) in gather_solution
push!(aggregate_solution["s$i"], solution_key => OrderedDict())
if solution_key !== "Spinning reserve (MW)"
for (gen_bus_key, values) in solution_dict
aggregate_solution["s$i"][solution_key][gen_bus_key] =
gather_solution[solution_key][gen_bus_key][(i-1)*T+1:i*T]
end
else
for (reserve_name, reserve_dict) in solution_dict
push!(
aggregate_solution["s$i"][solution_key],
reserve_name => OrderedDict(),
)
for (gen_key, values) in reserve_dict
aggregate_solution["s$i"][solution_key][reserve_name][gen_key] =
gather_solution[solution_key][reserve_name][gen_key][(i-1)*T+1:i*T]
end
end
end
end
end
end
return aggregate_solution
end

View File

@@ -1,73 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP, MPI, TimerOutputs
Base.@kwdef mutable struct PHTermination
max_iterations::Int = 1000
max_time::Float64 = 14400.0
min_feasibility::Float64 = 1e-3
min_improvement::Float64 = 1e-3
min_iterations::Int = 2
end
Base.@kwdef mutable struct PHIterationInfo
global_infeas::Float64
global_obj::Float64
global_residual::Float64
iteration_number::Int
iteration_time::Float64
sp_vals::Array{Float64,1}
sp_obj::Float64
target::Array{Float64,1}
total_elapsed_time::Float64
end
Base.@kwdef mutable struct ProgressiveHedging <: SolutionMethod
initial_weights::Union{Vector{Float64},Nothing} = nothing
initial_target::Union{Vector{Float64},Nothing} = nothing
ρ::Float64 = 1.0
λ::Float64 = 0.0
print_interval::Int = 1
termination::PHTermination = PHTermination()
inner_method::SolutionMethod = XavQiuWanThi2019.Method()
end
struct SpResult
obj::Float64
vals::Array{Float64,1}
end
Base.@kwdef mutable struct PHSubProblem
mip::JuMP.Model
obj::AffExpr
consensus_vars::Array{VariableRef,1}
weights::Array{Float64,1}
end
Base.@kwdef struct PhSubProblemSolution
obj::Float64
vals::Array{Float64,1}
residuals::Array{Float64,1}
end
Base.@kwdef mutable struct PHSubProblemParams
ρ::Float64
λ::Array{Float64,1}
target::Array{Float64,1}
end
struct MpiInfo
comm::Any
rank::Int
root::Bool
nprocs::Int
function MpiInfo(comm)
rank = MPI.Comm_rank(comm) + 1
is_root = (rank == 1)
nprocs = MPI.Comm_size(comm)
return new(comm, rank, is_root, nprocs)
end
end

View File

@@ -1,259 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
"""
optimize!(
instance::UnitCommitmentInstance,
method::TimeDecomposition;
optimizer,
after_build = nothing,
after_optimize = nothing,
)::OrderedDict
Solve the given unit commitment instance with time decomposition.
The model solves each sub-problem of a given time length specified by method.time_window,
and proceeds to the next sub-problem by incrementing the time length of `method.time_increment`.
Arguments
---------
- `instance`:
the UnitCommitment instance.
- `method`:
the `TimeDecomposition` method.
- `optimizer`:
the optimizer for solving the problem.
- `after_build`:
a user-defined function that allows modifying the model after building,
must have 2 arguments `model` and `instance` in order.
- `after_optimize`:
a user-defined function that allows handling additional steps after optimizing,
must have 3 arguments `solution`, `model` and `instance` in order.
Examples
--------
```julia
using UnitCommitment, JuMP, Cbc, HiGHS
import UnitCommitment:
TimeDecomposition,
ConventionalLMP,
XavQiuWanThi2019,
Formulation
# specifying the after_build and after_optimize functions
function after_build(model, instance)
@constraint(
model,
model[:is_on]["g3", 1] + model[:is_on]["g4", 1] <= 1,
)
end
lmps = []
function after_optimize(solution, model, instance)
lmp = UnitCommitment.compute_lmp(
model,
ConventionalLMP(),
optimizer = HiGHS.Optimizer,
)
return push!(lmps, lmp)
end
# assume the instance is given as a 120h problem
instance = UnitCommitment.read("instance.json")
solution = UnitCommitment.optimize!(
instance,
TimeDecomposition(
time_window = 36, # solve 36h problems
time_increment = 24, # advance by 24h each time
inner_method = XavQiuWanThi2019.Method(),
formulation = Formulation(),
),
optimizer = Cbc.Optimizer,
after_build = after_build,
after_optimize = after_optimize,
)
"""
function optimize!(
instance::UnitCommitmentInstance,
method::TimeDecomposition;
optimizer,
after_build = nothing,
after_optimize = nothing,
)::OrderedDict
# get instance total length
T = instance.time
solution = OrderedDict()
if length(instance.scenarios) > 1
for sc in instance.scenarios
solution[sc.name] = OrderedDict()
end
end
# for each iteration, time increment by method.time_increment
for t_start in 1:method.time_increment:T
t_end = t_start + method.time_window - 1
# if t_end exceed total T
t_end = t_end > T ? T : t_end
# slice the model
@info "Solving the sub-problem of time $t_start to $t_end..."
sub_instance = UnitCommitment.slice(instance, t_start:t_end)
# build and optimize the model
sub_model = UnitCommitment.build_model(
instance = sub_instance,
optimizer = optimizer,
formulation = method.formulation,
)
if after_build !== nothing
@info "Calling after build..."
after_build(sub_model, sub_instance)
end
UnitCommitment.optimize!(sub_model, method.inner_method)
# get the result of each time period
sub_solution = UnitCommitment.solution(sub_model)
if after_optimize !== nothing
@info "Calling after optimize..."
after_optimize(sub_solution, sub_model, sub_instance)
end
# merge solution
if length(instance.scenarios) == 1
_update_solution!(solution, sub_solution, method.time_increment)
else
for sc in instance.scenarios
_update_solution!(
solution[sc.name],
sub_solution[sc.name],
method.time_increment,
)
end
end
# set the initial status for the next sub-problem
_set_initial_status!(instance, solution, method.time_increment)
end
return solution
end
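To make the moving-window behavior concrete, the standalone loop below (not part of the package) reproduces the start/end indices that the function visits for the 120-hour example in the docstring:
```julia
T, window, increment = 120, 36, 24
for t_start in 1:increment:T
    t_end = min(t_start + window - 1, T)
    println((t_start, t_end))  # (1, 36), (25, 60), (49, 84), (73, 108), (97, 120)
end
```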
"""
_set_initial_status!(
instance::UnitCommitmentInstance,
solution::OrderedDict,
time_increment::Int,
)
Set the thermal units' initial power levels and statuses based on the last `time_increment`
time periods recorded in the solution dictionary.
"""
function _set_initial_status!(
instance::UnitCommitmentInstance,
solution::OrderedDict,
time_increment::Int,
)
for sc in instance.scenarios
for thermal_unit in sc.thermal_units
if length(instance.scenarios) == 1
prod = solution["Thermal production (MW)"][thermal_unit.name]
is_on = solution["Is on"][thermal_unit.name]
else
prod =
solution[sc.name]["Thermal production (MW)"][thermal_unit.name]
is_on = solution[sc.name]["Is on"][thermal_unit.name]
end
thermal_unit.initial_power = prod[end]
thermal_unit.initial_status = _determine_initial_status(
thermal_unit.initial_status,
is_on[end-time_increment+1:end],
)
end
end
end
"""
_determine_initial_status(
prev_initial_status::Union{Float64,Int},
status_sequence::Vector{Float64},
)::Union{Float64,Int}
Determine a thermal unit's initial status based on its previous initial status and
the on/off statuses observed over the most recent operating window.
"""
function _determine_initial_status(
prev_initial_status::Union{Float64,Int},
status_sequence::Vector{Float64},
)::Union{Float64,Int}
# initialize the two counters from the previous status
on_status = prev_initial_status
off_status = prev_initial_status
# scan the status sequence:
# whenever the unit is on, reset off_status and increment on_status
# (restarting at 1.0 if on_status was negative);
# whenever the unit is off, reset on_status and decrement off_status
# (restarting at -1.0 if off_status was positive)
for status in status_sequence
if status == 1.0
on_status = on_status < 0.0 ? 1.0 : on_status + 1.0
off_status = 0.0
else
on_status = 0.0
off_status = off_status > 0.0 ? -1.0 : off_status - 1.0
end
end
# only one of them has non-zero value
return on_status + off_status
end
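A few illustrative calls showing the counting logic above (the numbers are arbitrary, not tied to any instance):
```julia
# On for 100 periods, then on for 3 more: the counter keeps growing.
UnitCommitment._determine_initial_status(100, [1.0, 1.0, 1.0])   # == 103.0
# Previously on, but off in the last two periods: counts time spent off.
UnitCommitment._determine_initial_status(100, [1.0, 0.0, 0.0])   # == -2.0
# Off for 50 periods, then switched on in the final period.
UnitCommitment._determine_initial_status(-50, [0.0, 0.0, 1.0])   # == 1.0
```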
"""
_update_solution!(
solution::OrderedDict,
sub_solution::OrderedDict,
time_increment::Int,
)
Update the solution (of each scenario) by appending the first `time_increment`
time periods of the newly generated sub-solution to the end of the final solution dictionary.
The function traverses the dictionary keys until it reaches a vector and then performs
the concatenation. For now, it is hardcoded to traverse at most three layers of nesting
before it expects to find a vector object.
"""
function _update_solution!(
solution::OrderedDict,
sub_solution::OrderedDict,
time_increment::Int,
)
# the solution has at most 3 layers
for (l1_k, l1_v) in sub_solution
for (l2_k, l2_v) in l1_v
if l2_v isa Array
# slice the sub_solution
values_of_interest = l2_v[1:time_increment]
sub_solution[l1_k][l2_k] = values_of_interest
# append to the solution
if !isempty(solution)
append!(solution[l1_k][l2_k], values_of_interest)
end
elseif l2_v isa OrderedDict
for (l3_k, l3_v) in l2_v
# slice the sub_solution
values_of_interest = l3_v[1:time_increment]
sub_solution[l1_k][l2_k][l3_k] = values_of_interest
# append to the solution
if !isempty(solution)
append!(solution[l1_k][l2_k][l3_k], values_of_interest)
end
end
end
end
end
# if the solution was never initialized, copy the sliced sub_solution into it
if isempty(solution)
merge!(solution, sub_solution)
end
end
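A small sketch of the merge behavior, assuming `UnitCommitment` and `DataStructures` are loaded (keys and values below are illustrative):
```julia
using DataStructures

solution = OrderedDict()
sub = OrderedDict(
    "Is on" => OrderedDict("g1" => [1.0, 1.0, 0.0, 0.0]),
    "Spinning reserve (MW)" =>
        OrderedDict("r1" => OrderedDict("g1" => [5.0, 5.0, 0.0, 0.0])),
)
UnitCommitment._update_solution!(solution, sub, 2)
solution["Is on"]["g1"]                        # == [1.0, 1.0]
solution["Spinning reserve (MW)"]["r1"]["g1"]  # == [5.0, 5.0]
```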

View File

@@ -1,35 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import ..SolutionMethod
import ..Formulation
"""
mutable struct TimeDecomposition <: SolutionMethod
time_window::Int
time_increment::Int
inner_method::SolutionMethod = XavQiuWanThi2019.Method()
formulation::Formulation = Formulation()
end
Time decomposition method that solves the problem over a moving time window.
Fields
------
- `time_window`:
the number of time periods covered by each sub-problem.
- `time_increment`:
the number of time periods by which the window advances between consecutive sub-problems.
- `inner_method`:
method to solve each sub-problem.
- `formulation`:
problem formulation.
"""
Base.@kwdef mutable struct TimeDecomposition <: SolutionMethod
time_window::Int
time_increment::Int
inner_method::SolutionMethod = XavQiuWanThi2019.Method()
formulation::Formulation = Formulation()
end

View File

@@ -2,7 +2,7 @@
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import Base.Threads: @threads, maxthreadid
import Base.Threads: @threads
function _find_violations(
model::JuMP.Model,
@@ -71,7 +71,7 @@ function _find_violations(;
B = length(sc.buses) - 1
L = length(sc.lines)
T = instance.time
K = maxthreadid()
K = nthreads()
size(net_injections) == (B, T) || error("net_injections has incorrect size")
size(isf) == (L, B) || error("isf has incorrect size")
@@ -104,7 +104,7 @@ function _find_violations(;
is_vulnerable[c.lines[1].offset] = true
end
@threads :static for t in 1:T
@threads for t in 1:T
k = threadid()
# Pre-contingency flows

View File

@@ -103,30 +103,6 @@ function solution(model::JuMP.Model)::OrderedDict
] for pu in sc.profiled_units
)
end
if !isempty(sc.storage_units)
sol[sc.name]["Storage level (MWh)"] =
timeseries(model[:storage_level], sc.storage_units, sc = sc)
sol[sc.name]["Is charging"] =
timeseries(model[:is_charging], sc.storage_units, sc = sc)
sol[sc.name]["Storage charging rates (MW)"] =
timeseries(model[:charge_rate], sc.storage_units, sc = sc)
sol[sc.name]["Storage charging cost (\$)"] = OrderedDict(
su.name => [
value(model[:charge_rate][sc.name, su.name, t]) *
su.charge_cost[t] for t in 1:instance.time
] for su in sc.storage_units
)
sol[sc.name]["Is discharging"] =
timeseries(model[:is_discharging], sc.storage_units, sc = sc)
sol[sc.name]["Storage discharging rates (MW)"] =
timeseries(model[:discharge_rate], sc.storage_units, sc = sc)
sol[sc.name]["Storage discharging cost (\$)"] = OrderedDict(
su.name => [
value(model[:discharge_rate][sc.name, su.name, t]) *
su.discharge_cost[t] for t in 1:instance.time
] for su in sc.storage_units
)
end
sol[sc.name]["Spinning reserve (MW)"] = OrderedDict(
r.name => OrderedDict(
g.name => [

View File

@@ -5,31 +5,13 @@
using JuMP
"""
generate_initial_conditions!(instance, optimizer)
generate_initial_conditions!(sc, optimizer)
Generates feasible initial conditions for the given instance, by constructing
Generates feasible initial conditions for the given scenario, by constructing
and solving a single-period mixed-integer optimization problem, using the given
optimizer. The instance is modified in-place.
optimizer. The scenario is modified in-place.
"""
function generate_initial_conditions!(
instance::UnitCommitmentInstance,
optimizer,
)::Nothing
# Process first scenario
_generate_initial_conditions!(instance.scenarios[1], optimizer)
# Copy initial conditions to remaining scenarios
for (si, sc) in enumerate(instance.scenarios)
si > 1 || continue
for (gi, g) in sc.thermal_units
g_ref = instance.scenarios[1].thermal_units[gi]
g.initial_power = g_ref.initial_power
g.initial_status = g_ref.initial_status
end
end
end
function _generate_initial_conditions!(
sc::UnitCommitmentScenario,
optimizer,
)::Nothing

View File

@@ -137,11 +137,6 @@ function _randomize_costs(
α = rand(rng, distribution)
pu.cost *= α
end
for su in sc.storage_units
α = rand(rng, distribution)
su.charge_cost *= α
su.discharge_cost *= α
end
return
end

View File

@@ -56,21 +56,6 @@ function slice(
ps.demand = ps.demand[range]
ps.revenue = ps.revenue[range]
end
for su in sc.storage_units
su.min_level = su.min_level[range]
su.max_level = su.max_level[range]
su.simultaneous_charge_and_discharge =
su.simultaneous_charge_and_discharge[range]
su.charge_cost = su.charge_cost[range]
su.discharge_cost = su.discharge_cost[range]
su.charge_efficiency = su.charge_efficiency[range]
su.discharge_efficiency = su.discharge_efficiency[range]
su.loss_factor = su.loss_factor[range]
su.min_charge_rate = su.min_charge_rate[range]
su.max_charge_rate = su.max_charge_rate[range]
su.min_discharge_rate = su.min_discharge_rate[range]
su.max_discharge_rate = su.max_discharge_rate[range]
end
end
return modified
end

View File

@@ -334,195 +334,6 @@ function _validate_units(instance::UnitCommitmentInstance, solution; tol = 0.01)
end
end
end
for su in sc.storage_units
storage_level = solution[sc.name]["Storage level (MWh)"][su.name]
charge_rate =
solution[sc.name]["Storage charging rates (MW)"][su.name]
discharge_rate =
solution[sc.name]["Storage discharging rates (MW)"][su.name]
actual_charge_cost =
solution[sc.name]["Storage charging cost (\$)"][su.name]
actual_discharge_cost =
solution[sc.name]["Storage discharging cost (\$)"][su.name]
is_charging = bin(solution[sc.name]["Is charging"][su.name])
is_discharging = bin(solution[sc.name]["Is discharging"][su.name])
# time in hours
time_step = sc.time_step / 60
for t in 1:instance.time
# Unit must store at least its minimum level
if storage_level[t] < su.min_level[t] - tol
@error @sprintf(
"Storage unit %s stores below its minimum level at time %d (%.2f < %.2f)",
su.name,
t,
storage_level[t],
su.min_level[t]
)
err_count += 1
end
# Unit must store at most its maximum level
if storage_level[t] > su.max_level[t] + tol
@error @sprintf(
"Storage unit %s stores above its maximum level at time %d (%.2f > %.2f)",
su.name,
t,
storage_level[t],
su.max_level[t]
)
err_count += 1
end
if t == instance.time
# Unit must store at least its minimum level at last time period
if storage_level[t] < su.min_ending_level - tol
@error @sprintf(
"Storage unit %s stores below its minimum ending level (%.2f < %.2f)",
su.name,
storage_level[t],
su.min_ending_level
)
err_count += 1
end
# Unit must store at most its maximum level at last time period
if storage_level[t] > su.max_ending_level + tol
@error @sprintf(
"Storage unit %s stores above its maximum ending level (%.2f > %.2f)",
su.name,
storage_level[t],
su.max_ending_level
)
err_count += 1
end
end
# Unit must follow the energy transition constraint
prev_level = t == 1 ? su.initial_level : storage_level[t-1]
current_level =
(1 - su.loss_factor[t]) * prev_level +
time_step * (
charge_rate[t] * su.charge_efficiency[t] -
discharge_rate[t] / su.discharge_efficiency[t]
)
if abs(storage_level[t] - current_level) > tol
@error @sprintf(
"Storage unit %s has unexpected level at time %d (%.2f should be %.2f)",
su.name,
t,
storage_level[t],
current_level
)
err_count += 1
end
# Unit cannot simultaneously charge and discharge when that is not allowed
if !su.simultaneous_charge_and_discharge[t] &&
is_charging[t] &&
is_discharging[t]
@error @sprintf(
"Storage unit %s is charging and discharging simultaneous at time %d",
su.name,
t
)
err_count += 1
end
# Unit must charge at least its minimum rate
if is_charging[t] &&
(charge_rate[t] < su.min_charge_rate[t] - tol)
@error @sprintf(
"Storage unit %s charges below its minimum limit at time %d (%.2f < %.2f)",
su.name,
t,
charge_rate[t],
su.min_charge_rate[t]
)
err_count += 1
end
# Unit must charge at most its maximum rate
if is_charging[t] &&
(charge_rate[t] > su.max_charge_rate[t] + tol)
@error @sprintf(
"Storage unit %s charges above its maximum limit at time %d (%.2f > %.2f)",
su.name,
t,
charge_rate[t],
su.max_charge_rate[t]
)
err_count += 1
end
# Unit must have zero charge when it is not charging
if !is_charging[t] && (charge_rate[t] > tol)
@error @sprintf(
"Storage unit %s charges power at time %d while not charging (%.2f > 0)",
su.name,
t,
charge_rate[t]
)
err_count += 1
end
# Unit must discharge at least its minimum rate
if is_discharging[t] &&
(discharge_rate[t] < su.min_discharge_rate[t] - tol)
@error @sprintf(
"Storage unit %s discharges below its minimum limit at time %d (%.2f < %.2f)",
unit.name,
t,
discharge_rate[t],
su.min_discharge_rate[t]
)
err_count += 1
end
# Unit must discharge at most its maximum rate
if is_discharging[t] &&
(discharge_rate[t] > su.max_discharge_rate[t] + tol)
@error @sprintf(
"Storage unit %s discharges above its maximum limit at time %d (%.2f > %.2f)",
unit.name,
t,
discharge_rate[t],
su.max_discharge_rate[t]
)
err_count += 1
end
# Unit must have zero discharge when it is not discharging
if !is_discharging[t] && (discharge_rate[t] > tol)
@error @sprintf(
"Storage unit %s discharges power at time %d while not discharging (%.2f > 0)",
su.name,
t,
discharge_rate[t]
)
err_count += 1
end
# Compute storage costs
charge_cost = su.charge_cost[t] * charge_rate[t]
discharge_cost = su.discharge_cost[t] * discharge_rate[t]
# Compare costs
if abs(actual_charge_cost[t] - charge_cost) > tol
@error @sprintf(
"Storage unit %s has unexpected charge cost at time %d (%.2f should be %.2f)",
su.name,
t,
actual_charge_cost[t],
charge_cost
)
err_count += 1
end
if abs(actual_discharge_cost[t] - discharge_cost) > tol
@error @sprintf(
"Storage unit %s has unexpected discharge cost at time %d (%.2f should be %.2f)",
unit.name,
t,
actual_discharge_cost[t],
discharge_cost
)
err_count += 1
end
end
end
end
return err_count
end
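The energy-transition check above enforces the storage balance: the new level equals the previous level scaled by (1 - loss factor), plus charged power times the charge efficiency, minus discharged power divided by the discharge efficiency, with rates converted to energy through the time step. A small numeric sketch with illustrative values (not taken from any fixture):
```julia
# 60-minute time step, 1% loss, 90% charge efficiency, 95% discharge efficiency
prev_level, loss, η_c, η_d, Δt = 50.0, 0.01, 0.90, 0.95, 1.0
charge_mw, discharge_mw = 10.0, 4.0
level = (1 - loss) * prev_level + Δt * (charge_mw * η_c - discharge_mw / η_d)
# level ≈ 49.5 + 9.0 - 4.21 ≈ 54.29 MWh, the value the validator would expect
```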
@@ -535,8 +346,6 @@ function _validate_reserve_and_demand(instance, solution, tol = 0.01)
fixed_load = sum(b.load[t] for b in sc.buses)
ps_load = 0
production = 0
storage_charge = 0
storage_discharge = 0
if length(sc.price_sensitive_loads) > 0
ps_load = sum(
solution[sc.name]["Price-sensitive loads (MW)"][ps.name][t]
@@ -555,38 +364,23 @@ function _validate_reserve_and_demand(instance, solution, tol = 0.01)
for pu in sc.profiled_units
)
end
if length(sc.storage_units) > 0
storage_charge += sum(
solution[sc.name]["Storage charging rates (MW)"][su.name][t]
for su in sc.storage_units
)
storage_discharge += sum(
solution[sc.name]["Storage discharging rates (MW)"][su.name][t]
for su in sc.storage_units
)
end
if "Load curtail (MW)" in keys(solution)
load_curtail = sum(
solution[sc.name]["Load curtail (MW)"][b.name][t] for
b in sc.buses
)
end
balance =
fixed_load - load_curtail - production +
ps_load +
storage_charge - storage_discharge
balance = fixed_load - load_curtail - production + ps_load
# Verify that production equals demand
if abs(balance) > tol
@error @sprintf(
"Non-zero power balance at time %d (%.2f + %.2f - %.2f - %.2f + %.2f - %.2f != 0)",
"Non-zero power balance at time %d (%.2f + %.2f - %.2f - %.2f != 0)",
t,
fixed_load,
ps_load,
load_curtail,
production,
storage_charge,
storage_discharge,
)
err_count += 1
end

View File

@@ -13,7 +13,6 @@ JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MPI = "da04e1cc-30fd-572f-bb4f-1f8673147195"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -12,17 +12,12 @@ include("model/formulations_test.jl")
include("solution/methods/XavQiuWanThi19/filter_test.jl")
include("solution/methods/XavQiuWanThi19/find_test.jl")
include("solution/methods/XavQiuWanThi19/sensitivity_test.jl")
include("solution/methods/ProgressiveHedging/usage_test.jl")
include("solution/methods/TimeDecomposition/initial_status_test.jl")
include("solution/methods/TimeDecomposition/optimize_test.jl")
include("solution/methods/TimeDecomposition/update_solution_test.jl")
include("transform/initcond_test.jl")
include("transform/slice_test.jl")
include("transform/randomize/XavQiuAhm2021_test.jl")
include("validation/repair_test.jl")
include("lmp/conventional_test.jl")
include("lmp/aelmp_test.jl")
include("market/market_test.jl")
basedir = dirname(@__FILE__)
@@ -42,18 +37,12 @@ function runtests()
solution_methods_XavQiuWanThi19_filter_test()
solution_methods_XavQiuWanThi19_find_test()
solution_methods_XavQiuWanThi19_sensitivity_test()
solution_methods_ProgressiveHedging_usage_test()
solution_methods_TimeDecomposition_initial_status_test()
solution_methods_TimeDecomposition_optimize_test()
solution_methods_TimeDecomposition_update_solution_test()
transform_initcond_test()
transform_slice_test()
# transform_randomize_XavQiuAhm2021_test()
transform_randomize_XavQiuAhm2021_test()
validation_repair_test()
lmp_conventional_test()
lmp_aelmp_test()
simple_market_test()
stochastic_market_test()
end
return
end
@@ -61,7 +50,6 @@ end
function format()
JuliaFormatter.format(basedir, verbose = true)
JuliaFormatter.format("$basedir/../../src", verbose = true)
JuliaFormatter.format("$basedir/../../docs/src", verbose = true)
return
end

View File

@@ -21,11 +21,11 @@ function instance_read_test()
@test length(sc.contingencies) == 19
@test length(sc.price_sensitive_loads) == 1
@test instance.time == 4
@test sc.time_step == 60
@test sc.lines[5].name == "l5"
@test sc.lines[5].source.name == "b2"
@test sc.lines[5].target.name == "b5"
@test sc.lines[5].reactance ≈ 0.17388
@test sc.lines[5].susceptance ≈ 10.037550333
@test sc.lines[5].normal_flow_limit == [1e8 for t in 1:4]
@test sc.lines[5].emergency_flow_limit == [1e8 for t in 1:4]
@@ -35,6 +35,7 @@ function instance_read_test()
@test sc.lines[1].name == "l1"
@test sc.lines[1].source.name == "b1"
@test sc.lines[1].target.name == "b2"
@test sc.lines[1].reactance ≈ 0.059170
@test sc.lines[1].susceptance ≈ 29.496860773945
@test sc.lines[1].normal_flow_limit == [300.0 for t in 1:4]
@test sc.lines[1].emergency_flow_limit == [400.0 for t in 1:4]
@@ -137,20 +138,20 @@ function instance_read_test()
sc = instance.scenarios[1]
@test length(sc.profiled_units) == 2
pu1 = sc.profiled_units[1]
@test pu1.name == "g7"
@test pu1.bus.name == "b4"
@test pu1.cost == [100.0 for t in 1:4]
@test pu1.min_power == [60.0 for t in 1:4]
@test pu1.max_power == [100.0 for t in 1:4]
first_pu = sc.profiled_units[1]
@test first_pu.name == "g7"
@test first_pu.bus.name == "b4"
@test first_pu.cost == [100.0 for t in 1:4]
@test first_pu.min_power == [60.0 for t in 1:4]
@test first_pu.max_power == [100.0 for t in 1:4]
@test sc.profiled_units_by_name["g7"].name == "g7"
pu2 = sc.profiled_units[2]
@test pu2.name == "g8"
@test pu2.bus.name == "b5"
@test pu2.cost == [50.0 for t in 1:4]
@test pu2.min_power == [0.0 for t in 1:4]
@test pu2.max_power == [120.0 for t in 1:4]
second_pu = sc.profiled_units[2]
@test second_pu.name == "g8"
@test second_pu.bus.name == "b5"
@test second_pu.cost == [50.0 for t in 1:4]
@test second_pu.min_power == [0.0 for t in 1:4]
@test second_pu.max_power == [120.0 for t in 1:4]
@test sc.profiled_units_by_name["g8"].name == "g8"
end
@@ -164,64 +165,4 @@ function instance_read_test()
@test sc.thermal_units[6].commitment_status ==
[false, nothing, true, nothing]
end
@testset "read_benchmark storage" begin
instance = UnitCommitment.read(fixture("case14-storage.json.gz"))
sc = instance.scenarios[1]
@test length(sc.storage_units) == 4
su1 = sc.storage_units[1]
@test su1.name == "su1"
@test su1.bus.name == "b2"
@test su1.min_level == [0.0 for t in 1:4]
@test su1.max_level == [100.0 for t in 1:4]
@test su1.simultaneous_charge_and_discharge == [true for t in 1:4]
@test su1.charge_cost == [2.0 for t in 1:4]
@test su1.discharge_cost == [2.5 for t in 1:4]
@test su1.charge_efficiency == [1.0 for t in 1:4]
@test su1.discharge_efficiency == [1.0 for t in 1:4]
@test su1.loss_factor == [0.0 for t in 1:4]
@test su1.min_charge_rate == [0.0 for t in 1:4]
@test su1.max_charge_rate == [10.0 for t in 1:4]
@test su1.min_discharge_rate == [0.0 for t in 1:4]
@test su1.max_discharge_rate == [8.0 for t in 1:4]
@test su1.initial_level == 0.0
@test su1.min_ending_level == 0.0
@test su1.max_ending_level == 100.0
@test sc.storage_units_by_name["su1"].name == "su1"
su2 = sc.storage_units[2]
@test su2.name == "su2"
@test su2.bus.name == "b2"
@test su2.min_level == [10.0 for t in 1:4]
@test su2.simultaneous_charge_and_discharge == [false for t in 1:4]
@test su2.charge_cost == [3.0 for t in 1:4]
@test su2.discharge_cost == [3.5 for t in 1:4]
@test su2.charge_efficiency == [0.8 for t in 1:4]
@test su2.discharge_efficiency == [0.85 for t in 1:4]
@test su2.loss_factor == [0.01 for t in 1:4]
@test su2.min_charge_rate == [5.0 for t in 1:4]
@test su2.min_discharge_rate == [2.0 for t in 1:4]
@test su2.initial_level == 70.0
@test su2.min_ending_level == 80.0
@test su2.max_ending_level == 85.0
@test sc.storage_units_by_name["su2"].name == "su2"
su3 = sc.storage_units[3]
@test su3.bus.name == "b9"
@test su3.min_level == [10.0, 11.0, 12.0, 13.0]
@test su3.max_level == [100.0, 110.0, 120.0, 130.0]
@test su3.charge_cost == [2.0, 2.1, 2.2, 2.3]
@test su3.discharge_cost == [1.0, 1.1, 1.2, 1.3]
@test su3.charge_efficiency == [0.8, 0.81, 0.82, 0.82]
@test su3.discharge_efficiency == [0.85, 0.86, 0.87, 0.88]
@test su3.min_charge_rate == [5.0, 5.1, 5.2, 5.3]
@test su3.max_charge_rate == [10.0, 10.1, 10.2, 10.3]
@test su3.min_discharge_rate == [4.0, 4.1, 4.2, 4.3]
@test su3.max_discharge_rate == [8.0, 8.1, 8.2, 8.3]
su4 = sc.storage_units[4]
@test su4.simultaneous_charge_and_discharge ==
[false, false, true, true]
end
end

View File

@@ -12,10 +12,7 @@ function lmp_aelmp_test()
instance = UnitCommitment.read(path)
model = UnitCommitment.build_model(
instance = instance,
optimizer = optimizer_with_attributes(
Cbc.Optimizer,
"logLevel" => 0,
),
optimizer = Cbc.Optimizer,
variable_names = true,
)
JuMP.set_silent(model)
@@ -25,10 +22,7 @@ function lmp_aelmp_test()
aelmp_1 = UnitCommitment.compute_lmp(
model,
AELMP(),
optimizer = optimizer_with_attributes(
HiGHS.Optimizer,
"log_to_console" => false,
),
optimizer = HiGHS.Optimizer,
)
@test aelmp_1["s1", "B1", 1] ≈ 231.7 atol = 0.1
@@ -39,10 +33,7 @@ function lmp_aelmp_test()
allow_offline_participation = false,
consider_startup_costs = true,
),
optimizer = optimizer_with_attributes(
HiGHS.Optimizer,
"log_to_console" => false,
),
optimizer = HiGHS.Optimizer,
)
@test aelmp_2["s1", "B1", 1] ≈ 274.3 atol = 0.1
end

View File

@@ -3,12 +3,13 @@
# Released under the modified BSD license. See COPYING.md for more details.
using UnitCommitment, Cbc, HiGHS, JuMP
import UnitCommitment: ConventionalLMP
function solve_conventional_testcase(path::String)
instance = UnitCommitment.read(path)
model = UnitCommitment.build_model(
instance = instance,
optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0),
optimizer = Cbc.Optimizer,
variable_names = true,
)
JuMP.set_silent(model)
@@ -16,10 +17,7 @@ function solve_conventional_testcase(path::String)
lmp = UnitCommitment.compute_lmp(
model,
ConventionalLMP(),
optimizer = optimizer_with_attributes(
HiGHS.Optimizer,
"log_to_console" => false,
),
optimizer = HiGHS.Optimizer,
)
return lmp
end

View File

@@ -1,145 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using UnitCommitment, Cbc, HiGHS, JuMP
import UnitCommitment: MarketSettings
function simple_market_test()
@testset "da-to-rt simple market" begin
da_path = fixture("market_da_simple.json.gz")
rt_paths = [
fixture("market_rt1_simple.json.gz"),
fixture("market_rt2_simple.json.gz"),
fixture("market_rt3_simple.json.gz"),
fixture("market_rt4_simple.json.gz"),
]
# solve market with default setting
solution = UnitCommitment.solve_market(
da_path,
rt_paths,
settings = MarketSettings(), # keep everything default
optimizer = optimizer_with_attributes(
Cbc.Optimizer,
"logLevel" => 0,
),
lp_optimizer = optimizer_with_attributes(
HiGHS.Optimizer,
"log_to_console" => false,
),
)
# the commitment status must agree with DA market
da_solution = solution["DA"]
@test da_solution["Is on"]["GenY"] == [0.0, 1.0]
@test da_solution["LMP (\$/MW)"][("s1", "B1", 1)] == 50.0
@test da_solution["LMP (\$/MW)"][("s1", "B1", 2)] == 56.0
rt_solution = solution["RT"]
@test length(rt_solution) == 4
@test rt_solution[1]["Is on"]["GenY"] == [0.0, 0.0]
@test rt_solution[2]["Is on"]["GenY"] == [0.0, 1.0]
@test rt_solution[3]["Is on"]["GenY"] == [1.0, 1.0]
@test rt_solution[4]["Is on"]["GenY"] == [1.0]
@test length(rt_solution[1]["LMP (\$/MW)"]) == 2
@test length(rt_solution[2]["LMP (\$/MW)"]) == 2
@test length(rt_solution[3]["LMP (\$/MW)"]) == 2
@test length(rt_solution[4]["LMP (\$/MW)"]) == 1
# solve market with no lmp method
solution_no_lmp = UnitCommitment.solve_market(
da_path,
rt_paths,
settings = MarketSettings(lmp_method = nothing), # no lmp
optimizer = optimizer_with_attributes(
Cbc.Optimizer,
"logLevel" => 0,
),
)
# the commitment status must agree with DA market
da_solution = solution_no_lmp["DA"]
@test haskey(da_solution, "LMP (\$/MW)") == false
rt_solution = solution_no_lmp["RT"][1]
@test haskey(rt_solution, "LMP (\$/MW)") == false
end
end
function stochastic_market_test()
@testset "da-to-rt stochastic market" begin
da_path = [
fixture("market_da_simple.json.gz"),
fixture("market_da_scenario.json.gz"),
]
rt_paths = [
fixture("market_rt1_simple.json.gz"),
fixture("market_rt2_simple.json.gz"),
fixture("market_rt3_simple.json.gz"),
fixture("market_rt4_simple.json.gz"),
]
# after build and after optimize
function after_build(model, instance)
@constraint(model, model[:is_on]["GenY", 1] == 1,)
end
lmps_da = []
lmps_rt = []
function after_optimize_da(solution, model, instance)
lmp = UnitCommitment.compute_lmp(
model,
ConventionalLMP(),
optimizer = optimizer_with_attributes(
HiGHS.Optimizer,
"log_to_console" => false,
),
)
return push!(lmps_da, lmp)
end
function after_optimize_rt(solution, model, instance)
lmp = UnitCommitment.compute_lmp(
model,
ConventionalLMP(),
optimizer = optimizer_with_attributes(
HiGHS.Optimizer,
"log_to_console" => false,
),
)
return push!(lmps_rt, lmp)
end
# solve the stochastic market with callbacks
solution = UnitCommitment.solve_market(
da_path,
rt_paths,
settings = MarketSettings(), # keep everything default
optimizer = optimizer_with_attributes(
Cbc.Optimizer,
"logLevel" => 0,
),
lp_optimizer = optimizer_with_attributes(
HiGHS.Optimizer,
"log_to_console" => false,
),
after_build_da = after_build,
after_optimize_da = after_optimize_da,
after_optimize_rt = after_optimize_rt,
)
# the commitment status must agree with DA market
da_solution_sp = solution["DA"]["market_da_simple"]
da_solution_sc = solution["DA"]["market_da_scenario"]
@test da_solution_sc["Is on"]["GenY"] == [1.0, 1.0]
@test da_solution_sp["LMP (\$/MW)"][("market_da_simple", "B1", 1)] ==
25.0
@test da_solution_sc["LMP (\$/MW)"][("market_da_scenario", "B1", 2)] ==
0.0
rt_solution = solution["RT"]
@test rt_solution[1]["Is on"]["GenY"] == [1.0, 1.0]
@test rt_solution[2]["Is on"]["GenY"] == [1.0, 1.0]
@test rt_solution[3]["Is on"]["GenY"] == [1.0, 1.0]
@test rt_solution[4]["Is on"]["GenY"] == [1.0]
@test length(lmps_rt) == 4
end
end

View File

@@ -1,40 +0,0 @@
using HiGHS
using MPI
using JuMP
using UnitCommitment
UnitCommitment._setup_logger(level = Base.CoreLogging.Error)
function fixture(path::String)::String
basedir = dirname(@__FILE__)
return "$basedir/../../../../fixtures/$path"
end
# Initialize MPI
MPI.Init()
# Configure progressive hedging method
ph = UnitCommitment.ProgressiveHedging()
# Read problem instance
instance = UnitCommitment.read(
[fixture("case14.json.gz"), fixture("case14.json.gz")],
ph,
)
# Build JuMP model
model = UnitCommitment.build_model(
instance = instance,
optimizer = optimizer_with_attributes(
HiGHS.Optimizer,
MOI.Silent() => true,
),
)
# Run the decentralized optimization algorithm
UnitCommitment.optimize!(model, ph)
# Fetch the solution
solution = UnitCommitment.solution(model, ph)
# Close MPI
MPI.Finalize()

View File

@@ -1,16 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using MPI
function solution_methods_ProgressiveHedging_usage_test()
basedir = dirname(@__FILE__)
@testset "ProgressiveHedging" begin
mpiexec() do exe
return run(
`$exe -n 2 $(Base.julia_cmd()) --project=test $basedir/ph.jl`,
)
end
end
end

View File

@@ -1,159 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using UnitCommitment, DataStructures
function solution_methods_TimeDecomposition_initial_status_test()
@testset "determine_initial_status" begin
hot_start = 100
cold_start = -100
# all on throughout
stat_seq = ones(36)
# hot start
new_stat = UnitCommitment._determine_initial_status(hot_start, stat_seq)
@test new_stat == 136
# cold start
new_stat =
UnitCommitment._determine_initial_status(cold_start, stat_seq)
@test new_stat == 36
# off in the last 12 periods
stat_seq = ones(36)
stat_seq[25:end] .= 0
# hot start
new_stat = UnitCommitment._determine_initial_status(hot_start, stat_seq)
@test new_stat == -12
# cold start
new_stat =
UnitCommitment._determine_initial_status(cold_start, stat_seq)
@test new_stat == -12
# off in one period
stat_seq = ones(36)
stat_seq[10] = 0
# hot start
new_stat = UnitCommitment._determine_initial_status(hot_start, stat_seq)
@test new_stat == 26
# cold start
new_stat =
UnitCommitment._determine_initial_status(cold_start, stat_seq)
@test new_stat == 26
# off in several of the first 24 periods
stat_seq = ones(36)
stat_seq[[10, 11, 20]] .= 0
# hot start
new_stat = UnitCommitment._determine_initial_status(hot_start, stat_seq)
@test new_stat == 16
# cold start
new_stat =
UnitCommitment._determine_initial_status(cold_start, stat_seq)
@test new_stat == 16
# all off throughout
stat_seq = zeros(36)
# hot start
new_stat = UnitCommitment._determine_initial_status(hot_start, stat_seq)
@test new_stat == -36
# cold start
new_stat =
UnitCommitment._determine_initial_status(cold_start, stat_seq)
@test new_stat == -136
# on in the last 12 periods
stat_seq = zeros(36)
stat_seq[25:end] .= 1
# hot start
new_stat = UnitCommitment._determine_initial_status(hot_start, stat_seq)
@test new_stat == 12
# cold start
new_stat =
UnitCommitment._determine_initial_status(cold_start, stat_seq)
@test new_stat == 12
end
@testset "set_initial_status" begin
# read one scenario
instance = UnitCommitment.read(fixture("case14.json.gz"))
psuedo_solution = OrderedDict(
"Thermal production (MW)" => OrderedDict(
"g1" => [0.0, 112.0, 114.0, 116.0],
"g2" => [0.0, 102.0, 0.0, 0.0],
"g3" => [0.0, 0.0, 0.0, 0.0],
"g4" => [0.0, 34.0, 66.0, 99.0],
"g5" => [0.0, 34.0, 66.0, 99.0],
"g6" => [0.0, 100.0, 100.0, 100.0],
),
"Is on" => OrderedDict(
"g1" => [0.0, 1.0, 1.0, 1.0],
"g2" => [0.0, 1.0, 0.0, 0.0],
"g3" => [0.0, 0.0, 0.0, 0.0],
"g4" => [0.0, 1.0, 1.0, 1.0],
"g5" => [0.0, 1.0, 1.0, 1.0],
"g6" => [0.0, 1.0, 1.0, 1.0],
),
)
UnitCommitment._set_initial_status!(instance, psuedo_solution, 3)
thermal_units = instance.scenarios[1].thermal_units
@test thermal_units[1].initial_power == 116.0
@test thermal_units[1].initial_status == 3.0
@test thermal_units[2].initial_power == 0.0
@test thermal_units[2].initial_status == -2.0
@test thermal_units[3].initial_power == 0.0
@test thermal_units[3].initial_status == -9.0
# read multiple scenarios
instance = UnitCommitment.read([
fixture("case14.json.gz"),
fixture("case14-profiled.json.gz"),
])
psuedo_solution = OrderedDict(
"case14" => OrderedDict(
"Thermal production (MW)" => OrderedDict(
"g1" => [0.0, 112.0, 114.0, 116.0],
"g2" => [0.0, 102.0, 0.0, 0.0],
"g3" => [0.0, 0.0, 0.0, 0.0],
"g4" => [0.0, 34.0, 66.0, 99.0],
"g5" => [0.0, 34.0, 66.0, 99.0],
"g6" => [0.0, 100.0, 100.0, 100.0],
),
"Is on" => OrderedDict(
"g1" => [0.0, 1.0, 1.0, 1.0],
"g2" => [0.0, 1.0, 0.0, 0.0],
"g3" => [0.0, 0.0, 0.0, 0.0],
"g4" => [0.0, 1.0, 1.0, 1.0],
"g5" => [0.0, 1.0, 1.0, 1.0],
"g6" => [0.0, 1.0, 1.0, 1.0],
),
),
"case14-profiled" => OrderedDict(
"Thermal production (MW)" => OrderedDict(
"g1" => [0.0, 113.0, 116.0, 115.0],
"g2" => [0.0, 0.0, 0.0, 0.0],
"g3" => [0.0, 0.0, 0.0, 20.0],
"g4" => [0.0, 34.0, 66.0, 98.0],
"g5" => [0.0, 34.0, 66.0, 97.0],
"g6" => [0.0, 100.0, 100.0, 100.0],
),
"Is on" => OrderedDict(
"g1" => [0.0, 1.0, 1.0, 1.0],
"g2" => [0.0, 0.0, 0.0, 0.0],
"g3" => [0.0, 0.0, 0.0, 1.0],
"g4" => [0.0, 1.0, 1.0, 1.0],
"g5" => [0.0, 1.0, 1.0, 1.0],
"g6" => [0.0, 1.0, 1.0, 1.0],
),
),
)
UnitCommitment._set_initial_status!(instance, psuedo_solution, 3)
thermal_units_sc2 = instance.scenarios[2].thermal_units
@test thermal_units_sc2[1].initial_power == 115.0
@test thermal_units_sc2[1].initial_status == 3.0
@test thermal_units_sc2[2].initial_power == 0.0
@test thermal_units_sc2[2].initial_status == -11.0
@test thermal_units_sc2[3].initial_power == 20.0
@test thermal_units_sc2[3].initial_status == 1.0
end
end

View File

@@ -1,88 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using UnitCommitment, DataStructures, Cbc, HiGHS
import UnitCommitment: TimeDecomposition, ConventionalLMP
function solution_methods_TimeDecomposition_optimize_test()
@testset "optimize_time_decomposition" begin
# read one scenario
instance = UnitCommitment.read(fixture("case14.json.gz"))
solution = UnitCommitment.optimize!(
instance,
TimeDecomposition(time_window = 3, time_increment = 2),
optimizer = optimizer_with_attributes(
Cbc.Optimizer,
"logLevel" => 0,
),
)
@test length(solution["Thermal production (MW)"]["g1"]) == 4
@test length(solution["Is on"]["g2"]) == 4
@test length(solution["Spinning reserve (MW)"]["r1"]["g2"]) == 4
# read one scenario with after_build and after_optimize
function after_build(model, instance)
@constraint(
model,
model[:is_on]["g3", 1] + model[:is_on]["g4", 1] <= 1,
)
end
lmps = []
function after_optimize(solution, model, instance)
lmp = UnitCommitment.compute_lmp(
model,
ConventionalLMP(),
optimizer = optimizer_with_attributes(
HiGHS.Optimizer,
"log_to_console" => false,
),
)
return push!(lmps, lmp)
end
instance = UnitCommitment.read(fixture("case14-profiled.json.gz"))
solution = UnitCommitment.optimize!(
instance,
TimeDecomposition(time_window = 3, time_increment = 2),
optimizer = optimizer_with_attributes(
Cbc.Optimizer,
"logLevel" => 0,
),
after_build = after_build,
after_optimize = after_optimize,
)
@test length(lmps) == 2
@test lmps[1]["s1", "b1", 1] == 50.0
@test lmps[2]["s1", "b10", 2] ≈ 38.04 atol = 0.1
@test solution["Is on"]["g3"][1] == 1.0
@test solution["Is on"]["g4"][1] == 0.0
# read multiple scenarios
instance = UnitCommitment.read([
fixture("case14.json.gz"),
fixture("case14-profiled.json.gz"),
])
solution = UnitCommitment.optimize!(
instance,
TimeDecomposition(time_window = 3, time_increment = 2),
optimizer = optimizer_with_attributes(
Cbc.Optimizer,
"logLevel" => 0,
),
)
@test length(solution["case14"]["Thermal production (MW)"]["g3"]) == 4
@test length(solution["case14"]["Is on"]["g4"]) == 4
@test length(
solution["case14-profiled"]["Thermal production (MW)"]["g5"],
) == 4
@test length(solution["case14-profiled"]["Is on"]["g6"]) == 4
@test length(
solution["case14-profiled"]["Profiled production (MW)"]["g7"],
) == 4
@test length(
solution["case14-profiled"]["Spinning reserve (MW)"]["r1"]["g3"],
) == 4
end
end

View File

@@ -1,55 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using UnitCommitment, DataStructures
function solution_methods_TimeDecomposition_update_solution_test()
@testset "update_solution" begin
psuedo_solution = OrderedDict()
time_increment = 4
psuedo_sub_solution = OrderedDict(
"Thermal production (MW)" => OrderedDict(
"g1" => [100.0, 200.0, 300.0, 400.0, 500.0, 600.0],
),
"Is on" => OrderedDict("g1" => [1.0, 0.0, 1.0, 1.0, 0.0, 1.0]),
"Profiled production (MW)" => OrderedDict(
"g1" => [199.0, 299.0, 399.0, 499.0, 599.0, 699.0],
),
"Spinning reserve (MW)" => OrderedDict(
"r1" => OrderedDict(
"g1" => [31.0, 32.0, 33.0, 34.0, 35.0, 36.0],
),
),
)
# first update should directly copy the first 4 entries of sub solution
UnitCommitment._update_solution!(
psuedo_solution,
psuedo_sub_solution,
time_increment,
)
@test psuedo_solution["Thermal production (MW)"]["g1"] ==
[100.0, 200.0, 300.0, 400.0]
@test psuedo_solution["Is on"]["g1"] == [1.0, 0.0, 1.0, 1.0]
@test psuedo_solution["Profiled production (MW)"]["g1"] ==
[199.0, 299.0, 399.0, 499.0]
@test psuedo_solution["Spinning reserve (MW)"]["r1"]["g1"] ==
[31.0, 32.0, 33.0, 34.0]
# second update should append the first 4 entries of sub solution
UnitCommitment._update_solution!(
psuedo_solution,
psuedo_sub_solution,
time_increment,
)
@test psuedo_solution["Thermal production (MW)"]["g1"] ==
[100.0, 200.0, 300.0, 400.0, 100.0, 200.0, 300.0, 400.0]
@test psuedo_solution["Is on"]["g1"] ==
[1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0]
@test psuedo_solution["Profiled production (MW)"]["g1"] ==
[199.0, 299.0, 399.0, 499.0, 199.0, 299.0, 399.0, 499.0]
@test psuedo_solution["Spinning reserve (MW)"]["r1"]["g1"] ==
[31.0, 32.0, 33.0, 34.0, 31.0, 32.0, 33.0, 34.0]
end
end

View File

@@ -2,32 +2,27 @@
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using UnitCommitment, HiGHS, JuMP
using UnitCommitment, Cbc, JuMP
function transform_initcond_test()
@testset "generate_initial_conditions!" begin
# Load instance
instance = UnitCommitment.read(fixture("case118-initcond.json.gz"))
optimizer =
optimizer_with_attributes(HiGHS.Optimizer, MOI.Silent() => true)
optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
sc = instance.scenarios[1]
# All units should have unknown initial conditions
for sc in instance.scenarios
for g in sc.thermal_units
@test g.initial_power === nothing
@test g.initial_status === nothing
end
for g in sc.thermal_units
@test g.initial_power === nothing
@test g.initial_status === nothing
end
# Generate initial conditions
UnitCommitment.generate_initial_conditions!(instance, optimizer)
UnitCommitment.generate_initial_conditions!(sc, optimizer)
# All units should now have known initial conditions
for sc in instance.scenarios
for g in sc.thermal_units
@test g.initial_power !== nothing
@test g.initial_status !== nothing
end
for g in sc.thermal_units
@test g.initial_power !== nothing
@test g.initial_status !== nothing
end
# TODO: Check that initial conditions are feasible

View File

@@ -102,28 +102,5 @@ function transform_randomize_XavQiuAhm2021_test()
test_approx(pu1.cost[1], 98.039)
test_approx(pu2.cost[1], 48.385)
end
@testset "storage unit cost" begin
sc = UnitCommitment.read(
fixture("case14-storage.json.gz"),
).scenarios[1]
# Check original costs
su1 = sc.storage_units[1]
su3 = sc.storage_units[3]
test_approx(su1.charge_cost[4], 2.0)
test_approx(su1.discharge_cost[1], 2.5)
test_approx(su3.charge_cost[2], 2.1)
test_approx(su3.discharge_cost[3], 1.2)
randomize!(
sc,
XavQiuAhm2021.Randomization(randomize_load_profile = false),
rng = MersenneTwister(42),
)
# Check randomized costs
test_approx(su1.charge_cost[4], 1.961)
test_approx(su1.discharge_cost[1], 2.451)
test_approx(su3.charge_cost[2], 2.196)
test_approx(su3.discharge_cost[3], 1.255)
end
end
end

View File

@@ -65,34 +65,4 @@ function transform_slice_test()
variable_names = true,
)
end
@testset "slice storage units" begin
instance = UnitCommitment.read(fixture("case14-storage.json.gz"))
modified = UnitCommitment.slice(instance, 2:4)
sc = modified.scenarios[1]
# Should update all time-dependent fields
for su in sc.storage_units
@test length(su.min_level) == 3
@test length(su.max_level) == 3
@test length(su.simultaneous_charge_and_discharge) == 3
@test length(su.charge_cost) == 3
@test length(su.discharge_cost) == 3
@test length(su.charge_efficiency) == 3
@test length(su.discharge_efficiency) == 3
@test length(su.loss_factor) == 3
@test length(su.min_charge_rate) == 3
@test length(su.max_charge_rate) == 3
@test length(su.min_discharge_rate) == 3
@test length(su.max_discharge_rate) == 3
end
# Should be able to build model without errors
optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
model = UnitCommitment.build_model(
instance = modified,
optimizer = optimizer,
variable_names = true,
)
end
end

View File

@@ -1,2 +0,0 @@
TODO.md
jobs

View File

@@ -1,20 +0,0 @@
# Use official Julia image as base
FROM julia:1.11
WORKDIR /app
# Install project & dependencies
COPY Project.toml /app/Backend/
COPY src /app/Backend/src
RUN julia --project=. -e 'using Pkg; Pkg.develop(path="Backend"); Pkg.add("HiGHS"); Pkg.add("JuMP"); Pkg.precompile()'
COPY docker/startup.jl ./
# Set timezone to Chicago
ENV TZ=America/Chicago
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Set default environment variables
ENV UCJL_HOST="0.0.0.0"
ENV UCJL_PORT="9000"
# Run the server
CMD ["julia", "--threads", "1", "--procs", "1", "--project=.", "startup.jl"]

View File

@@ -1,15 +0,0 @@
docker-build:
docker build . -t ucjl-backend
docker-run:
docker stop ucjl-backend
docker rm ucjl-backend
docker run \
--restart always \
--detach \
--network custom \
--name ucjl-backend \
--volume ucjl_data:/app/Backend/jobs \
--memory 16g \
--cpus 4 \
ucjl-backend

View File

@@ -1,25 +0,0 @@
name = "Backend"
uuid = "948642ed-e3f9-4642-9296-0f1eaf40c938"
version = "0.1.0"
authors = ["Alinson S. Xavier <git@axavier.org>"]
[deps]
CodecZlib = "944b1d66-785c-5afd-91f1-9de20f533193"
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
Logging = "56ddb016-857b-54e1-b83d-db4d58db5568"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
UnitCommitment = "64606440-39ea-11e9-0f29-3303a1d3d877"
[compat]
CodecZlib = "0.7.8"
Dates = "1.11.0"
Distributed = "1.11.0"
HTTP = "1.10.19"
JSON = "0.21.4"
Logging = "1.11.0"
Printf = "1.11.0"
Random = "1.11.0"

View File

@@ -1,32 +0,0 @@
#!/usr/bin/env julia
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
# Load required packages
using HiGHS
using JuMP
using Backend
const UCJL_HOST = get(ENV, "UCJL_HOST", "0.0.0.0")
const UCJL_PORT = parse(Int, get(ENV, "UCJL_PORT", "9000"))
println("Starting UnitCommitment Backend Server...")
println("Host: $UCJL_HOST")
println("Port: $UCJL_PORT")
println("Press Ctrl+C to stop the server")
Backend.setup_logger()
server = Backend.start_server(UCJL_HOST, UCJL_PORT; optimizer = optimizer_with_attributes(HiGHS.Optimizer, "mip_rel_gap" => 0.001))
try
wait()
catch e
if e isa InterruptException
println("\nShutting down server...")
Backend.stop(server)
println("Server stopped")
else
rethrow(e)
end
end

View File

@@ -1,13 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
module Backend
basedir = joinpath(dirname(@__FILE__), "..")
include("jobs.jl")
include("server.jl")
include("log.jl")
end

View File

@@ -1,82 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Distributed
import Base: put!
Base.@kwdef mutable struct JobProcessor
pending = RemoteChannel(() -> Channel{String}(Inf))
processing = RemoteChannel(() -> Channel{String}(Inf))
shutdown = RemoteChannel(() -> Channel{Bool}(1))
worker_pid = nothing
monitor_task = nothing
work_fn = nothing
end
function Base.put!(processor::JobProcessor, job_id::String)
return put!(processor.pending, job_id)
end
function isbusy(processor::JobProcessor)
return isready(processor.pending) || isready(processor.processing)
end
function worker_loop(pending, processing, shutdown, work_fn)
@info "Starting worker loop"
while true
# Check for shutdown signal
if isready(shutdown)
@info "Shutdown signal received"
break
end
# Wait for a job with timeout
if !isready(pending)
sleep(0.1)
continue
end
# Move job from pending to processing queue
job_id = take!(pending)
put!(processing, job_id)
@info "Job started: $job_id"
# Run work function
try
work_fn(job_id)
catch e
@error "Job failed: job $job_id"
end
# Remove job from processing queue
take!(processing)
@info "Job finished: $job_id"
end
end
function start(processor::JobProcessor)
processor.monitor_task = @spawn begin
worker_loop(
processor.pending,
processor.processing,
processor.shutdown,
processor.work_fn,
)
end
return
end
function stop(processor::JobProcessor)
put!(processor.shutdown, true)
if processor.monitor_task !== nothing
try
wait(processor.monitor_task)
catch e
@warn "Error waiting for worker task" exception=e
end
end
return
end
export JobProcessor, start, stop, put!, isbusy
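A minimal usage sketch of this queue, assuming the `Backend` module above is loaded; the work function is purely illustrative:
```julia
using Backend

processor = JobProcessor(work_fn = job_id -> @info("processing $job_id"))
start(processor)              # spawns the worker loop
put!(processor, "job-001")    # enqueue a job id
sleep(1)                      # give the loop time to pick the job up
while isbusy(processor)       # then wait for both queues to drain
    sleep(0.1)
end
stop(processor)               # signal shutdown and wait for the task
```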

View File

@@ -1,36 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import Logging: min_enabled_level, shouldlog, handle_message
using Base.CoreLogging, Logging, Dates
struct TimeLogger <: AbstractLogger end
min_enabled_level(::TimeLogger) = CoreLogging.Info
shouldlog(logger::TimeLogger, level, _module, group, id) = true
function handle_message(
logger::TimeLogger,
level,
message,
_module,
group,
id,
filepath,
line;
kwargs...,
)
current_time = Dates.format(now(), "yyyy-mm-dd HH:MM:SS.sss")
print("[$current_time] ")
println(message)
flush(stdout)
flush(stderr)
return Base.Libc.flush_cstdio()
end
function setup_logger()
global_logger(TimeLogger())
@spawn global_logger(TimeLogger())
return
end

View File

@@ -1,147 +0,0 @@
using HTTP
using Random
using JSON
using CodecZlib
using UnitCommitment
struct ServerHandle
server::HTTP.Server
processor::JobProcessor
end
RESPONSE_HEADERS = [
"Access-Control-Allow-Origin" => "*",
"Access-Control-Allow-Methods" => "GET, POST, OPTIONS",
"Access-Control-Allow-Headers" => "Content-Type",
]
function submit(req, processor::JobProcessor)
# Check if request body is empty
compressed_body = HTTP.payload(req)
if isempty(compressed_body)
return HTTP.Response(400, RESPONSE_HEADERS, "Error: No file provided")
end
# Validate compressed JSON by decompressing and parsing
try
decompressed_data = transcode(GzipDecompressor, compressed_body)
JSON.parse(String(decompressed_data))
catch e
return HTTP.Response(
400,
RESPONSE_HEADERS,
"Error: Invalid compressed JSON",
)
end
# Generate random job ID (lowercase letters and numbers)
job_id = randstring(['a':'z'; '0':'9'], 16)
# Create job directory
job_dir = joinpath(basedir, "jobs", job_id)
mkpath(job_dir)
# Save input file
json_path = joinpath(job_dir, "input.json.gz")
write(json_path, compressed_body)
# Add job to queue
put!(processor, job_id)
# Return job ID as JSON
response_body = JSON.json(Dict("job_id" => job_id))
return HTTP.Response(200, RESPONSE_HEADERS, response_body)
end
function jobs_view(req)
# Extract job_id from URL path /api/jobs/{job_id}/view
path_parts = split(req.target, '/')
job_id = path_parts[4]
# Construct job directory path
job_dir = joinpath(basedir, "jobs", job_id)
# Check if job directory exists
if !isdir(job_dir)
return HTTP.Response(404, RESPONSE_HEADERS, "Job not found")
end
# Read log file if it exists
log_path = joinpath(job_dir, "output.log")
log_content = isfile(log_path) ? read(log_path, String) : nothing
# Read output.json if it exists
output_path = joinpath(job_dir, "output.json")
output_content = isfile(output_path) ? read(output_path, String) : nothing
# Create response JSON
response_data = Dict("log" => log_content, "solution" => output_content)
response_body = JSON.json(response_data)
return HTTP.Response(200, RESPONSE_HEADERS, response_body)
end
function start_server(host, port; optimizer)
Random.seed!()
function work_fn(job_id)
job_dir = joinpath(basedir, "jobs", job_id)
mkpath(job_dir)
input_filename = joinpath(job_dir, "input.json.gz")
log_filename = joinpath(job_dir, "output.log")
solution_filename = joinpath(job_dir, "output.json")
try
open(log_filename, "w") do io
redirect_stdout(io) do
redirect_stderr(io) do
instance = UnitCommitment.read(input_filename)
model = UnitCommitment.build_model(;
instance,
optimizer = optimizer,
)
UnitCommitment.optimize!(model, UnitCommitment.XavQiuWanThi2019.Method(time_limit=900.0))
solution = UnitCommitment.solution(model)
UnitCommitment.write(solution_filename, solution)
return
end
end
end
catch e
open(log_filename, "a") do io
println(io, "\nError: ", e)
println(io, "\nStacktrace:")
return Base.show_backtrace(io, catch_backtrace())
end
end
return
end
# Create and start job processor
processor = JobProcessor(; work_fn)
start(processor)
router = HTTP.Router()
# Register CORS preflight endpoint
HTTP.register!(
router,
"OPTIONS",
"/**",
req -> HTTP.Response(200, RESPONSE_HEADERS, ""),
)
# Register /submit endpoint
HTTP.register!(router, "POST", "/api/submit", req -> submit(req, processor))
# Register job/*/view endpoint
HTTP.register!(router, "GET", "/api/jobs/*/view", jobs_view)
server = HTTP.serve!(router, host, port; verbose = false)
return ServerHandle(server, processor)
end
function stop(handle::ServerHandle)
stop(handle.processor)
close(handle.server)
return nothing
end
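A hedged client-side sketch of these two endpoints, assuming the backend is listening on localhost:9000 and a gzip-compressed instance file is available locally (the file name is a placeholder):
```julia
using HTTP, JSON

body = read("instance.json.gz")
resp = HTTP.post(
    "http://localhost:9000/api/submit",
    ["Content-Type" => "application/gzip"],
    body,
)
job_id = JSON.parse(String(resp.body))["job_id"]
# Poll the view endpoint; "solution" stays `nothing` until the job finishes.
view = JSON.parse(
    String(HTTP.get("http://localhost:9000/api/jobs/$job_id/view").body),
)
```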

View File

@@ -1,23 +0,0 @@
name = "BackendT"
uuid = "27da795e-16fd-43bd-a2ba-f77bdecaf977"
version = "0.1.0"
authors = ["Alinson S. Xavier <git@axavier.org>"]
[deps]
Backend = "948642ed-e3f9-4642-9296-0f1eaf40c938"
CodecZlib = "944b1d66-785c-5afd-91f1-9de20f533193"
Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3"
HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[compat]
CodecZlib = "0.7.8"
Distributed = "1.11.0"
HTTP = "1.10.19"
HiGHS = "1.20.1"
JSON = "0.21.4"
JuliaFormatter = "2.2.0"
Test = "1.11.0"

View File

@@ -1,43 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
module BackendT
using Distributed
using Test
using HTTP
using JSON
using CodecZlib
import Backend
import JuliaFormatter
using HiGHS
BASEDIR = dirname(@__FILE__)
include("jobs_test.jl")
include("server_test.jl")
function fixture(path::String)::String
return "$BASEDIR/../fixtures/$path"
end
function runtests()
Backend.setup_logger()
@testset "UCJL Backend" begin
server_test_usage()
jobs_test_usage()
end
return
end
function format()
JuliaFormatter.format(BASEDIR, verbose = true)
JuliaFormatter.format("$BASEDIR/../../src", verbose = true)
return
end
export runtests, format
end

View File

@@ -1,43 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Backend
using Test
function jobs_test_usage()
@testset "JobProcessor" begin
# Create a temporary directory for test output
test_dir = mktempdir()
# Define dummy work function that writes to a file
# Note: This function will be executed on a worker process
function work_fn(job_id)
output_file = joinpath(test_dir, job_id * ".txt")
write(output_file, job_id)
return
end
# Create processor with work function
processor = JobProcessor(; work_fn)
# Start the worker
start(processor)
# Push job to queue
put!(processor, "test")
# Wait for job to complete
# Increased timeout to account for worker process startup
sleep(2)
stop(processor)
# Check that the work function was called with correct job_id
output_file = joinpath(test_dir, "test.txt")
@test isfile(output_file)
@test read(output_file, String) == "test"
# Clean up
rm(test_dir; recursive = true)
end
end

View File

@@ -1,61 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
const HOST = "127.0.0.1"
const PORT = 32617
function server_test_usage()
server = Backend.start_server(HOST, PORT; optimizer = HiGHS.Optimizer)
try
# Read the compressed fixture file
compressed_data = read(fixture("case14.json.gz"))
# Submit test case
response = HTTP.post(
"http://$HOST:$PORT/api/submit",
["Content-Type" => "application/gzip"],
compressed_data,
)
@test response.status == 200
# Check response
response_data = JSON.parse(String(response.body))
@test haskey(response_data, "job_id")
job_id = response_data["job_id"]
@test length(job_id) == 16
# Wait for jobs to finish
sleep(5)
while isbusy(server.processor)
sleep(0.1)
end
# Verify the compressed file was saved correctly
job_dir = joinpath(Backend.basedir, "jobs", job_id)
saved_input_path = joinpath(job_dir, "input.json.gz")
saved_log_path = joinpath(job_dir, "output.log")
saved_output_path = joinpath(job_dir, "output.json")
@test isfile(saved_input_path)
@test isfile(saved_log_path)
@test isfile(saved_output_path)
saved_data = read(saved_input_path)
@test saved_data == compressed_data
# Query job information
view_response = HTTP.get("http://$HOST:$PORT/api/jobs/$job_id/view")
@test view_response.status == 200
# Check response
view_data = JSON.parse(String(view_response.body))
@test haskey(view_data, "log")
@test haskey(view_data, "solution")
@test view_data["log"] !== nothing
@test view_data["solution"] !== nothing
# Clean up
rm(job_dir, recursive = true)
finally
stop(server)
end
end

View File

@@ -1,7 +0,0 @@
node_modules
npm-debug.log
Dockerfile
.dockerignore
.git
.gitignore
.env

View File

@@ -1,2 +0,0 @@
FAST_REFRESH=false
REACT_APP_BACKEND_URL=http://localhost:9000/api

View File

@@ -1,25 +0,0 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# production
/build
# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*
assets

View File

@@ -1 +0,0 @@
{}

View File

@@ -1,18 +0,0 @@
# Build Stage
FROM node:18-alpine AS build
WORKDIR /app
COPY package*.json ./
RUN npm install
COPY . .
ARG REACT_APP_BACKEND_URL
ENV REACT_APP_BACKEND_URL=$REACT_APP_BACKEND_URL
RUN npm run build
# Production Stage
FROM node:18-alpine AS production
WORKDIR /app
COPY --from=build /app/build ./build
COPY server.js ./
RUN npm install --production express
EXPOSE 3000
CMD ["node", "server.js"]

View File

@@ -1,14 +0,0 @@
docker-build:
docker build . \
--build-arg REACT_APP_BACKEND_URL=https://ucjl.axavier.org/api \
-t ucjl-frontend
docker-run:
docker stop ucjl-frontend
docker rm ucjl-frontend
docker run \
--detach \
--network custom \
--restart always \
--name ucjl-frontend \
ucjl-frontend

File diff suppressed because it is too large

View File

@@ -1,66 +0,0 @@
{
"name": "web",
"version": "0.1.0",
"private": true,
"dependencies": {
"@fortawesome/fontawesome-svg-core": "^6.7.2",
"@fortawesome/free-regular-svg-icons": "^6.7.2",
"@fortawesome/free-solid-svg-icons": "^6.7.2",
"@fortawesome/react-fontawesome": "^0.2.2",
"@testing-library/dom": "^10.4.0",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.3.0",
"@testing-library/user-event": "^13.5.0",
"@types/jest": "^27.5.2",
"@types/node": "^16.18.126",
"@types/pako": "^2.0.3",
"@types/papaparse": "^5.3.16",
"@types/react": "^19.1.3",
"@types/react-dom": "^19.1.3",
"ajv": "^8.17.1",
"eslint": "^8.57.1",
"pako": "^2.1.0",
"papaparse": "^5.5.2",
"react": "^19.1.0",
"react-dom": "^19.1.0",
"react-router": "^7.9.5",
"react-scripts": "^5.0.1",
"tabulator-tables": "^6.3.1",
"typescript": "^4.9.5",
"web-vitals": "^2.1.4"
},
"scripts": {
"start": "react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject"
},
"eslintConfig": {
"extends": [
"react-app",
"react-app/jest"
],
"rules": {
"semi": [
"error",
"always"
]
}
},
"browserslist": {
"production": [
">0.2%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
},
"devDependencies": {
"@types/tabulator-tables": "^6.2.6",
"prettier": "3.5.3"
}
}

Binary file not shown.


View File

@@ -1,43 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<meta name="description" content="UnitCommitment.jl Case Builder" />
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
<title>Case Builder - UnitCommitment.jl</title>
<style>
:root {
--site-max-width: 1500px;
--site-min-width: 900px;
--box-border: 1px solid rgba(0, 0, 0, 0.2);
--box-shadow: 0px 2px 4px -3px rgba(0, 0, 0, 0.2);
--border-radius: 4px;
--primary: #0097A7;
--contrast-100: #202020;
--contrast-80: #606060;
--contrast-60: #909090;
--contrast-20: #d6d6d6;
--contrast-10: #f6f6f6;
--contrast-0: #fefefe;
}
body {
margin: 0;
padding: 0;
font-family: Arial, sans-serif;
background-color: #333;
}
.content {
background-color: var(--contrast-10);
padding-bottom: 36px;
}
</style>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
</body>
</html>

Binary file not shown.


Binary file not shown.


View File

@@ -1,25 +0,0 @@
{
"short_name": "React App",
"name": "Create React App Sample",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
},
{
"src": "logo192.png",
"type": "image/png",
"sizes": "192x192"
},
{
"src": "logo512.png",
"type": "image/png",
"sizes": "512x512"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

View File

@@ -1,3 +0,0 @@
# https://www.robotstxt.org/robotstxt.html
User-agent: *
Disallow:

View File

@@ -1,26 +0,0 @@
const express = require('express');
const path = require('path');
const app = express();
const PORT = process.env.PORT || 3000;
// Serve static files from the build directory
app.use(express.static(path.join(__dirname, 'build')));
// Handle client-side routing - serve index.html for all routes
app.get('/*splat', (req, res) => {
res.sendFile(path.join(__dirname, 'build', 'index.html'));
});
const server = app.listen(PORT, () => {
console.log(`Server is running on port ${PORT}`);
});
// Graceful shutdown on CTRL+C
process.on('SIGINT', () => {
console.log('\nShutting down gracefully...');
server.close(() => {
console.log('Server closed');
process.exit(0);
});
});

Some files were not shown because too many files have changed in this diff