Compare commits


35 Commits

Author SHA1 Message Date
4f0f57c29e Update CHANGELOG.md 2021-05-28 11:05:31 -05:00
e594a68492 Update CHANGELOG.md 2021-05-28 10:56:45 -05:00
b16c0f0133 Remove benchmark/Manifest.toml 2021-05-28 10:48:54 -05:00
4188c42d3d Remove benchmark/Manifest.toml 2021-05-27 22:27:19 -05:00
a684419f33 Reformat Python scripts 2021-05-27 22:26:38 -05:00
3687d42733 Fix validation when no price-sensitive loads are included 2021-05-27 22:14:49 -05:00
bd0d377c95 Update Makefile 2021-05-27 21:42:54 -05:00
9224cd2efb Format source code with JuliaFormatter; set up GH Actions 2021-05-27 21:37:38 -05:00
fb9221b8fb Properly validate solutions with price-sensitive loads 2021-05-27 21:14:37 -05:00
7eb1019410 Rename internal methods to _something; reformat code 2021-05-27 20:45:15 -05:00
11514b5de8 Rename fix!(instance) to repair! 2021-05-27 18:05:42 -05:00
3bd8428322 Make logs more colorful 2021-05-27 18:01:32 -05:00
99975db5cd Implement UnitCommitment.write 2021-05-27 18:01:05 -05:00
e2660f50f2 Update docs 2021-05-27 17:47:26 -05:00
d20c41704d Update docs 2021-05-27 17:20:00 -05:00
24871a7f8a Update docs 2021-05-27 17:04:03 -05:00
6adf12535e Add formulation section 2021-05-27 13:59:15 -05:00
117c8932e9 GitHub Actions: Fix tests; remove unused workflows 2021-05-27 12:09:42 -05:00
844c9377d8 Update test.yml 2021-05-27 11:47:48 -05:00
14a42188dd test.yml: Drop Julia 1.3 2021-05-27 11:46:51 -05:00
e9144ef9b2 Update test.yml 2021-05-27 11:44:17 -05:00
607bbeb75c Make build_model return a plain JuMP model 2021-05-27 11:30:49 -05:00
5c81be4660 Migrate docs from mkdocs to sphinx 2021-05-27 11:11:02 -05:00
3da6f7e08b Makefile: Bump version 2021-05-27 11:11:02 -05:00
c38c5be05d Merge pull request #10 from mtanneau/ArrayType: Fix Array type instability 2021-04-11 10:57:43 -05:00
mtanneau a37e7cd9b1 Fix Array type instability 2021-04-10 11:24:59 -04:00
5f74992cf6 Update CHANGELOG; bump version number 2021-03-09 11:07:59 -06:00
4947bff460 Implement sub-hourly commitment 2021-03-09 11:07:59 -06:00
5f0400fd93 Update dependencies 2021-03-09 11:07:59 -06:00
274fd6dfa1 Docs: Add "Time step (min)", rename "Time (h)" to "Time horizon (h)" 2021-03-09 11:07:59 -06:00
1cc4e312fb Update README.md 2020-12-30 09:17:31 -06:00
0282b27ed3 Create config.yml 2020-12-30 08:31:37 -06:00
612fdf0f80 Update issue templates 2020-12-30 08:27:57 -06:00
9b8bf9e9b2 Update README.md 2020-12-05 15:34:55 -06:00
Feng 98a19747ce Update README.md (updated acknowledgements) 2020-11-28 09:13:01 -06:00
59 changed files with 2188 additions and 5532 deletions

5
.JuliaFormatter.toml Normal file

@@ -0,0 +1,5 @@
always_for_in = true
always_use_return = true
margin = 80
remove_extra_newlines = true
short_to_long_function_def = true
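These settings are read by JuliaFormatter when formatting files inside the repository. As a minimal local sketch, mirroring the commands that the lint workflow added later in this diff runs (the pinned version 0.14.4 comes from that workflow, not from this config file):

```julia
# Install the JuliaFormatter version pinned by the lint workflow, then
# format the same directories that the CI check formats.
using Pkg
Pkg.add(PackageSpec(name = "JuliaFormatter", version = "0.14.4"))

using JuliaFormatter
format("src", verbose = true)
format("test", verbose = true)
format("benchmark", verbose = true)
```

Running the same commands locally that CI runs makes it easy to satisfy the new lint check before pushing.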

25
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file

@@ -0,0 +1,25 @@
---
name: Bug report
about: Something is broken in the package
title: ''
labels: ''
assignees: ''
---
## Description
A clear and concise description of what the bug is.
## Steps to Reproduce
Please describe how the developers can reproduce the problem on their own computers. Code snippets and sample input files are especially helpful. For example:
1. Install the package
2. Run the code below with the attached input file...
3. The following error appears...
## System Information
- Operating System: [e.g. Ubuntu 20.04]
- Julia version: [e.g. 1.4]
- Package version: [e.g. 0.0.1]

8
.github/ISSUE_TEMPLATE/config.yml vendored Normal file

@@ -0,0 +1,8 @@
blank_issues_enabled: false
contact_links:
- name: Feature Request
url: https://github.com/ANL-CEEESA/UnitCommitment.jl/discussions/categories/feature-requests
about: Submit ideas for new features and small enhancements
- name: Help & FAQ
url: https://github.com/ANL-CEEESA/UnitCommitment.jl/discussions/categories/help-faq
about: Ask questions about the package and get help from the community


@@ -1,28 +0,0 @@
name: Benchmark
on: push
jobs:
benchmark:
runs-on: [self-hosted, benchmark]
if: "contains(github.event.head_commit.message, '[benchmark]')"
timeout-minutes: 10080
steps:
- uses: actions/checkout@v1
- name: Benchmark
run: |
julia --project=@. -e 'using Pkg; Pkg.instantiate()'
make build/sysimage.so
make -C benchmark clean
make -C benchmark -kj4
make -C benchmark tables
make -C benchmark clean-mps clean-sol
- name: Upload logs
uses: actions/upload-artifact@v2
with:
name: Logs
path: benchmark/results/*
- name: Upload tables & charts
uses: actions/upload-artifact@v2
with:
name: Tables
path: benchmark/tables/*

28
.github/workflows/lint.yml vendored Normal file

@@ -0,0 +1,28 @@
name: lint
on:
push:
pull_request:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@latest
with:
version: '1'
- uses: actions/checkout@v1
- name: Format check
shell: julia --color=yes {0}
run: |
using Pkg
Pkg.add(PackageSpec(name="JuliaFormatter", version="0.14.4"))
using JuliaFormatter
format("src", verbose=true)
format("test", verbose=true)
format("benchmark", verbose=true)
out = String(read(Cmd(`git diff`)))
if isempty(out)
exit(0)
end
@error "Some files have not been formatted !!!"
write(stdout, out)
exit(1)


@@ -1,19 +1,15 @@
name: Tests
on:
push:
paths:
- '**.jl'
- '**.toml'
pull_request:
paths:
- '**.jl'
- '**.toml'
schedule:
- cron: '45 10 * * *'
jobs:
test:
runs-on: ${{ matrix.os }}
strategy:
matrix:
julia-version: ['1.3', '1.4', '1']
julia-version: ['1.3', '1.4', '1.5', '1.6']
julia-arch: [x64, x86]
os: [ubuntu-latest, windows-latest, macOS-latest]
exclude:

3
.gitignore vendored

@@ -14,3 +14,6 @@ instances/_source
local
notebooks
TODO.md
docs/_build
.vscode
Manifest.toml


@@ -1,11 +1,49 @@
# UnitCommitment.jl
# Changelog
### Version 0.1.1 (Nov 16, 2020)
All notable changes to this project will be documented in this file.
* Fixes to MATLAB and PGLIB-UC instances
* Add OR-LIB and Tejada19 instances
* Improve documentation
- The format is based on [Keep a Changelog][changelog].
- This project adheres to [Semantic Versioning][semver].
- For versions before 1.0, we follow [the Pkg.jl convention][pkjjl]
that `0.a.b` is compatible with `0.a.c`.
### Version 0.1.0 (Nov 6, 2020)
[changelog]: https://keepachangelog.com/en/1.0.0/
[semver]: https://semver.org/spec/v2.0.0.html
[pkjjl]: https://pkgdocs.julialang.org/v1/compatibility/#compat-pre-1.0
* Initial public release
## [0.2.0] - 2021-05-28
### Added
- Add sub-hourly unit commitment.
- Add `UnitCommitment.write(filename, solution)`.
- Add mathematical formulation to the documentation.
### Changed
- Rename "Time (h)" parameter to "Time horizon (h)".
- Rename `UnitCommitment.get_solution` to `UnitCommitment.solution`, for better
consistency with JuMP style.
- Add an underscore to the name of all functions that do not appear in the
documentation (e.g. `something` becomes `_something`). These functions are not
part of the public API and may change without notice, even in PATCH releases.
- The function `UnitCommitment.build_model` now returns a plain JuMP model. The
struct `UnitCommitmentModel` has been completely removed. Accessing model
elements can now be accomplished as follows (see also the sketch after this list):
- `model.vars.x[idx]` becomes `model[:x][idx]`
- `model.eqs.y[idx]` becomes `model[:eq_y][idx]`
- `model.expr.z[idx]` becomes `model[:expr_z][idx]`
- `model.obj` becomes `model[:obj]`
- `model.isf` becomes `model[:isf]`
- `model.lodf` becomes `model[:lodf]`
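To illustrate the renamed functions and the new indexing convention listed above, here is a minimal end-to-end sketch. It is not taken from the repository: the `read_benchmark` call, the `case118` instance path, the output filename, and the use of Cbc as the solver are assumptions for illustration only.

```julia
using Cbc, UnitCommitment

# Assumed helper for loading a bundled benchmark instance (illustrative only).
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")

# build_model now returns a plain JuMP model instead of a UnitCommitmentModel struct.
model = UnitCommitment.build_model(
    instance = instance,
    optimizer = Cbc.Optimizer,
)

# Solve, then access model elements through JuMP's symbol-based indexing.
UnitCommitment.optimize!(model)
obj = model[:obj]    # formerly model.obj
isf = model[:isf]    # formerly model.isf

# Extract the solution and export it with the new write function.
solution = UnitCommitment.solution(model)    # formerly get_solution
UnitCommitment.write("output.json", solution)
```

Variables and constraints previously reached through `model.vars` or `model.eqs` follow the same pattern, e.g. `model[:x][idx]` and `model[:eq_y][idx]`.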
### Fixed
- Properly validate solutions with price-sensitive loads.
## [0.1.1] - 2020-11-16
### Added
- Add OR-LIB and Tejada19 instances.
- Improve documentation.
### Fixed
- Fixes to MATLAB and PGLIB-UC instances.
## [0.1.0] - 2020-11-06
- Initial public release.


@@ -3,8 +3,7 @@
# Released under the modified BSD license. See COPYING.md for more details.
JULIA := julia --color=yes --project=@.
MKDOCS := ~/.local/bin/mkdocs
VERSION := 0.1
VERSION := 0.2
build/sysimage.so: src/sysimage.jl Project.toml Manifest.toml
mkdir -p build
@@ -16,14 +15,18 @@ clean:
rm -rf build/*
docs:
$(MKDOCS) build -d ../docs/$(VERSION)/
rm ../docs/$(VERSION)/*.ipynb
install-deps-docs:
pip install --user mkdocs mkdocs-cinder python-markdown-math
cd docs; make clean; make dirhtml
rsync -avP --delete-after docs/_build/dirhtml/ ../docs/$(VERSION)/
test: build/sysimage.so
@echo Running tests...
$(JULIA) --sysimage build/sysimage.so -e 'using Pkg; Pkg.test("UnitCommitment")' | tee build/test.log
.PHONY: docs test
format:
julia -e 'using JuliaFormatter; format("src"); format("test"); format("benchmark")'
install-deps:
julia -e 'using Pkg; Pkg.add(PackageSpec(name="JuliaFormatter", version="0.14.4"))'
.PHONY: docs test format install-deps


@@ -1,367 +0,0 @@
# This file is machine-generated - editing it directly is not advised
[[Artifacts]]
deps = ["Pkg"]
git-tree-sha1 = "c30985d8821e0cd73870b17b0ed0ce6dc44cb744"
uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33"
version = "1.3.0"
[[Base64]]
uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
[[BenchmarkTools]]
deps = ["JSON", "Logging", "Printf", "Statistics", "UUIDs"]
git-tree-sha1 = "9e62e66db34540a0c919d72172cc2f642ac71260"
uuid = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
version = "0.5.0"
[[BinaryProvider]]
deps = ["Libdl", "Logging", "SHA"]
git-tree-sha1 = "ecdec412a9abc8db54c0efc5548c64dfce072058"
uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232"
version = "0.5.10"
[[Bzip2_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "c3598e525718abcc440f69cc6d5f60dda0a1b61e"
uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0"
version = "1.0.6+5"
[[CEnum]]
git-tree-sha1 = "215a9aa4a1f23fbd05b92769fdd62559488d70e9"
uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82"
version = "0.4.1"
[[Calculus]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "f641eb0a4f00c343bbc32346e1217b86f3ce9dad"
uuid = "49dc2e85-a5d0-5ad3-a950-438e2897f1b9"
version = "0.5.1"
[[Cbc]]
deps = ["BinaryProvider", "CEnum", "Cbc_jll", "Libdl", "MathOptInterface", "SparseArrays"]
git-tree-sha1 = "929d0500c50387e7ac7ae9956ca7d7ce5312c90d"
uuid = "9961bab8-2fa3-5c5a-9d89-47fab24efd76"
version = "0.7.1"
[[Cbc_jll]]
deps = ["Cgl_jll", "Clp_jll", "CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Osi_jll", "Pkg"]
git-tree-sha1 = "16b8ffa56b3ded6b201aa2f50623f260448aa205"
uuid = "38041ee0-ae04-5750-a4d2-bb4d0d83d27d"
version = "2.10.3+4"
[[Cgl_jll]]
deps = ["Clp_jll", "CompilerSupportLibraries_jll", "Libdl", "Pkg"]
git-tree-sha1 = "32be20ec1e4c40e5c5d1bbf949ba9918a92a7569"
uuid = "3830e938-1dd0-5f3e-8b8e-b3ee43226782"
version = "0.60.2+5"
[[Clp_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Osi_jll", "Pkg"]
git-tree-sha1 = "79263d9383ca89b35f31c33ab5b880536a8413a4"
uuid = "06985876-5285-5a41-9fcb-8948a742cc53"
version = "1.17.6+6"
[[CodecBzip2]]
deps = ["Bzip2_jll", "Libdl", "TranscodingStreams"]
git-tree-sha1 = "2e62a725210ce3c3c2e1a3080190e7ca491f18d7"
uuid = "523fee87-0ab8-5b00-afb7-3ecf72e48cfd"
version = "0.7.2"
[[CodecZlib]]
deps = ["TranscodingStreams", "Zlib_jll"]
git-tree-sha1 = "ded953804d019afa9a3f98981d99b33e3db7b6da"
uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
version = "0.7.0"
[[CoinUtils_jll]]
deps = ["CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "ee1f06ab89337b7f194c29377ab174e752cdf60d"
uuid = "be027038-0da8-5614-b30d-e42594cb92df"
version = "2.11.3+3"
[[CommonSubexpressions]]
deps = ["MacroTools", "Test"]
git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7"
uuid = "bbf7d656-a473-5ed7-a52c-81e309532950"
version = "0.3.0"
[[Compat]]
deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"]
git-tree-sha1 = "a706ff10f1cd8dab94f59fd09c0e657db8e77ff0"
uuid = "34da2185-b29b-5c13-b0c7-acf172513d20"
version = "3.23.0"
[[CompilerSupportLibraries_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "8e695f735fca77e9708e795eda62afdb869cbb70"
uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
version = "0.3.4+0"
[[DataStructures]]
deps = ["Compat", "InteractiveUtils", "OrderedCollections"]
git-tree-sha1 = "fb0aa371da91c1ff9dc7fbed6122d3e411420b9c"
uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
version = "0.18.8"
[[Dates]]
deps = ["Printf"]
uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
[[DelimitedFiles]]
deps = ["Mmap"]
uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab"
[[DiffResults]]
deps = ["StaticArrays"]
git-tree-sha1 = "da24935df8e0c6cf28de340b958f6aac88eaa0cc"
uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5"
version = "1.0.2"
[[DiffRules]]
deps = ["NaNMath", "Random", "SpecialFunctions"]
git-tree-sha1 = "eb0c34204c8410888844ada5359ac8b96292cfd1"
uuid = "b552c78f-8df3-52c6-915a-8e097449b14b"
version = "1.0.1"
[[Distributed]]
deps = ["Random", "Serialization", "Sockets"]
uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
[[DocStringExtensions]]
deps = ["LibGit2", "Markdown", "Pkg", "Test"]
git-tree-sha1 = "50ddf44c53698f5e784bbebb3f4b21c5807401b1"
uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
version = "0.8.3"
[[Documenter]]
deps = ["Base64", "Dates", "DocStringExtensions", "IOCapture", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "REPL", "Test", "Unicode"]
git-tree-sha1 = "71e35e069daa9969b8af06cef595a1add76e0a11"
uuid = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
version = "0.25.3"
[[ForwardDiff]]
deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "NaNMath", "Random", "SpecialFunctions", "StaticArrays"]
git-tree-sha1 = "1d090099fb82223abc48f7ce176d3f7696ede36d"
uuid = "f6369f11-7733-5829-9624-2563aa707210"
version = "0.10.12"
[[GZip]]
deps = ["Libdl"]
git-tree-sha1 = "039be665faf0b8ae36e089cd694233f5dee3f7d6"
uuid = "92fee26a-97fe-5a0c-ad85-20a5f3185b63"
version = "0.5.1"
[[HTTP]]
deps = ["Base64", "Dates", "IniFile", "MbedTLS", "Sockets"]
git-tree-sha1 = "c7ec02c4c6a039a98a15f955462cd7aea5df4508"
uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3"
version = "0.8.19"
[[IOCapture]]
deps = ["Logging"]
git-tree-sha1 = "377252859f740c217b936cebcd918a44f9b53b59"
uuid = "b5f81e59-6552-4d32-b1f0-c071b021bf89"
version = "0.1.1"
[[IniFile]]
deps = ["Test"]
git-tree-sha1 = "098e4d2c533924c921f9f9847274f2ad89e018b8"
uuid = "83e8ac13-25f8-5344-8a64-a9f2b223428f"
version = "0.5.0"
[[InteractiveUtils]]
deps = ["Markdown"]
uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
[[JLLWrappers]]
git-tree-sha1 = "c70593677bbf2c3ccab4f7500d0f4dacfff7b75c"
uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210"
version = "1.1.3"
[[JSON]]
deps = ["Dates", "Mmap", "Parsers", "Unicode"]
git-tree-sha1 = "81690084b6198a2e1da36fcfda16eeca9f9f24e4"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.21.1"
[[JSONSchema]]
deps = ["HTTP", "JSON", "ZipFile"]
git-tree-sha1 = "a9ecdbc90be216912a2e3e8a8e38dc4c93f0d065"
uuid = "7d188eb4-7ad8-530c-ae41-71a32a6d4692"
version = "0.3.2"
[[JuMP]]
deps = ["Calculus", "DataStructures", "ForwardDiff", "JSON", "LinearAlgebra", "MathOptInterface", "MutableArithmetics", "NaNMath", "Random", "SparseArrays", "Statistics"]
git-tree-sha1 = "57c17a221a55f81890aabf00f478886859e25eaf"
uuid = "4076af6c-e467-56ae-b986-b466b2749572"
version = "0.21.5"
[[LibGit2]]
deps = ["Printf"]
uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
[[Libdl]]
uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
[[LinearAlgebra]]
deps = ["Libdl"]
uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
[[Logging]]
uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
[[MacroTools]]
deps = ["Markdown", "Random"]
git-tree-sha1 = "6a8a2a625ab0dea913aba95c11370589e0239ff0"
uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
version = "0.5.6"
[[Markdown]]
deps = ["Base64"]
uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
[[MathOptInterface]]
deps = ["BenchmarkTools", "CodecBzip2", "CodecZlib", "JSON", "JSONSchema", "LinearAlgebra", "MutableArithmetics", "OrderedCollections", "SparseArrays", "Test", "Unicode"]
git-tree-sha1 = "4fd15565d1811be771e87a877f1e691a005d2b90"
uuid = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
version = "0.9.18"
[[MbedTLS]]
deps = ["Dates", "MbedTLS_jll", "Random", "Sockets"]
git-tree-sha1 = "1c38e51c3d08ef2278062ebceade0e46cefc96fe"
uuid = "739be429-bea8-5141-9913-cc70e7f3736d"
version = "1.0.3"
[[MbedTLS_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "0eef589dd1c26a3ac9d753fe1a8bcad63f956fa6"
uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1"
version = "2.16.8+1"
[[Mmap]]
uuid = "a63ad114-7e13-5084-954f-fe012c677804"
[[MutableArithmetics]]
deps = ["LinearAlgebra", "SparseArrays", "Test"]
git-tree-sha1 = "c14dea152799bd0376024e3c3c1c3a6cb06764c7"
uuid = "d8a4904e-b15c-11e9-3269-09a3773c0cb0"
version = "0.2.11"
[[NaNMath]]
git-tree-sha1 = "c84c576296d0e2fbb3fc134d3e09086b3ea617cd"
uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
version = "0.3.4"
[[OpenBLAS32_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "19c33675cdeb572c1b17f96c492459d4f4958036"
uuid = "656ef2d0-ae68-5445-9ca0-591084a874a2"
version = "0.3.10+0"
[[OpenSpecFun_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "9db77584158d0ab52307f8c04f8e7c08ca76b5b3"
uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e"
version = "0.5.3+4"
[[OrderedCollections]]
git-tree-sha1 = "cf59cfed2e2c12e8a2ff0a4f1e9b2cd8650da6db"
uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
version = "1.3.2"
[[Osi_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "bd436a97280df40938e66ae8d18e57aceb072856"
uuid = "7da25872-d9ce-5375-a4d3-7a845f58efdd"
version = "0.108.5+3"
[[PackageCompiler]]
deps = ["Libdl", "Pkg", "UUIDs"]
git-tree-sha1 = "3eee77c94646163f15bd8626acf494360897f890"
uuid = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
version = "1.2.3"
[[Parsers]]
deps = ["Dates"]
git-tree-sha1 = "6fa4202675c05ba0f8268a6ddf07606350eda3ce"
uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
version = "1.0.11"
[[Pkg]]
deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"]
uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
[[Printf]]
deps = ["Unicode"]
uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
[[REPL]]
deps = ["InteractiveUtils", "Markdown", "Sockets"]
uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
[[Random]]
deps = ["Serialization"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[SHA]]
uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
[[Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
[[SharedArrays]]
deps = ["Distributed", "Mmap", "Random", "Serialization"]
uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383"
[[Sockets]]
uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
[[SparseArrays]]
deps = ["LinearAlgebra", "Random"]
uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
[[SpecialFunctions]]
deps = ["OpenSpecFun_jll"]
git-tree-sha1 = "d8d8b8a9f4119829410ecd706da4cc8594a1e020"
uuid = "276daf66-3868-5448-9aa4-cd146d93841b"
version = "0.10.3"
[[StaticArrays]]
deps = ["LinearAlgebra", "Random", "Statistics"]
git-tree-sha1 = "da4cf579416c81994afd6322365d00916c79b8ae"
uuid = "90137ffa-7385-5640-81b9-e52037218182"
version = "0.12.5"
[[Statistics]]
deps = ["LinearAlgebra", "SparseArrays"]
uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
[[Test]]
deps = ["Distributed", "InteractiveUtils", "Logging", "Random"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[TranscodingStreams]]
deps = ["Random", "Test"]
git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c"
uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
version = "0.9.5"
[[UUIDs]]
deps = ["Random", "SHA"]
uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[[Unicode]]
uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
[[ZipFile]]
deps = ["Libdl", "Printf", "Zlib_jll"]
git-tree-sha1 = "c3a5637e27e914a7a445b8d0ad063d701931e9f7"
uuid = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea"
version = "0.9.3"
[[Zlib_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "320228915c8debb12cb434c59057290f0834dbf6"
uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
version = "1.2.11+18"


@@ -2,7 +2,7 @@ name = "UnitCommitment"
uuid = "64606440-39ea-11e9-0f29-3303a1d3d877"
authors = ["Santos Xavier, Alinson <axavier@anl.gov>"]
repo = "https://github.com/ANL-CEEESA/UnitCommitment.jl"
version = "0.1.1"
version = "0.2.0"
[deps]
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
@@ -12,7 +12,6 @@ JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Logging = "56ddb016-857b-54e1-b83d-db4d58db5568"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
PackageCompiler = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"


@@ -1,9 +1,18 @@
<a href="https://github.com/ANL-CEEESA/UnitCommitment.jl/actions?query=workflow%3ATest+branch%3Adev"><img src="https://github.com/iSoron/UnitCommitment.jl/workflows/Tests/badge.svg"></img></a>
<a href="https://github.com/ANL-CEEESA/UnitCommitment.jl/actions?query=workflow%3ABenchmark+branch%3Adev+is%3Asuccess"><img src="https://github.com/iSoron/UnitCommitment.jl/workflows/Benchmark/badge.svg"></img></a>
<a href="https://doi.org/10.5281/zenodo.4269874"><img src="https://zenodo.org/badge/doi/10.5281/zenodo.4269874.svg" alt="DOI"></a>
# UnitCommitment.jl
<h1 align="center">UnitCommitment.jl</h1>
<p align="center">
<a href="https://github.com/ANL-CEEESA/UnitCommitment.jl/actions?query=workflow%3ATest+branch%3Adev">
<img src="https://github.com/iSoron/UnitCommitment.jl/workflows/Tests/badge.svg"></img>
</a>
<a href="https://doi.org/10.5281/zenodo.4269874">
<img src="https://zenodo.org/badge/doi/10.5281/zenodo.4269874.svg" alt="DOI"></img>
</a>
<a href="https://github.com/ANL-CEEESA/UnitCommitment.jl/releases/">
<img src="https://img.shields.io/github/v/release/ANL-CEEESA/UnitCommitment.jl?include_prereleases&label=pre-release">
</a>
<a href="https://github.com/ANL-CEEESA/UnitCommitment.jl/discussions">
<img src="https://img.shields.io/badge/GitHub-Discussions-%23fc4ebc" />
</a>
</p>
**UnitCommitment.jl** (UC.jl) is an optimization package for the Security-Constrained Unit Commitment Problem (SCUC), a fundamental optimization problem in power systems used, for example, to clear the day-ahead electricity markets. The package provides benchmark instances for the problem and JuMP implementations of state-of-the-art mixed-integer programming formulations.
@@ -28,7 +37,7 @@
* We would like to thank **Aleksandr M. Kazachkov** (University of Florida), **Yonghong Chen** (Midcontinent Independent System Operator), **Feng Pan** (Pacific Northwest National Laboratory) for valuable feedback on early versions of this package.
* Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy under Contract No. DE-AC02-06CH11357.
* Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy under Contract No. DE-AC02-06CH11357, and the U.S. Department of Energy **Advanced Grid Modeling Program** under Grant DE-OE0000875
### Citing


@@ -6,9 +6,6 @@ SHELL := /bin/bash
JULIA := julia --project=. --sysimage ../build/sysimage.so
TIMESTAMP := $(shell date "+%Y-%m-%d %H:%M")
SRC_FILES := $(wildcard ../src/*.jl)
DEST := .
FORMULATION := tight
results_dir := results_$(FORMULATION)
INSTANCES_PGLIB := \
pglib-uc/ca/2014-09-01_reserves_0 \
@@ -41,206 +38,6 @@ INSTANCES_MATPOWER := \
matpower/case6468rte/2017-08-01 \
matpower/case6515rte/2017-08-01
INSTANCES_INFORMS1 := \
matpower/case1888rte/2017-01-01 \
matpower/case1888rte/2017-01-02 \
matpower/case1888rte/2017-01-03 \
matpower/case1888rte/2017-01-04 \
matpower/case1888rte/2017-01-05 \
matpower/case1888rte/2017-01-06 \
matpower/case1888rte/2017-01-07 \
matpower/case1888rte/2017-01-08 \
matpower/case1888rte/2017-01-09 \
matpower/case1888rte/2017-01-10 \
matpower/case1888rte/2017-01-11 \
matpower/case1888rte/2017-01-12 \
matpower/case1888rte/2017-01-13 \
matpower/case1888rte/2017-01-14 \
matpower/case1888rte/2017-01-15 \
matpower/case1888rte/2017-01-16 \
matpower/case1888rte/2017-01-17 \
matpower/case1888rte/2017-01-18 \
matpower/case1888rte/2017-01-19 \
matpower/case1888rte/2017-01-20 \
matpower/case1888rte/2017-01-21 \
matpower/case1888rte/2017-01-22 \
matpower/case1888rte/2017-01-23 \
matpower/case1888rte/2017-01-24 \
matpower/case1888rte/2017-01-25 \
matpower/case1888rte/2017-01-26 \
matpower/case1888rte/2017-01-27 \
matpower/case1888rte/2017-01-28 \
matpower/case1888rte/2017-01-29 \
matpower/case1888rte/2017-01-30 \
matpower/case1888rte/2017-01-31 \
matpower/case1888rte/2017-02-01 \
matpower/case1888rte/2017-02-02 \
matpower/case1888rte/2017-02-03 \
matpower/case1888rte/2017-02-04 \
matpower/case1888rte/2017-02-05 \
matpower/case1888rte/2017-02-06 \
matpower/case1888rte/2017-02-07 \
matpower/case1888rte/2017-02-08 \
matpower/case1888rte/2017-02-09 \
matpower/case1888rte/2017-02-10 \
matpower/case1888rte/2017-02-11 \
matpower/case1888rte/2017-02-12 \
matpower/case1888rte/2017-02-13 \
matpower/case1888rte/2017-02-14 \
matpower/case1888rte/2017-02-15 \
matpower/case1888rte/2017-02-16 \
matpower/case1888rte/2017-02-17 \
matpower/case1888rte/2017-02-18 \
matpower/case1888rte/2017-02-19 \
matpower/case1888rte/2017-02-20 \
matpower/case1888rte/2017-02-21 \
matpower/case1888rte/2017-02-22 \
matpower/case1888rte/2017-02-23 \
matpower/case1888rte/2017-02-24 \
matpower/case1888rte/2017-02-25 \
matpower/case1888rte/2017-02-26 \
matpower/case1888rte/2017-02-27 \
matpower/case1888rte/2017-02-28 \
matpower/case1888rte/2017-03-01
INSTANCES_INFORMS2 := \
matpower/case3375wp/2017-01-01 \
matpower/case3375wp/2017-01-02 \
matpower/case3375wp/2017-01-03 \
matpower/case3375wp/2017-01-04 \
matpower/case3375wp/2017-01-05 \
matpower/case3375wp/2017-01-06 \
matpower/case3375wp/2017-01-07 \
matpower/case3375wp/2017-01-08 \
matpower/case3375wp/2017-01-09 \
matpower/case3375wp/2017-01-10 \
matpower/case3375wp/2017-01-11 \
matpower/case3375wp/2017-01-12 \
matpower/case3375wp/2017-01-13 \
matpower/case3375wp/2017-01-14 \
matpower/case3375wp/2017-01-15 \
matpower/case3375wp/2017-01-16 \
matpower/case3375wp/2017-01-17 \
matpower/case3375wp/2017-01-18 \
matpower/case3375wp/2017-01-19 \
matpower/case3375wp/2017-01-20 \
matpower/case3375wp/2017-01-21 \
matpower/case3375wp/2017-01-22 \
matpower/case3375wp/2017-01-23 \
matpower/case3375wp/2017-01-24 \
matpower/case3375wp/2017-01-25 \
matpower/case3375wp/2017-01-26 \
matpower/case3375wp/2017-01-27 \
matpower/case3375wp/2017-01-28 \
matpower/case3375wp/2017-01-29 \
matpower/case3375wp/2017-01-30 \
matpower/case3375wp/2017-01-31 \
matpower/case3375wp/2017-02-01 \
matpower/case3375wp/2017-02-02 \
matpower/case3375wp/2017-02-03 \
matpower/case3375wp/2017-02-04 \
matpower/case3375wp/2017-02-05 \
matpower/case3375wp/2017-02-06 \
matpower/case3375wp/2017-02-07 \
matpower/case3375wp/2017-02-08 \
matpower/case3375wp/2017-02-09 \
matpower/case3375wp/2017-02-10 \
matpower/case3375wp/2017-02-11 \
matpower/case3375wp/2017-02-12 \
matpower/case3375wp/2017-02-13 \
matpower/case3375wp/2017-02-14 \
matpower/case3375wp/2017-02-15 \
matpower/case3375wp/2017-02-16 \
matpower/case3375wp/2017-02-17 \
matpower/case3375wp/2017-02-18 \
matpower/case3375wp/2017-02-19 \
matpower/case3375wp/2017-02-20 \
matpower/case3375wp/2017-02-21 \
matpower/case3375wp/2017-02-22 \
matpower/case3375wp/2017-02-23 \
matpower/case3375wp/2017-02-24 \
matpower/case3375wp/2017-02-25 \
matpower/case3375wp/2017-02-26 \
matpower/case3375wp/2017-02-27 \
matpower/case3375wp/2017-02-28 \
matpower/case3375wp/2017-03-01
INSTANCES_INFORMS3 := \
matpower/case6468rte/2017-01-01 \
matpower/case6468rte/2017-01-02 \
matpower/case6468rte/2017-01-03 \
matpower/case6468rte/2017-01-04 \
matpower/case6468rte/2017-01-05 \
matpower/case6468rte/2017-01-06 \
matpower/case6468rte/2017-01-07 \
matpower/case6468rte/2017-01-08 \
matpower/case6468rte/2017-01-09 \
matpower/case6468rte/2017-01-10 \
matpower/case6468rte/2017-01-11 \
matpower/case6468rte/2017-01-12 \
matpower/case6468rte/2017-01-13 \
matpower/case6468rte/2017-01-14 \
matpower/case6468rte/2017-01-15 \
matpower/case6468rte/2017-01-16 \
matpower/case6468rte/2017-01-17 \
matpower/case6468rte/2017-01-18 \
matpower/case6468rte/2017-01-19 \
matpower/case6468rte/2017-01-20 \
matpower/case6468rte/2017-01-21 \
matpower/case6468rte/2017-01-22 \
matpower/case6468rte/2017-01-23 \
matpower/case6468rte/2017-01-24 \
matpower/case6468rte/2017-01-25 \
matpower/case6468rte/2017-01-26 \
matpower/case6468rte/2017-01-27 \
matpower/case6468rte/2017-01-28 \
matpower/case6468rte/2017-01-29 \
matpower/case6468rte/2017-01-30 \
matpower/case6468rte/2017-01-31 \
matpower/case6468rte/2017-02-01 \
matpower/case6468rte/2017-02-02 \
matpower/case6468rte/2017-02-03 \
matpower/case6468rte/2017-02-04 \
matpower/case6468rte/2017-02-05 \
matpower/case6468rte/2017-02-06 \
matpower/case6468rte/2017-02-07 \
matpower/case6468rte/2017-02-08 \
matpower/case6468rte/2017-02-09 \
matpower/case6468rte/2017-02-10 \
matpower/case6468rte/2017-02-11 \
matpower/case6468rte/2017-02-12 \
matpower/case6468rte/2017-02-13 \
matpower/case6468rte/2017-02-14 \
matpower/case6468rte/2017-02-15 \
matpower/case6468rte/2017-02-16 \
matpower/case6468rte/2017-02-17 \
matpower/case6468rte/2017-02-18 \
matpower/case6468rte/2017-02-19 \
matpower/case6468rte/2017-02-20 \
matpower/case6468rte/2017-02-21 \
matpower/case6468rte/2017-02-22 \
matpower/case6468rte/2017-02-23 \
matpower/case6468rte/2017-02-24 \
matpower/case6468rte/2017-02-25 \
matpower/case6468rte/2017-02-26 \
matpower/case6468rte/2017-02-27 \
matpower/case6468rte/2017-02-28 \
matpower/case6468rte/2017-03-01
INSTANCES_TEST := \
test/case14
#SAMPLES := 1 2 3
SAMPLES := 1
SOLUTIONS_MATPOWER := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_MATPOWER))))
SOLUTIONS_PGLIB := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_PGLIB))))
SOLUTIONS_INFORMS1 := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_INFORMS1))))
SOLUTIONS_INFORMS2 := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_INFORMS2))))
SOLUTIONS_INFORMS3 := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_INFORMS3))))
SOLUTIONS_TEST := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_TEST))))
.PHONY: tables save small large clean-mps matpower pglib informs1 informs2 informs3 test pglib orlib
INSTANCES_ORLIB := \
or-lib/20_0_1_w \
or-lib/20_0_5_w \
@@ -265,8 +62,13 @@ INSTANCES_TEJADA19 := \
tejada19/UC_168h_131g \
tejada19/UC_168h_199g
SOLUTIONS_ORLIB := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_ORLIB))))
SOLUTIONS_TEJADA19 := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_TEJADA19))))
SAMPLES := 1 2 3 4 5
SOLUTIONS_MATPOWER := $(foreach s,$(SAMPLES),$(addprefix results/,$(addsuffix .$(s).sol.json,$(INSTANCES_MATPOWER))))
SOLUTIONS_PGLIB := $(foreach s,$(SAMPLES),$(addprefix results/,$(addsuffix .$(s).sol.json,$(INSTANCES_PGLIB))))
SOLUTIONS_ORLIB := $(foreach s,$(SAMPLES),$(addprefix results/,$(addsuffix .$(s).sol.json,$(INSTANCES_ORLIB))))
SOLUTIONS_TEJADA19 := $(foreach s,$(SAMPLES),$(addprefix results/,$(addsuffix .$(s).sol.json,$(INSTANCES_TEJADA19))))
.PHONY: tables save small large clean-mps matpower pglib orlib
all: matpower pglib orlib tejada19
@@ -274,51 +76,27 @@ matpower: $(SOLUTIONS_MATPOWER)
pglib: $(SOLUTIONS_PGLIB)
informs1: $(SOLUTIONS_INFORMS1)
informs2: $(SOLUTIONS_INFORMS2)
informs3: $(SOLUTIONS_INFORMS3)
test: $(SOLUTIONS_TEST)
orlib: $(SOLUTIONS_ORLIB)
tejada19: $(SOLUTIONS_TEJADA19)
clean:
@rm -rf tables/benchmark* tables/compare* $(results_dir)
@rm -rf tables/benchmark* tables/compare* results
clean-mps:
@rm -fv $(results_dir)/*/*.mps.gz results/*/*/*.mps.gz
@rm -fv results/*/*.mps.gz results/*/*/*.mps.gz
clean-sol:
@rm -rf $(results_dir)/*/*.sol.* results/*/*/*.sol.*
@rm -rf results/*/*.sol.* results/*/*/*.sol.*
save:
mkdir -p "runs/$(TIMESTAMP)"
rsync -avP $(results_dir) tables "runs/$(TIMESTAMP)/"
rsync -avP results tables "runs/$(TIMESTAMP)/"
results/%.sol.json: run.jl
@echo "run $*"
@mkdir -p $(dir $(DEST)/$(results_dir)/$*)
@$(JULIA) run.jl $* default $(DEST)/$(results_dir) 2>&1 | cat > $(DEST)/$(results_dir)/$*.log
@echo "run $* [done]"
results_tight/%.sol.json: run.jl
@echo "run $*"
@mkdir -p $(dir $(DEST)/$(results_dir)/$*)
@$(JULIA) run.jl $* tight $(DEST)/$(results_dir) 2>&1 | cat > $(DEST)/$(results_dir)/$*.log
@echo "run $* [done]"
results_default/%.sol.json: run.jl
@echo "run $*"
@mkdir -p $(dir $(DEST)/$(results_dir)/$*)
@$(JULIA) run.jl $* default $(DEST)/$(results_dir) 2>&1 | cat > $(DEST)/$(results_dir)/$*.log
@echo "run $* [done]"
results_sparse/%.sol.json: run.jl
@echo "run $*"
@mkdir -p $(dir $(DEST)/$(results_dir)/$*)
@$(JULIA) run.jl $* sparse $(DEST)/$(results_dir) 2>&1 | cat > $(DEST)/$(results_dir)/$*.log
@mkdir -p $(dir results/$*)
@$(JULIA) run.jl $* 2>&1 | cat > results/$*.log
@echo "run $* [done]"
tables:


@@ -1,389 +0,0 @@
# This file is machine-generated - editing it directly is not advised
[[Artifacts]]
deps = ["Pkg"]
git-tree-sha1 = "c30985d8821e0cd73870b17b0ed0ce6dc44cb744"
uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33"
version = "1.3.0"
[[Base64]]
uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
[[BenchmarkTools]]
deps = ["JSON", "Logging", "Printf", "Statistics", "UUIDs"]
git-tree-sha1 = "9e62e66db34540a0c919d72172cc2f642ac71260"
uuid = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
version = "0.5.0"
[[BinaryProvider]]
deps = ["Libdl", "Logging", "SHA"]
git-tree-sha1 = "ecdec412a9abc8db54c0efc5548c64dfce072058"
uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232"
version = "0.5.10"
[[Bzip2_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "c3598e525718abcc440f69cc6d5f60dda0a1b61e"
uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0"
version = "1.0.6+5"
[[CEnum]]
git-tree-sha1 = "215a9aa4a1f23fbd05b92769fdd62559488d70e9"
uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82"
version = "0.4.1"
[[Calculus]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "f641eb0a4f00c343bbc32346e1217b86f3ce9dad"
uuid = "49dc2e85-a5d0-5ad3-a950-438e2897f1b9"
version = "0.5.1"
[[Cbc]]
deps = ["BinaryProvider", "CEnum", "Cbc_jll", "Libdl", "MathOptInterface", "SparseArrays"]
git-tree-sha1 = "929d0500c50387e7ac7ae9956ca7d7ce5312c90d"
uuid = "9961bab8-2fa3-5c5a-9d89-47fab24efd76"
version = "0.7.1"
[[Cbc_jll]]
deps = ["Cgl_jll", "Clp_jll", "CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Osi_jll", "Pkg"]
git-tree-sha1 = "16b8ffa56b3ded6b201aa2f50623f260448aa205"
uuid = "38041ee0-ae04-5750-a4d2-bb4d0d83d27d"
version = "2.10.3+4"
[[Cgl_jll]]
deps = ["Clp_jll", "CompilerSupportLibraries_jll", "Libdl", "Pkg"]
git-tree-sha1 = "32be20ec1e4c40e5c5d1bbf949ba9918a92a7569"
uuid = "3830e938-1dd0-5f3e-8b8e-b3ee43226782"
version = "0.60.2+5"
[[Clp_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Osi_jll", "Pkg"]
git-tree-sha1 = "79263d9383ca89b35f31c33ab5b880536a8413a4"
uuid = "06985876-5285-5a41-9fcb-8948a742cc53"
version = "1.17.6+6"
[[CodecBzip2]]
deps = ["Bzip2_jll", "Libdl", "TranscodingStreams"]
git-tree-sha1 = "2e62a725210ce3c3c2e1a3080190e7ca491f18d7"
uuid = "523fee87-0ab8-5b00-afb7-3ecf72e48cfd"
version = "0.7.2"
[[CodecZlib]]
deps = ["TranscodingStreams", "Zlib_jll"]
git-tree-sha1 = "ded953804d019afa9a3f98981d99b33e3db7b6da"
uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
version = "0.7.0"
[[CoinUtils_jll]]
deps = ["CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "ee1f06ab89337b7f194c29377ab174e752cdf60d"
uuid = "be027038-0da8-5614-b30d-e42594cb92df"
version = "2.11.3+3"
[[CommonSubexpressions]]
deps = ["MacroTools", "Test"]
git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7"
uuid = "bbf7d656-a473-5ed7-a52c-81e309532950"
version = "0.3.0"
[[CompilerSupportLibraries_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "8e695f735fca77e9708e795eda62afdb869cbb70"
uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
version = "0.3.4+0"
[[DataStructures]]
deps = ["InteractiveUtils", "OrderedCollections"]
git-tree-sha1 = "88d48e133e6d3dd68183309877eac74393daa7eb"
uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
version = "0.17.20"
[[Dates]]
deps = ["Printf"]
uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
[[DiffResults]]
deps = ["StaticArrays"]
git-tree-sha1 = "da24935df8e0c6cf28de340b958f6aac88eaa0cc"
uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5"
version = "1.0.2"
[[DiffRules]]
deps = ["NaNMath", "Random", "SpecialFunctions"]
git-tree-sha1 = "eb0c34204c8410888844ada5359ac8b96292cfd1"
uuid = "b552c78f-8df3-52c6-915a-8e097449b14b"
version = "1.0.1"
[[Distributed]]
deps = ["Random", "Serialization", "Sockets"]
uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
[[DocStringExtensions]]
deps = ["LibGit2", "Markdown", "Pkg", "Test"]
git-tree-sha1 = "50ddf44c53698f5e784bbebb3f4b21c5807401b1"
uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
version = "0.8.3"
[[Documenter]]
deps = ["Base64", "Dates", "DocStringExtensions", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "REPL", "Test", "Unicode"]
git-tree-sha1 = "fb1ff838470573adc15c71ba79f8d31328f035da"
uuid = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
version = "0.25.2"
[[ForwardDiff]]
deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "NaNMath", "Random", "SpecialFunctions", "StaticArrays"]
git-tree-sha1 = "1d090099fb82223abc48f7ce176d3f7696ede36d"
uuid = "f6369f11-7733-5829-9624-2563aa707210"
version = "0.10.12"
[[GLPK]]
deps = ["BinaryProvider", "CEnum", "GLPK_jll", "Libdl", "MathOptInterface"]
git-tree-sha1 = "0984f1669480cdecd465458b4abf81b238fbfe50"
uuid = "60bf3e95-4087-53dc-ae20-288a0d20c6a6"
version = "0.14.2"
[[GLPK_jll]]
deps = ["GMP_jll", "Libdl", "Pkg"]
git-tree-sha1 = "ccc855de74292e478d4278e3a6fdd8212f75e81e"
uuid = "e8aa6df9-e6ca-548a-97ff-1f85fc5b8b98"
version = "4.64.0+0"
[[GMP_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "15abc5f976569a1c9d651aff02f7222ef305eb2a"
uuid = "781609d7-10c4-51f6-84f2-b8444358ff6d"
version = "6.1.2+6"
[[GZip]]
deps = ["Libdl"]
git-tree-sha1 = "039be665faf0b8ae36e089cd694233f5dee3f7d6"
uuid = "92fee26a-97fe-5a0c-ad85-20a5f3185b63"
version = "0.5.1"
[[Gurobi]]
deps = ["CEnum", "Libdl", "MathOptInterface"]
git-tree-sha1 = "de2015da3bffcf005ef51b78163e81bfb7b2301d"
uuid = "2e9cd046-0924-5485-92f1-d5272153d98b"
version = "0.9.2"
[[HTTP]]
deps = ["Base64", "Dates", "IniFile", "MbedTLS", "Sockets"]
git-tree-sha1 = "c7ec02c4c6a039a98a15f955462cd7aea5df4508"
uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3"
version = "0.8.19"
[[IniFile]]
deps = ["Test"]
git-tree-sha1 = "098e4d2c533924c921f9f9847274f2ad89e018b8"
uuid = "83e8ac13-25f8-5344-8a64-a9f2b223428f"
version = "0.5.0"
[[InteractiveUtils]]
deps = ["Markdown"]
uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
[[JLLWrappers]]
git-tree-sha1 = "c70593677bbf2c3ccab4f7500d0f4dacfff7b75c"
uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210"
version = "1.1.3"
[[JSON]]
deps = ["Dates", "Mmap", "Parsers", "Unicode"]
git-tree-sha1 = "b34d7cef7b337321e97d22242c3c2b91f476748e"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.21.0"
[[JSONSchema]]
deps = ["HTTP", "JSON", "ZipFile"]
git-tree-sha1 = "a9ecdbc90be216912a2e3e8a8e38dc4c93f0d065"
uuid = "7d188eb4-7ad8-530c-ae41-71a32a6d4692"
version = "0.3.2"
[[JuMP]]
deps = ["Calculus", "DataStructures", "ForwardDiff", "LinearAlgebra", "MathOptInterface", "MutableArithmetics", "NaNMath", "Random", "SparseArrays", "Statistics"]
git-tree-sha1 = "cbab42e2e912109d27046aa88f02a283a9abac7c"
uuid = "4076af6c-e467-56ae-b986-b466b2749572"
version = "0.21.3"
[[LibGit2]]
deps = ["Printf"]
uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
[[Libdl]]
uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
[[LinearAlgebra]]
deps = ["Libdl"]
uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
[[Logging]]
uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
[[MacroTools]]
deps = ["Markdown", "Random"]
git-tree-sha1 = "6a8a2a625ab0dea913aba95c11370589e0239ff0"
uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
version = "0.5.6"
[[Markdown]]
deps = ["Base64"]
uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
[[MathOptInterface]]
deps = ["BenchmarkTools", "CodecBzip2", "CodecZlib", "JSON", "JSONSchema", "LinearAlgebra", "MutableArithmetics", "OrderedCollections", "SparseArrays", "Test", "Unicode"]
git-tree-sha1 = "5a1d631e0a9087d425e024d66b9c71e92e78fda8"
uuid = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
version = "0.9.17"
[[MbedTLS]]
deps = ["Dates", "MbedTLS_jll", "Random", "Sockets"]
git-tree-sha1 = "1c38e51c3d08ef2278062ebceade0e46cefc96fe"
uuid = "739be429-bea8-5141-9913-cc70e7f3736d"
version = "1.0.3"
[[MbedTLS_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "c0b1286883cac4e2b617539de41111e0776d02e8"
uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1"
version = "2.16.8+0"
[[Mmap]]
uuid = "a63ad114-7e13-5084-954f-fe012c677804"
[[MutableArithmetics]]
deps = ["LinearAlgebra", "SparseArrays", "Test"]
git-tree-sha1 = "6cf09794783b9de2e662c4e8b60d743021e338d0"
uuid = "d8a4904e-b15c-11e9-3269-09a3773c0cb0"
version = "0.2.10"
[[NaNMath]]
git-tree-sha1 = "c84c576296d0e2fbb3fc134d3e09086b3ea617cd"
uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
version = "0.3.4"
[[OpenBLAS32_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "19c33675cdeb572c1b17f96c492459d4f4958036"
uuid = "656ef2d0-ae68-5445-9ca0-591084a874a2"
version = "0.3.10+0"
[[OpenSpecFun_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "9db77584158d0ab52307f8c04f8e7c08ca76b5b3"
uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e"
version = "0.5.3+4"
[[OrderedCollections]]
git-tree-sha1 = "16c08bf5dba06609fe45e30860092d6fa41fde7b"
uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
version = "1.3.1"
[[Osi_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "bd436a97280df40938e66ae8d18e57aceb072856"
uuid = "7da25872-d9ce-5375-a4d3-7a845f58efdd"
version = "0.108.5+3"
[[PackageCompiler]]
deps = ["Libdl", "Pkg", "UUIDs"]
git-tree-sha1 = "3eee77c94646163f15bd8626acf494360897f890"
uuid = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
version = "1.2.3"
[[Parsers]]
deps = ["Dates"]
git-tree-sha1 = "6fa4202675c05ba0f8268a6ddf07606350eda3ce"
uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
version = "1.0.11"
[[Pkg]]
deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"]
uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
[[Printf]]
deps = ["Unicode"]
uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
[[REPL]]
deps = ["InteractiveUtils", "Markdown", "Sockets"]
uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
[[Random]]
deps = ["Serialization"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[Requires]]
deps = ["UUIDs"]
git-tree-sha1 = "28faf1c963ca1dc3ec87f166d92982e3c4a1f66d"
uuid = "ae029012-a4dd-5104-9daa-d747884805df"
version = "1.1.0"
[[SHA]]
uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
[[Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
[[Sockets]]
uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
[[SparseArrays]]
deps = ["LinearAlgebra", "Random"]
uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
[[SpecialFunctions]]
deps = ["OpenSpecFun_jll"]
git-tree-sha1 = "d8d8b8a9f4119829410ecd706da4cc8594a1e020"
uuid = "276daf66-3868-5448-9aa4-cd146d93841b"
version = "0.10.3"
[[StaticArrays]]
deps = ["LinearAlgebra", "Random", "Statistics"]
git-tree-sha1 = "016d1e1a00fabc556473b07161da3d39726ded35"
uuid = "90137ffa-7385-5640-81b9-e52037218182"
version = "0.12.4"
[[Statistics]]
deps = ["LinearAlgebra", "SparseArrays"]
uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
[[Test]]
deps = ["Distributed", "InteractiveUtils", "Logging", "Random"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[TimerOutputs]]
deps = ["Printf"]
git-tree-sha1 = "f458ca23ff80e46a630922c555d838303e4b9603"
uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
version = "0.5.6"
[[TranscodingStreams]]
deps = ["Random", "Test"]
git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c"
uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
version = "0.9.5"
[[UUIDs]]
deps = ["Random", "SHA"]
uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[[Unicode]]
uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
[[UnitCommitment]]
deps = ["Cbc", "DataStructures", "Documenter", "GLPK", "GZip", "Gurobi", "JSON", "JuMP", "LinearAlgebra", "Logging", "MathOptInterface", "OrderedCollections", "PackageCompiler", "Printf", "Requires", "SparseArrays", "Test", "TimerOutputs"]
path = ".."
uuid = "64606440-39ea-11e9-0f29-3303a1d3d877"
version = "2.1.0"
[[ZipFile]]
deps = ["Libdl", "Printf", "Zlib_jll"]
git-tree-sha1 = "c3a5637e27e914a7a445b8d0ad063d701931e9f7"
uuid = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea"
version = "0.9.3"
[[Zlib_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "320228915c8debb12cb434c59057290f0834dbf6"
uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
version = "1.2.11+18"


@@ -10,48 +10,16 @@ using Logging
using Printf
using LinearAlgebra
UnitCommitment._setup_logger()
function main()
NUM_THREADS = 4
basename, suffix = split(ARGS[1], ".")
solution_filename = "results/$basename.$suffix.sol.json"
model_filename = "results/$basename.$suffix.mps.gz"
time_limit = 60 * 20
BLAS.set_num_threads(NUM_THREADS)
if length(ARGS) >= 2
mode = string("_", ARGS[2])
else
mode = "_default"
end
if length(ARGS) >= 3 && !isempty(strip(ARGS[3]))
results_dir = ARGS[3]
else
results_dir = string("./","results$mode")
end
# Validate mode and set formulation
if mode == "_default"
formulation = UnitCommitment.DefaultFormulation
elseif mode == "_tight"
formulation = UnitCommitment.TightFormulation
elseif mode == "_sparse"
formulation = UnitCommitment.SparseDefaultFormulation
else
error("Unknown formulation requested: ", ARGS[2])
end
# Filename is instance_name.sample_number.sol.gz
# Parse out the instance + sample parts to create output files
basename, suffix = split(ARGS[1], ".") # will not work if suffix part is not present
model_filename_stub = string(results_dir,"/$basename.$suffix")
solution_filename = string("$model_filename_stub.sol.json")
# Choose logging options
logname, logfile = nothing, nothing
#logname = string("$model_filename_stub.out")
if isa(logname, String) && !isempty(logname)
logfile = open(logname, "w")
global_logger(TimeLogger(initial_time = time(), file = logfile))
else
global_logger(TimeLogger(initial_time = time()))
end
BLAS.set_num_threads(4)
total_time = @elapsed begin
@info "Reading: $basename"
@@ -61,44 +29,38 @@ function main()
@info @sprintf("Read problem in %.2f seconds", time_read)
time_model = @elapsed begin
optimizer=optimizer_with_attributes(Gurobi.Optimizer,
"Threads" => NUM_THREADS,
"Seed" => rand(1:1000))
model = build_model(instance=instance, optimizer=optimizer, formulation=formulation)
end
model = build_model(
instance = instance,
optimizer = optimizer_with_attributes(
Gurobi.Optimizer,
"Threads" => 4,
"Seed" => rand(1:1000),
),
variable_names = true,
)
end
@info "Setting names..."
UnitCommitment.set_variable_names!(model)
model_filename = string(model_filename_stub,".init",".mps.gz")
@info string("Exporting initial model without transmission constraints to ", model_filename)
JuMP.write_to_file(model.mip, model_filename)
total_time += @elapsed begin
@info "Optimizing..."
BLAS.set_num_threads(1)
UnitCommitment.optimize!(model, time_limit=time_limit, gap_limit=1e-3)
UnitCommitment.optimize!(
model,
time_limit = time_limit,
gap_limit = 1e-3,
)
end
@info @sprintf("Total time was %.2f seconds", total_time)
@info "Writing: $solution_filename"
solution = UnitCommitment.get_solution(model)
solution = UnitCommitment.solution(model)
open(solution_filename, "w") do file
JSON.print(file, solution, 2)
return JSON.print(file, solution, 2)
end
@info "Verifying solution..."
UnitCommitment.validate(instance, solution)
model_filename = string(model_filename_stub,".final",".mps.gz")
@info string("Exporting final model to ", model_filename)
JuMP.write_to_file(model.mip, model_filename)
if !isnothing(logfile)
close(logfile)
@info "Exporting model..."
return JuMP.write_to_file(model, model_filename)
end
end # main
main()


@@ -13,15 +13,21 @@ import sys
b1 = pd.read_csv(sys.argv[1], index_col=0)
b2 = pd.read_csv(sys.argv[2], index_col=0)
c1 = b1.groupby(["Group", "Instance", "Sample"])[["Optimization time (s)", "Primal bound"]].mean()
c2 = b2.groupby(["Group", "Instance", "Sample"])[["Optimization time (s)", "Primal bound"]].mean()
c1 = b1.groupby(["Group", "Instance", "Sample"])[
["Optimization time (s)", "Primal bound"]
].mean()
c2 = b2.groupby(["Group", "Instance", "Sample"])[
["Optimization time (s)", "Primal bound"]
].mean()
c1.columns = ["A Time (s)", "A Value"]
c2.columns = ["B Time (s)", "B Value"]
merged = pd.concat([c1, c2], axis=1)
merged["Speedup"] = merged["A Time (s)"] / merged["B Time (s)"]
merged["Time diff (s)"] = merged["B Time (s)"] - merged["A Time (s)"]
merged["Value diff (%)"] = np.round((merged["B Value"] - merged["A Value"]) / merged["A Value"] * 100.0, 5)
merged["Value diff (%)"] = np.round(
(merged["B Value"] - merged["A Value"]) / merged["A Value"] * 100.0, 5
)
merged.loc[merged.loc[:, "B Time (s)"] <= 0, "Speedup"] = float("nan")
merged.loc[merged.loc[:, "B Time (s)"] <= 0, "Time diff (s)"] = float("nan")
# merged = merged[(merged["A Time (s)"] >= easy_cutoff) | (merged["B Time (s)"] >= easy_cutoff)]
@@ -32,17 +38,19 @@ merged["Name"] = merged["Group"] + "/" + merged["Instance"]
k = len(merged.groupby("Name"))
plt.figure(figsize=(12, 0.50 * k))
plt.rcParams['xtick.bottom'] = plt.rcParams['xtick.labelbottom'] = True
plt.rcParams['xtick.top'] = plt.rcParams['xtick.labeltop'] = True
plt.rcParams["xtick.bottom"] = plt.rcParams["xtick.labelbottom"] = True
plt.rcParams["xtick.top"] = plt.rcParams["xtick.labeltop"] = True
sns.set_style("whitegrid")
sns.set_palette("Set1")
sns.barplot(data=merged,
sns.barplot(
data=merged,
x="Speedup",
y="Name",
color="tab:red",
capsize=0.15,
errcolor="k",
errwidth=1.25)
errwidth=1.25,
)
plt.axvline(1.0, linestyle="--", color="k")
plt.tight_layout()
@@ -50,7 +58,10 @@ print("Writing tables/compare.png")
plt.savefig("tables/compare.png", dpi=150)
print("Writing tables/compare.csv")
merged.loc[:, ["Group",
merged.loc[
:,
[
"Group",
"Instance",
"Sample",
"A Time (s)",
@@ -60,5 +71,5 @@ merged.loc[:, ["Group",
"A Value",
"B Value",
"Value diff (%)",
]
],
].to_csv("tables/compare.csv", index_label="Index")


@@ -9,8 +9,8 @@ from tabulate import tabulate
def process_all_log_files():
pathlist = list(Path(".").glob('results/*/*/*.log'))
pathlist += list(Path(".").glob('results/*/*.log'))
pathlist = list(Path(".").glob("results/*/*/*.log"))
pathlist += list(Path(".").glob("results/*/*.log"))
rows = []
for path in pathlist:
if ".ipy" in str(path):
@@ -52,37 +52,55 @@ def process(filename):
with open(filename) as file:
for line in file.readlines():
m = re.search(r"Explored ([0-9.e+]*) nodes \(([0-9.e+]*) simplex iterations\) in ([0-9.e+]*) seconds", line)
m = re.search(
r"Explored ([0-9.e+]*) nodes \(([0-9.e+]*) simplex iterations\) in ([0-9.e+]*) seconds",
line,
)
if m is not None:
nodes += int(m.group(1))
simplex_iterations += int(m.group(2))
optimize_time += float(m.group(3))
m = re.search(r"Best objective ([0-9.e+]*), best bound ([0-9.e+]*), gap ([0-9.e+]*)\%", line)
m = re.search(
r"Best objective ([0-9.e+]*), best bound ([0-9.e+]*), gap ([0-9.e+]*)\%",
line,
)
if m is not None:
primal_bound = float(m.group(1))
dual_bound = float(m.group(2))
gap = round(float(m.group(3)), 3)
m = re.search(r"Root relaxation: objective ([0-9.e+]*), ([0-9.e+]*) iterations, ([0-9.e+]*) seconds", line)
m = re.search(
r"Root relaxation: objective ([0-9.e+]*), ([0-9.e+]*) iterations, ([0-9.e+]*) seconds",
line,
)
if m is not None:
root_obj = float(m.group(1))
root_iterations += int(m.group(2))
root_time += float(m.group(3))
m = re.search(r"Presolved: ([0-9.e+]*) rows, ([0-9.e+]*) columns, ([0-9.e+]*) nonzeros", line)
m = re.search(
r"Presolved: ([0-9.e+]*) rows, ([0-9.e+]*) columns, ([0-9.e+]*) nonzeros",
line,
)
if m is not None:
n_rows_presolved = int(m.group(1))
n_cols_presolved = int(m.group(2))
n_nz_presolved = int(m.group(3))
m = re.search(r"Optimize a model with ([0-9.e+]*) rows, ([0-9.e+]*) columns and ([0-9.e+]*) nonzeros", line)
m = re.search(
r"Optimize a model with ([0-9.e+]*) rows, ([0-9.e+]*) columns and ([0-9.e+]*) nonzeros",
line,
)
if m is not None:
n_rows_orig = int(m.group(1))
n_cols_orig = int(m.group(2))
n_nz_orig = int(m.group(3))
m = re.search(r"Variable types: ([0-9.e+]*) continuous, ([0-9.e+]*) integer \(([0-9.e+]*) binary\)", line)
m = re.search(
r"Variable types: ([0-9.e+]*) continuous, ([0-9.e+]*) integer \(([0-9.e+]*) binary\)",
line,
)
if m is not None:
n_cont_vars_presolved = int(m.group(1))
n_bin_vars_presolved = int(m.group(3))
@@ -103,7 +121,10 @@ def process(filename):
if m is not None:
total_time = float(m.group(1))
m = re.search(r"User-callback calls ([0-9.e+]*), time in user-callback ([0-9.e+]*) sec", line)
m = re.search(
r"User-callback calls ([0-9.e+]*), time in user-callback ([0-9.e+]*) sec",
line,
)
if m is not None:
cb_calls = int(m.group(1))
cb_time = float(m.group(2))
@@ -150,6 +171,7 @@ def process(filename):
"Transmission screening calls": transmission_calls,
}
def generate_chart():
import pandas as pd
import matplotlib.pyplot as plt
@@ -159,7 +181,9 @@ def generate_chart():
files = ["tables/benchmark.csv"]
for f in files:
table = pd.read_csv(f, index_col=0)
table.loc[:, "Instance"] = table.loc[:,"Group"] + "/" + table.loc[:,"Instance"]
table.loc[:, "Instance"] = (
table.loc[:, "Group"] + "/" + table.loc[:, "Instance"]
)
table.loc[:, "Filename"] = f
tables += [table]
benchmark = pd.concat(tables, sort=True)
@@ -168,16 +192,18 @@ def generate_chart():
plt.figure(figsize=(12, 0.50 * k))
sns.set_style("whitegrid")
sns.set_palette("Set1")
sns.barplot(y="Instance",
sns.barplot(
y="Instance",
x="Total time (s)",
color="tab:red",
capsize=0.15,
errcolor="k",
errwidth=1.25,
data=benchmark);
data=benchmark,
)
plt.tight_layout()
print("Writing tables/benchmark.png")
plt.savefig("tables/benchmark.png", dpi=150);
plt.savefig("tables/benchmark.png", dpi=150)
if __name__ == "__main__":

14
docs/Makefile Normal file
View File

@@ -0,0 +1,14 @@
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

(binary image file in docs changed; before and after size: 35 KiB)

49
docs/_static/custom.css vendored Normal file

@@ -0,0 +1,49 @@
h1.site-logo {
font-size: 30px !important;
}
h1.site-logo small {
font-size: 20px !important;
}
h1.site-logo {
font-size: 30px !important;
}
h1.site-logo small {
font-size: 20px !important;
}
tbody, thead, pre {
border: 1px solid rgba(0, 0, 0, 0.25);
}
table td, th {
padding: 8px;
}
table p {
margin-bottom: 0;
}
table td code {
white-space: nowrap;
}
table tr,
table th {
border-bottom: 1px solid rgba(0, 0, 0, 0.1);
}
table tr:last-child {
border-bottom: 0;
}
pre {
box-shadow: inherit !important;
background-color: #fff;
}
.text-align\:center {
text-align: center;
}

16
docs/conf.py Normal file

@@ -0,0 +1,16 @@
project = "UnitCommitment.jl"
copyright = "2020-2021, UChicago Argonne, LLC"
author = ""
release = "0.2"
extensions = ["myst_parser"]
templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
html_theme = "sphinx_book_theme"
html_static_path = ["_static"]
html_css_files = ["custom.css"]
html_theme_options = {
"repository_url": "https://github.com/ANL-CEEESA/UnitCommitment.jl/",
"use_repository_button": True,
"extra_navbar": "",
}
html_title = f"UnitCommitment.jl<br/><small>{release}</small>"


@@ -1,7 +1,18 @@
```{sectnum}
---
start: 2
depth: 2
suffix: .
---
```
Data Format
===========
## 1. Input Data Format
Input Data Format
-----------------
Instances are specified by JSON files containing the following main sections:
@@ -15,27 +26,28 @@ Instances are specified by JSON files containing the following main sections:
Each section is described in detail below. For a complete example, see [case14](https://github.com/ANL-CEEESA/UnitCommitment.jl/tree/dev/instances/matpower/case14).
### 1.1 Parameters
### Parameters
This section describes system-wide parameters, such as power balance penalties, and optimization parameters, such as the length of the planning horizon.
This section describes system-wide parameters, such as power balance penalties, and optimization parameters, such as the length of the planning horizon and the time step.
| Key | Description | Default | Time series?
| :----------------------------- | :------------------------------------------------ | :------: | :------------:
| `Time (h)` | Length of the planning horizon (in hours) | Required | N
| `Power balance penalty ($/MW)` | Penalty for system-wide shortage or surplus in production (in $/MW). This is charged per time period. For example, if there is a shortage of 1 MW for three time periods, three times this amount will be charged. | `1000.0` | Y
| `Time horizon (h)` | Length of the planning horizon (in hours). | Required | N
| `Time step (min)` | Length of each time step (in minutes). Must be a divisor of 60 (e.g. 60, 30, 20, 15, etc). | `60` | N
| `Power balance penalty ($/MW)` | Penalty for system-wide shortage or surplus in production (in $/MW). This is charged per time step. For example, if there is a shortage of 1 MW for three time steps, three times this amount will be charged. | `1000.0` | Y
#### Example
```json
{
"Parameters": {
"Time (h)": 4,
"Time horizon (h)": 4,
"Power balance penalty ($/MW)": 1000.0
}
}
```
### 1.2 Buses
### Buses
This section describes the characteristics of each bus in the system.
@@ -64,40 +76,40 @@ This section describes the characteristics of each bus in the system.
```
### 1.3 Generators
### Generators
This section describes all generators in the system, including thermal units, renewable units and virtual units.
| Key | Description | Default | Time series?
| :------------------------ | :------------------------------------------------| ------- | :-----------:
| `Bus` | Identifier of the bus where this generator is located (string) | Required | N
| `Bus` | Identifier of the bus where this generator is located (string). | Required | N
| `Production cost curve (MW)` and `Production cost curve ($)` | Parameters describing the piecewise-linear production costs. See below for more details. | Required | Y
| `Startup costs ($)` and `Startup delays (h)` | Parameters describing how much it costs to start the generator after it has been shut down for a certain amount of time. If `Startup costs ($)` and `Startup delays (h)` are set to `[300.0, 400.0]` and `[1, 4]`, for example, and the generator is shut down at time `t`, then it costs 300 to start up the generator at times `t+1`, `t+2` or `t+3`, and 400 to start the generator at time `t+4` or any time after that. The number of startup cost points is unlimited, and may be different for each generator. Startup delays must be strictly increasing. | `[0.0]` and `[1]` | N
| `Minimum uptime (h)` | Minimum amount of time the generator must stay operational after starting up (in hours). For example, if the generator starts up at time 1 and `Minimum uptime (h)` is set to 4, then the generator can only shut down at time 5. | `1` | N
| `Minimum downtime (h)` | Minimum amount of time the generator must stay offline after shutting down (in hours). For example, if the generator shuts down at time 1 and `Minimum downtime (h)` is set to 4, then the generator can only start producing power again at time 5. | `1` | N
| `Ramp up limit (MW)` | Maximum increase in production from one time period to the next (in MW). For example, if the generator is producing 100 MW at time 1 and if this parameter is set to 40 MW, then the generator will produce at most 140 MW at time 2. | `+inf` | N
| `Ramp down limit (MW)` | Maximum decrease in production from one time period to the next (in MW). For example, if the generator is producing 100 MW at time 1 and this parameter is set to 40 MW, then the generator will produce at least 60 MW at time 2. | `+inf` | N
| `Startup limit (MW)` | Maximum amount of power a generator can produce immediately after starting up (in MW). | `+inf` | N
| `Shutdown limit (MW)` | Maximum amount of power a generator can produce immediately before shutting down (in MW). Specifically, the generator can only shut down at time `t+1` if its production at time `t` is below this limit. | `+inf` | N
| `Initial status (h)` | If set to a positive number, indicates the amount of time the generator has been on at the beginning of the simulation, and if set to a negative number, the amount of time the generator has been off. For example, if `Initial status (h)` is `-2`, this means that the generator was off at simulation time `-2` and `-1`. The simulation starts at time `0`. | Required | N
| `Initial power (MW)` | Amount of power the generator at time period `-1`, immediately before the planning horizon starts. | Required | N
| `Must run?` | If `true`, the generator should be committed, even that is not economical (Boolean). | `false` | Y
| `Startup costs ($)` and `Startup delays (h)` | Parameters describing how much it costs to start the generator after it has been shut down for a certain amount of time. If `Startup costs ($)` and `Startup delays (h)` are set to `[300.0, 400.0]` and `[1, 4]`, for example, and the generator is shut down at time `00:00` (h:min), then it costs \$300 to start up the generator at any time between `01:00` and `03:59`, and \$400 to start the generator at time `04:00` or any time after that. The number of startup cost points is unlimited, and may be different for each generator. Startup delays must be strictly increasing and the first entry must equal `Minimum downtime (h)`. | `[0.0]` and `[1]` | N
| `Minimum uptime (h)` | Minimum amount of time the generator must stay operational after starting up (in hours). For example, if the generator starts up at time `00:00` (h:min) and `Minimum uptime (h)` is set to 4, then the generator can only shut down at time `04:00`. | `1` | N
| `Minimum downtime (h)` | Minimum amount of time the generator must stay offline after shutting down (in hours). For example, if the generator shuts down at time `00:00` (h:min) and `Minimum downtime (h)` is set to 4, then the generator can only start producing power again at time `04:00`. | `1` | N
| `Ramp up limit (MW)` | Maximum increase in production from one time step to the next (in MW). For example, if the generator is producing 100 MW at time step 1 and if this parameter is set to 40 MW, then the generator will produce at most 140 MW at time step 2. | `+inf` | N
| `Ramp down limit (MW)` | Maximum decrease in production from one time step to the next (in MW). For example, if the generator is producing 100 MW at time step 1 and this parameter is set to 40 MW, then the generator will produce at least 60 MW at time step 2. | `+inf` | N
| `Startup limit (MW)` | Maximum amount of power a generator can produce immediately after starting up (in MW). For example, if `Startup limit (MW)` is set to 100 MW and the unit is off at time step 1, then it may produce at most 100 MW at time step 2.| `+inf` | N
| `Shutdown limit (MW)` | Maximum amount of power a generator can produce immediately before shutting down (in MW). Specifically, the generator can only shut down at time step `t+1` if its production at time step `t` is below this limit. | `+inf` | N
| `Initial status (h)` | If set to a positive number, indicates the amount of time (in hours) the generator has been on at the beginning of the simulation, and if set to a negative number, the amount of time the generator has been off. For example, if `Initial status (h)` is `-2`, this means that the generator was off since `-02:00` (h:min). The simulation starts at time `00:00`. If `Initial status (h)` is `3`, this means that the generator was on since `-03:00`. A value of zero is not acceptable. | Required | N
| `Initial power (MW)` | Amount of power produced by the generator at time step `-1`, immediately before the planning horizon starts. | Required | N
| `Must run?` | If `true`, the generator should be committed, even if that is not economical (Boolean). | `false` | Y
| `Provides spinning reserves?` | If `true`, this generator may provide spinning reserves (Boolean). | `true` | Y
#### Production costs and limits
Production costs are represented as piecewise-linear curves. Figure 1 shows an example cost curve with three segments, where it costs 1400, 1600, 2200 and 2400 dollars to generate, respectively, 100, 110, 130 and 135 MW of power. To model this generator, `Production cost curve (MW)` should be set to `[100, 110, 130, 135]`, and `Production cost curve ($)` should be set to `[1400, 1600, 2200, 2400]`.
Production costs are represented as piecewise-linear curves. Figure 1 shows an example cost curve with three segments, where it costs \$1400, \$1600, \$2200 and \$2400 to generate, respectively, 100, 110, 130 and 135 MW of power. To model this generator, `Production cost curve (MW)` should be set to `[100, 110, 130, 135]`, and `Production cost curve ($)` should be set to `[1400, 1600, 2200, 2400]`.
Note that this curve also specifies the production limits. Specifically, the first point identifies the minimum power output when the unit is operational, while the last point identifies the maximum power output.
<center>
<img src="../images/cost_curve.png" style="max-width: 500px"/>
<img src="../_static/cost_curve.png" style="max-width: 500px"/>
<div><b>Figure 1.</b> Piecewise-linear production cost curve.</div>
<br/>
</center>
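For reference, the curve from Figure 1 corresponds to the following fragment of a generator entry (all other keys omitted):

```json
{
    "Production cost curve (MW)": [100, 110, 130, 135],
    "Production cost curve ($)": [1400, 1600, 2200, 2400]
}
```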
#### Additional remarks:
* For time-dependent production limits or time-dependent production costs, the usage of nested arrays is allowed. For example, if `Production cost curve (MW)` is set to `[5.0, [10.0, 12.0, 15.0, 20.0]]`, then the unit may generate at most 10, 12, 15 and 20 MW of power during time periods 1, 2, 3 and 4, respectively. The minimum output for all time periods is fixed to at 5 MW.
* For time-dependent production limits or time-dependent production costs, the usage of nested arrays is allowed. For example, if `Production cost curve (MW)` is set to `[5.0, [10.0, 12.0, 15.0, 20.0]]`, then the unit may generate at most 10, 12, 15 and 20 MW of power during time steps 1, 2, 3 and 4, respectively. The minimum output for all time steps is fixed at 5 MW. See the fragment after this list for an illustration.
* There is no limit to the number of piecewise-linear segments, and different generators may have a different number of segments.
* If `Production cost curve (MW)` and `Production cost curve ($)` both contain a single element, then the generator must produce exactly that amount of power when operational. To specify that the generator may produce any amount of power up to a certain limit `P`, the parameter `Production cost curve (MW)` should be set to `[0, P]`.
* Production cost curves must be convex.
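As an illustration of the first remark above, the nested-array form would be written as follows (all other generator keys omitted):

```json
{
    "Production cost curve (MW)": [5.0, [10.0, 12.0, 15.0, 20.0]]
}
```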
@@ -133,7 +145,7 @@ Note that this curve also specifies the production limits. Specifically, the fir
}
```
### 1.4 Price-sensitive loads
### Price-sensitive loads
This section describes components in the system which may increase or reduce their energy consumption according to the energy prices. Fixed loads (as described in the `buses` section) are always served, regardless of the price, unless there is significant congestion in the system or insufficient production capacity. Price-sensitive loads, on the other hand, are only served if it is economical to do so.
@@ -157,7 +169,7 @@ This section describes components in the system which may increase or reduce the
}
```
### 1.5 Transmission Lines
### Transmission Lines
This section describes the characteristics of the transmission system, such as its topology and the susceptance of each transmission line.
@@ -167,9 +179,9 @@ This section describes the characteristics of transmission system, such as its t
| `Target bus` | Identifier of the bus where the transmission line reaches. | Required | N
| `Reactance (ohms)` | Reactance of the transmission line (in ohms). | Required | N
| `Susceptance (S)` | Susceptance of the transmission line (in siemens). | Required | N
| `Normal flow limit (MW)` | Maximum amount of power (in MW) allowed to flow through the line when the system is in its regular, fully-operational state. May be `null` is there is no limit. | `+inf` | Y
| `Normal flow limit (MW)` | Maximum amount of power (in MW) allowed to flow through the line when the system is in its regular, fully-operational state. | `+inf` | Y
| `Emergency flow limit (MW)` | Maximum amount of power (in MW) allowed to flow through the line when the system is in degraded state (for example, after the failure of another transmission line). | `+inf` | Y
| `Flow limit penalty ($/MW)` | Penalty for violating the flow limits of the transmission line (in $/MW). This is charged per time period. For example, if there is a thermal violation of 1 MW for three time periods, three times this amount will be charged. | `5000.0` | Y
| `Flow limit penalty ($/MW)` | Penalty for violating the flow limits of the transmission line (in $/MW). This is charged per time step. For example, if there is a thermal violation of 1 MW for three time steps, then three times this amount will be charged. | `5000.0` | Y
#### Example
@@ -190,7 +202,7 @@ This section describes the characteristics of transmission system, such as its t
```
### 1.6 Reserves
### Reserves
This section describes the hourly amount of operating reserves required.
@@ -214,7 +226,7 @@ This section describes the hourly amount of operating reserves required.
}
```
### 1.7 Contingencies
### Contingencies
This section describes credible contingency scenarios in the optimization, such as the loss of a transmission line or generator.
@@ -239,11 +251,11 @@ This section describes credible contingency scenarios in the optimization, such
}
```
### 1.8 Additional remarks
### Additional remarks
#### Time series parameters
Many numerical properties in the JSON file can be specified either as a single floating point number if they are time-independent, or as an array containing exactly `T` elements, where `T` is the length of the planning horizon, if they are time-dependent. For example, both formats below are valid when `T=3`:
Many numerical properties in the JSON file can be specified either as a single floating point number if they are time-independent, or as an array containing exactly `T` elements, if they are time-dependent, where `T` is the number of time steps in the planning horizon. For example, both formats below are valid when `T=3`:
```json
{
@@ -252,13 +264,29 @@ Many numerical properties in the JSON file can be specified either as a single f
}
```
#### Current limitations
The value `T` depends on both `Time horizon (h)` and `Time step (min)`, as the table below illustrates.
* All reserves are system-wide (no zonal reserves)
* Network topology remains the same for all time periods
* Only N-1 transmission contingencies are supported. Generator contingencies are not supported.
* Time-varying minimum production amounts are not currently compatible with ramp/startup/shutdown limits.
Time horizon (h) | Time step (min) | T
:---------------:|:---------------:|:----:
24 | 60 | 24
24 | 15 | 96
24 | 5 | 288
36 | 60 | 36
36 | 15 | 144
36 | 5 | 432
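In other words, `T` equals the time horizon (converted to minutes) divided by the time step. A quick check in Julia, using the last row of the table above:

```julia
# T = (time horizon in hours) * 60 / (time step in minutes)
time_horizon_h = 36
time_step_min = 5
T = time_horizon_h * 60 ÷ time_step_min    # 432, matching the table
```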
## 2. Output Data Format
Output Data Format
------------------
The output data format is also JSON-based, but it is not currently documented since we expect it to change significantly in a future version of the package.
Current limitations
-------------------
* All reserves are system-wide. Zonal reserves are not currently supported.
* Network topology remains the same for all time steps.
* Only N-1 transmission contingencies are supported. Generator contingencies are not currently supported.
* Time-varying minimum production amounts are not currently compatible with ramp/startup/shutdown limits.

72
docs/index.md Normal file
View File

@@ -0,0 +1,72 @@
# UnitCommitment.jl
**UnitCommitment.jl** (UC.jl) is a Julia/JuMP optimization package for the Security-Constrained Unit Commitment Problem (SCUC), a fundamental optimization problem in power systems used, for example, to clear the day-ahead electricity markets. The package provides benchmark instances for the problem and Julia/JuMP implementations of state-of-the-art mixed-integer programming formulations.
### Package Components
* **Data Format:** The package proposes an extensible and fully-documented JSON-based data specification format for SCUC, developed in collaboration with Independent System Operators (ISOs), which describes the most important aspects of the problem. The format supports all the most common generator characteristics (including ramping, piecewise-linear production cost curves and time-dependent startup costs), as well as operating reserves, price-sensitive loads, transmission networks and contingencies.
* **Benchmark Instances:** The package provides a diverse collection of large-scale benchmark instances collected from the literature and extended to make them more challenging and realistic.
* **Model Implementation**: The package provides a Julia/JuMP implementation of state-of-the-art formulations and solution methods for SCUC. Our goal is to keep this implementation up-to-date, as new methods are proposed in the literature.
* **Benchmark Tools:** The package provides automated benchmark scripts to accurately evaluate the performance impact of proposed code changes.
### Authors
* **Alinson Santos Xavier** (Argonne National Laboratory)
* **Feng Qiu** (Argonne National Laboratory)
### Acknowledgments
* We would like to thank **Aleksandr M. Kazachkov** (University of Florida), **Yonghong Chen** (Midcontinent Independent System Operator), and **Feng Pan** (Pacific Northwest National Laboratory) for valuable feedback on early versions of this package.
* Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy under Contract No. DE-AC02-06CH11357.
* Based upon work supported by the **U.S. Department of Energy Advanced Grid Modeling Program** under Grant DE-OE0000875.
### Citing
If you use UnitCommitment.jl in your research (instances, models or algorithms), we kindly request that you cite the package as follows:
* **Alinson S. Xavier, Feng Qiu**, "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment". Zenodo (2020). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874).
If you use the instances, we additionally request that you cite the original sources, as described in the [instances page](instances.md).
### License
```text
UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment
Copyright © 2020, UChicago Argonne, LLC. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted
provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of
conditions and the following disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
```
## Site contents
```{toctree}
---
maxdepth: 2
---
usage.md
format.md
instances.md
model.md
```

View File

@@ -1,4 +1,13 @@
# Instances
```{sectnum}
---
start: 3
depth: 2
suffix: .
---
```
Instances
=========
UnitCommitment.jl provides a large collection of benchmark instances collected
from the literature and converted to a [common data format](format.md). In some cases, as indicated below, the original instances have been extended, with realistic parameters, using data-driven methods.
@@ -7,7 +16,9 @@ If you use these instances in your research, we request that you cite UnitCommit
Raw instances files are [available at our GitHub repository](https://github.com/ANL-CEEESA/UnitCommitment.jl/tree/dev/instances). Benchmark instances can also be loaded with
`UnitCommitment.read_benchmark(name)`, as explained in the [usage section](usage.md).
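For example, a minimal sketch that loads one of the MATPOWER instances listed below:

```julia
using UnitCommitment

# Load a benchmark instance by name (group/case/date)
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")
```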
## 1. MATPOWER
MATPOWER
--------
[MATPOWER](https://github.com/MATPOWER/matpower) is an open-source package for solving power flow problems in MATLAB and Octave. It contains a number of power flow test cases, which have been widely used in the power systems literature.
@@ -25,7 +36,7 @@ Because most MATPOWER test cases were originally designed for power flow studies
For each MATPOWER test case, UC.jl provides two variations (`2017-02-01` and `2017-08-01`) corresponding respectively to a winter and to a summer test case.
### 1.1 MATPOWER/UW-PSTCA
### MATPOWER/UW-PSTCA
A variety of smaller IEEE test cases, [compiled by University of Washington](http://labs.ece.uw.edu/pstca/), corresponding mostly to small portions of the American Electric Power System in the 1960s.
@@ -43,7 +54,7 @@ A variety of smaller IEEE test cases, [compiled by University of Washington](htt
| `matpower/case300/2017-08-01` | 300 | 69 | 411 | 320 | [MTPWR, PSTCA]
### 1.2 MATPOWER/Polish
### MATPOWER/Polish
Test cases based on the Polish 400, 220 and 110 kV networks, originally provided by **Roman Korab** (Politechnika Śląska) and corrected by the MATPOWER team.
@@ -66,7 +77,7 @@ Test cases based on the Polish 400, 220 and 110 kV networks, originally provided
| `matpower/case3375wp/2017-02-01` | 3374 | 590 | 4161 | 3245 | [MTPWR]
| `matpower/case3375wp/2017-08-01` | 3374 | 590 | 4161 | 3245 | [MTPWR]
### 1.3 MATPOWER/PEGASE
### MATPOWER/PEGASE
Test cases from the [Pan European Grid Advanced Simulation and State Estimation (PEGASE) project](https://cordis.europa.eu/project/id/211407), describing part of the European high voltage transmission network.
@@ -83,7 +94,7 @@ Test cases from the [Pan European Grid Advanced Simulation and State Estimation
| `matpower/case13659pegase/2017-02-01` | 13659 | 4092 | 20467 | 13932 | [JoFlMa16, FlPaCa13, MTPWR]
| `matpower/case13659pegase/2017-08-01` | 13659 | 4092 | 20467 | 13932 | [JoFlMa16, FlPaCa13, MTPWR]
### 1.4 MATPOWER/RTE
### MATPOWER/RTE
Test cases from the R&D Division at [Reseau de Transport d'Electricite](https://www.rte-france.com) representing the size and complexity of the French very high voltage transmission network.
@@ -107,11 +118,12 @@ Test cases from the R&D Division at [Reseau de Transport d'Electricite](https://
| `matpower/case6515rte/2017-08-01` | 6515 | 1368 | 9037 | 6063 | [MTPWR, JoFlMa16]
## 2. PGLIB-UC Instances
PGLIB-UC Instances
------------------
[PGLIB-UC](https://github.com/power-grid-lib/pglib-uc) is a benchmark library curated and maintained by the [IEEE PES Task Force on Benchmarks for Validation of Emerging Power System Algorithms](https://power-grid-lib.github.io/). These test cases have been used in [KnOsWa20].
### 2.1 PGLIB-UC/California
### PGLIB-UC/California
Test cases based on publicly available data from the California ISO. For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
@@ -139,7 +151,7 @@ Test cases based on publicly available data from the California ISO. For more de
| `pglib-uc/ca/Scenario400_reserves_5` | 1 | 611 | 0 | 0 | [KnOsWa20]
### 2.2 PGLIB-UC/FERC
### PGLIB-UC/FERC
Test cases based on a publicly available [unit commitment test case produced by the Federal Energy Regulatory Commission](https://www.ferc.gov/industries-data/electric/power-sales-and-markets/increasing-efficiency-through-improved-software-1). For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
@@ -171,7 +183,7 @@ Test cases based on a publicly available [unit commitment test case produced by
| `pglib-uc/ferc/2015-12-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
### 2.3 PGLIB-UC/RTS-GMLC
### PGLIB-UC/RTS-GMLC
[RTS-GMLC](https://github.com/GridMod/RTS-GMLC) is an updated version of the RTS-96 test system produced by the United States Department of Energy's [Grid Modernization Laboratory Consortium](https://gmlc.doe.gov/). The PGLIB-UC/RTS-GMLC instances are modified versions of the original RTS-GMLC instances, with modified ramp-rates and without a transmission network. For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
@@ -190,7 +202,9 @@ Test cases based on a publicly available [unit commitment test case produced by
| `pglib-uc/rts_gmlc/2020-11-25` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-12-23` | 1 | 154 | 0 | 0 | [BaBlEh19]
## 3. OR-LIB/UC
OR-LIB/UC
---------
[OR-LIB](http://people.brunel.ac.uk/~mastjjb/jeb/info.html) is a collection of test data sets for a variety of operations research problems, including unit commitment. The UC instances in OR-LIB are synthetic instances generated by a [random problem generator](http://groups.di.unipi.it/optimize/Data/UC.html) developed by the [Operations Research Group at University of Pisa](http://groups.di.unipi.it/optimize/). These test cases have been used in [FrGe06] and many other publications.
@@ -239,7 +253,9 @@ Test cases based on a publicly available [unit commitment test case produced by
| `or-lib/200_0_8_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_9_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
## 4. Tejada19
Tejada19
--------
Test cases used in [TeLuSa19]. These instances are similar to OR-LIB/UC, in the sense that they use the same random problem generator, but are much larger.
@@ -295,7 +311,9 @@ Test cases based on a publicly available [unit commitment test case produced by
| `tejada19/UC_168h_192g` | 168 | 1 | 192 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_199g` | 168 | 1 | 199 | 0 | 0 | [TeLuSa19]
## 5. References
References
----------
* [UCJL] **Alinson S. Xavier, Feng Qiu.** "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment". Zenodo (2020). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874)

196
docs/model.md Normal file
View File

@@ -0,0 +1,196 @@
```{sectnum}
---
start: 4
depth: 2
suffix: .
---
```
JuMP Model
==========
On this page, we describe the JuMP optimization model produced by the function `UnitCommitment.build_model`. A detailed understanding of this model is not necessary if you are just interested in using the package to solve standard unit commitment cases, but it may be useful, for example, if you need to solve a slightly different problem, with additional variables and constraints. The notation on this page generally follows [KnOsWa20].
Decision variables
------------------
### Generators
Name | Symbol | Description | Unit
-----|:--------:|-------------|:------:
`is_on[g,t]` | $u_{g}(t)$ | True if generator `g` is on at time `t`. | Binary
`switch_on[g,t]` | $v_{g}(t)$ | True if generator `g` switches on at time `t`. | Binary
`switch_off[g,t]` | $w_{g}(t)$ | True if generator `g` switches off at time `t`. | Binary
`prod_above[g,t]` |$p'_{g}(t)$ | Amount of power produced by generator `g` above its minimum power output at time `t`. For example, if the minimum power of generator `g` is 100 MW and `g` is producing 115 MW of power at time `t`, then `prod_above[g,t]` equals `15.0`. | MW
`segprod[g,t,k]` | $p^k_g(t)$ | Amount of power from piecewise linear segment `k` produced by generator `g` at time `t`. For example, if the cost curve for generator `g` is defined by the points `(100, 1400)`, `(110, 1600)`, `(130, 2200)` and `(135, 2400)`, and if the generator is producing 115 MW of power at time `t`, then `segprod[g,t,:]` equals `[10.0, 5.0, 0.0]`. | MW
`reserve[g,t]` | $r_g(t)$ | Amount of reserves provided by generator `g` at time `t`. | MW
`startup[g,t,s]` | $\delta^s_g(t)$ | True if generator `g` switches on at time `t` incurring start-up costs from start-up category `s`. | Binary
### Buses
Name | Symbol | Description | Unit
-----|:------:|-------------|:------:
`net_injection[b,t]` | $n_b(t)$ | Net injection at bus `b` at time `t`. | MW
`curtail[b,t]` | $s^+_b(t)$ | Amount of load curtailed at bus `b` at time `t` | MW
### Price-sensitive loads
Name | Symbol | Description | Unit
-----|:------:|-------------|:------:
`loads[s,t]` | $d_{s}(t)$ | Amount of power served to price-sensitive load `s` at time `t`. | MW
### Transmission lines
Name | Symbol | Description | Unit
-----|:------:|-------------|:------:
`flow[l,t]` | $f_l(t)$ | Power flow on line `l` at time `t`. | MW
`overflow[l,t]` | $f^+_l(t)$ | Amount of flow above the limit for line `l` at time `t`. | MW
```{warning}
Since transmission and N-1 security constraints are enforced in a lazy way, most of the `flow[l,t]` variables are never added to the model. Accessing `model[:flow][l,t]` without first checking that the variable exists will likely generate an error.
```
Objective function
------------------
$$
\begin{align}
\text{minimize} \;\; &
\sum_{t \in \mathcal{T}}
\sum_{g \in \mathcal{G}}
C^\text{min}_g(t) u_g(t) \\
&
+ \sum_{t \in \mathcal{T}}
\sum_{g \in \mathcal{G}}
\sum_{k \in \mathcal{K}_g}
C^k_g(t) p^k_g(t) \\
&
+ \sum_{t \in \mathcal{T}}
\sum_{g \in \mathcal{G}}
\sum_{s \in \mathcal{S}_g}
C^s_{g}(t) \delta^s_g(t) \\
&
+ \sum_{t \in \mathcal{T}}
\sum_{l \in \mathcal{L}}
C^\text{overflow}_{l}(t) f^+_l(t) \\
&
+ \sum_{t \in \mathcal{T}}
\sum_{b \in \mathcal{B}}
C^\text{curtail}(t) s^+_b(t) \\
&
- \sum_{t \in \mathcal{T}}
\sum_{s \in \mathcal{PS}}
R_{s}(t) d_{s}(t) \\
\end{align}
$$
where
- $\mathcal{B}$ is the set of buses
- $\mathcal{G}$ is the set of generators
- $\mathcal{K}_g$ is the set of piecewise-linear segments in the production cost curve of generator $g$
- $\mathcal{L}$ is the set of transmission lines
- $\mathcal{PS}$ is the set of price-sensitive loads
- $\mathcal{S}_g$ is the set of start-up categories for generator $g$
- $\mathcal{T}$ is the set of time steps
- $C^\text{curtail}(t)$ is the curtailment penalty (in \$/MW)
- $C^\text{min}_g(t)$ is the cost of keeping generator $g$ on and producing at minimum power during time $t$ (in \$)
- $C^\text{overflow}_{l}(t)$ is the flow limit penalty for line $l$ at time $t$ (in \$/MW)
- $C^k_g(t)$ is the cost for generator $g$ to produce 1 MW of power at time $t$ under piecewise linear segment $k$ (in \$/MW)
- $C^s_{g}(t)$ is the cost of starting up generator $g$ at time $t$ under start-up category $s$ (in \$)
- $R_{s}(t)$ is the revenue obtained from serving price-sensitive load $s$ at time $t$ (in \$/MW)
Constraints
-----------
TODO
Inspecting and modifying the model
----------------------------------
### Accessing decision variables
After building a model using `UnitCommitment.build_model`, it is possible to obtain a reference to the decision variables by calling `model[:varname][index]`. For example, `model[:is_on]["g1",1]` returns a direct reference to the JuMP variable indicating whether generator named "g1" is on at time 1. The script below illustrates how to build a model, solve it and display the solution without using the function `UnitCommitment.solution`.
```julia
using Cbc
using Printf
using JuMP
using UnitCommitment
# Load benchmark instance
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")
# Build JuMP model
model = UnitCommitment.build_model(
instance=instance,
optimizer=Cbc.Optimizer,
)
# Solve the model
UnitCommitment.optimize!(model)
# Display commitment status
for g in instance.units
for t in 1:instance.time
@printf(
"%-10s %5d %5.1f %5.1f %5.1f\n",
g.name,
t,
value(model[:is_on][g.name, t]),
value(model[:switch_on][g.name, t]),
value(model[:switch_off][g.name, t]),
)
end
end
```
### Modifying the model
Since we now have a direct reference to the JuMP decision variables, it is possible to fix variables, change the coefficients in the objective function, or even add new constraints to the model before solving it. The script below shows how this can be accomplished. For more information on modifying an existing model, [see the JuMP documentation](https://jump.dev/JuMP.jl/stable/manual/variables/).
```julia
using Cbc
using JuMP
using UnitCommitment
# Load benchmark instance
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")
# Construct JuMP model
model = UnitCommitment.build_model(
instance=instance,
optimizer=Cbc.Optimizer,
)
# Fix a decision variable to 1.0
JuMP.fix(
model[:is_on]["g1",1],
1.0,
force=true,
)
# Change the objective function
JuMP.set_objective_coefficient(
model,
model[:switch_on]["g2",1],
1000.0,
)
# Create a new constraint
@constraint(
model,
model[:is_on]["g3",1] + model[:is_on]["g4",1] <= 1,
)
# Solve the model
UnitCommitment.optimize!(model)
```
References
----------
* [KnOsWa20] **Bernard Knueven, James Ostrowski and Jean-Paul Watson.** "On Mixed-Integer Programming Formulations for the Unit Commitment Problem". INFORMS Journal on Computing (2020). [DOI: 10.1287/ijoc.2019.0944](https://doi.org/10.1287/ijoc.2019.0944)

View File

@@ -1,11 +1,21 @@
# Usage
```{sectnum}
---
start: 1
depth: 2
suffix: .
---
```
## 1. Installation
Usage
=====
UnitCommitment.jl was tested and developed with [Julia 1.5](https://julialang.org/). To install Julia, please follow the [installation guide on the official Julia website](https://julialang.org/downloads/platform.html). To install UnitCommitment.jl, run the Julia interpreter, type `]` to open the package manager, then type:
Installation
------------
UnitCommitment.jl was tested and developed with [Julia 1.6](https://julialang.org/). To install Julia, please follow the [installation guide on the official Julia website](https://julialang.org/downloads/platform.html). To install UnitCommitment.jl, run the Julia interpreter, type `]` to open the package manager, then type:
```text
pkg> add UnitCommitment
pkg> add UnitCommitment@0.2
```
To test that the package has been correctly installed, run:
@@ -18,9 +28,10 @@ If all tests pass, the package should now be ready to be used by any Julia scrip
To solve the optimization models, a mixed-integer linear programming (MILP) solver is also required. Please see the [JuMP installation guide](https://jump.dev/JuMP.jl/stable/installation/) for instructions on installing a solver. Typical open-source choices are [Cbc](https://github.com/JuliaOpt/Cbc.jl) and [GLPK](https://github.com/JuliaOpt/GLPK.jl). In the instructions below, Cbc will be used, but any other MILP solver listed in the JuMP installation guide should also be compatible.
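For example, Cbc can be installed from the same package-manager prompt:

```text
pkg> add Cbc
```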
## 2. Typical Usage
Typical Usage
-------------
### 2.1 Solving user-provided instances
### Solving user-provided instances
The first step to use UC.jl is to construct a JSON file describing your unit commitment instance. See the [data format page](format.md) for a complete description of the data format UC.jl expects. The next steps, as shown below, are to read the instance from a file, construct the optimization model, run the optimization and extract the optimal solution.
@@ -33,20 +44,22 @@ using UnitCommitment
instance = UnitCommitment.read("/path/to/input.json")
# Construct optimization model
model = UnitCommitment.build_model(instance=instance,
optimizer=Cbc.Optimizer)
model = UnitCommitment.build_model(
instance=instance,
optimizer=Cbc.Optimizer,
)
# Solve model
UnitCommitment.optimize!(model)
# Extract solution and write it to a file
solution = UnitCommitment.get_solution(model)
open("/path/to/output.json", "w") do file
JSON.print(file, solution, 2)
end
# Extract solution
solution = UnitCommitment.solution(model)
# Write solution to a file
UnitCommitment.write("/path/to/output.json", solution)
```
### 2.2 Solving benchmark instances
### Solving benchmark instances
As described in the [Instances page](instances.md), UnitCommitment.jl contains a number of benchmark instances collected from the literature. To solve one of these instances individually, instead of constructing your own, the function `read_benchmark` can be used:
@@ -55,15 +68,15 @@ using UnitCommitment
instance = UnitCommitment.read_benchmark("matpower/case3375wp/2017-02-01")
```
## 3. Advanced usage
Advanced usage
--------------
### 3.1 Modifying the formulation
### Modifying the formulation
For the time being, the recommended way of modifying the MILP formulation used by UC.jl is to create a local copy of our git repository and directly modify the source code of the package. In a future version, it will be possible to switch between multiple formulations, or to simply add/remove constraints after the model has been generated.
### 3.2 Generating initial conditions
### Generating initial conditions
When creating random unit commitment instances for benchmark purposes, it is often hard to compute, in advance, sensible initial conditions for all generators. Setting initial conditions naively (for example, making all generators initially off and producing no power) can easily cause the instance to become infeasible due to excessive ramping. Initial conditions can also make it hard to modify existing instances. For example, increasing the system load without carefully modifying the initial conditions may make the problem infeasible or unrealistically challenging to solve.
@@ -84,10 +97,11 @@ model = UnitCommitment.build_model(instance, Cbc.Optimizer)
UnitCommitment.optimize!(model)
```
!!! warning
```{warning}
The function `generate_initial_conditions!` may return different initial conditions after each call, even if the same instance and the same optimizer are provided. The particular algorithm may also change in a future version of UC.jl. For these reasons, it is recommended that you generate initial conditions exactly once for each instance and store them for later use.
```
### 3.3 Verifying solutions
### Verifying solutions
When developing new formulations, it is very easy to introduce subtle errors in the model that result in incorrect solutions. To help with this, UC.jl includes a utility function that verifies whether a given solution is feasible and, if not, prints all the validation errors it finds. The implementation of this function is completely independent of the implementation of the optimization model, and can therefore be used to validate it. The function can also be used to verify solutions produced by other optimization packages, as long as they follow the [UC.jl data format](format.md).
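A minimal sketch is shown below; the name `UnitCommitment.validate` for the verification routine is an assumption here, so check the package reference for the exact entry point. The remaining calls follow the examples earlier in this documentation.

```julia
using Cbc
using UnitCommitment

# Build and solve a benchmark instance
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")
model = UnitCommitment.build_model(
    instance = instance,
    optimizer = Cbc.Optimizer,
)
UnitCommitment.optimize!(model)

# Extract the solution and check its feasibility against the instance.
# NOTE: `validate` is an assumed function name; consult the API reference.
solution = UnitCommitment.solution(model)
UnitCommitment.validate(instance, solution)
```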

Binary file not shown.

Binary file not shown.

View File

@@ -1,26 +0,0 @@
site_name: UnitCommitment.jl
theme:
name: cinder
hljs_languages:
- julia
copyright: "Copyright © 2020, UChicago Argonne, LLC. All Rights Reserved."
repo_url: https://github.com/ANL-CEEESA/unitcommitment.jl
edit_uri: edit/dev/src/docs/
nav:
- Home: index.md
- Usage: usage.md
- Format: format.md
- Instances: instances.md
plugins:
- search
markdown_extensions:
- admonition
- mdx_math
- fenced_code
extra_javascript:
- https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0/MathJax.js?config=TeX-AMS-MML_HTMLorMML
- js/mathjax.js
docs_dir: src/docs
site_dir: docs
extra_css:
- "css/custom.css"

View File

@@ -1,182 +0,0 @@
matpower/case1888rte/2017-01-01
matpower/case1888rte/2017-01-02
matpower/case1888rte/2017-01-03
matpower/case1888rte/2017-01-04
matpower/case1888rte/2017-01-05
matpower/case1888rte/2017-01-06
matpower/case1888rte/2017-01-07
matpower/case1888rte/2017-01-08
matpower/case1888rte/2017-01-09
matpower/case1888rte/2017-01-10
matpower/case1888rte/2017-01-11
matpower/case1888rte/2017-01-12
matpower/case1888rte/2017-01-13
matpower/case1888rte/2017-01-14
matpower/case1888rte/2017-01-15
matpower/case1888rte/2017-01-16
matpower/case1888rte/2017-01-17
matpower/case1888rte/2017-01-18
matpower/case1888rte/2017-01-19
matpower/case1888rte/2017-01-20
matpower/case1888rte/2017-01-21
matpower/case1888rte/2017-01-22
matpower/case1888rte/2017-01-23
matpower/case1888rte/2017-01-24
matpower/case1888rte/2017-01-25
matpower/case1888rte/2017-01-26
matpower/case1888rte/2017-01-27
matpower/case1888rte/2017-01-28
matpower/case1888rte/2017-01-29
matpower/case1888rte/2017-01-30
matpower/case1888rte/2017-01-31
matpower/case1888rte/2017-02-01
matpower/case1888rte/2017-02-02
matpower/case1888rte/2017-02-03
matpower/case1888rte/2017-02-04
matpower/case1888rte/2017-02-05
matpower/case1888rte/2017-02-06
matpower/case1888rte/2017-02-07
matpower/case1888rte/2017-02-08
matpower/case1888rte/2017-02-09
matpower/case1888rte/2017-02-10
matpower/case1888rte/2017-02-11
matpower/case1888rte/2017-02-12
matpower/case1888rte/2017-02-13
matpower/case1888rte/2017-02-14
matpower/case1888rte/2017-02-15
matpower/case1888rte/2017-02-16
matpower/case1888rte/2017-02-17
matpower/case1888rte/2017-02-18
matpower/case1888rte/2017-02-19
matpower/case1888rte/2017-02-20
matpower/case1888rte/2017-02-21
matpower/case1888rte/2017-02-22
matpower/case1888rte/2017-02-23
matpower/case1888rte/2017-02-24
matpower/case1888rte/2017-02-25
matpower/case1888rte/2017-02-26
matpower/case1888rte/2017-02-27
matpower/case1888rte/2017-02-28
matpower/case1888rte/2017-03-01
matpower/case3375wp/2017-01-01
matpower/case3375wp/2017-01-02
matpower/case3375wp/2017-01-03
matpower/case3375wp/2017-01-04
matpower/case3375wp/2017-01-05
matpower/case3375wp/2017-01-06
matpower/case3375wp/2017-01-07
matpower/case3375wp/2017-01-08
matpower/case3375wp/2017-01-09
matpower/case3375wp/2017-01-10
matpower/case3375wp/2017-01-11
matpower/case3375wp/2017-01-12
matpower/case3375wp/2017-01-13
matpower/case3375wp/2017-01-14
matpower/case3375wp/2017-01-15
matpower/case3375wp/2017-01-16
matpower/case3375wp/2017-01-17
matpower/case3375wp/2017-01-18
matpower/case3375wp/2017-01-19
matpower/case3375wp/2017-01-20
matpower/case3375wp/2017-01-21
matpower/case3375wp/2017-01-22
matpower/case3375wp/2017-01-23
matpower/case3375wp/2017-01-24
matpower/case3375wp/2017-01-25
matpower/case3375wp/2017-01-26
matpower/case3375wp/2017-01-27
matpower/case3375wp/2017-01-28
matpower/case3375wp/2017-01-29
matpower/case3375wp/2017-01-30
matpower/case3375wp/2017-01-31
matpower/case3375wp/2017-02-01
matpower/case3375wp/2017-02-02
matpower/case3375wp/2017-02-03
matpower/case3375wp/2017-02-04
matpower/case3375wp/2017-02-05
matpower/case3375wp/2017-02-06
matpower/case3375wp/2017-02-07
matpower/case3375wp/2017-02-08
matpower/case3375wp/2017-02-09
matpower/case3375wp/2017-02-10
matpower/case3375wp/2017-02-11
matpower/case3375wp/2017-02-12
matpower/case3375wp/2017-02-13
matpower/case3375wp/2017-02-14
matpower/case3375wp/2017-02-15
matpower/case3375wp/2017-02-16
matpower/case3375wp/2017-02-17
matpower/case3375wp/2017-02-18
matpower/case3375wp/2017-02-19
matpower/case3375wp/2017-02-20
matpower/case3375wp/2017-02-21
matpower/case3375wp/2017-02-22
matpower/case3375wp/2017-02-23
matpower/case3375wp/2017-02-24
matpower/case3375wp/2017-02-25
matpower/case3375wp/2017-02-26
matpower/case3375wp/2017-02-27
matpower/case3375wp/2017-02-28
matpower/case3375wp/2017-03-01
matpower/case6468rte/2017-01-01
matpower/case6468rte/2017-01-02
matpower/case6468rte/2017-01-03
matpower/case6468rte/2017-01-04
matpower/case6468rte/2017-01-05
matpower/case6468rte/2017-01-06
matpower/case6468rte/2017-01-07
matpower/case6468rte/2017-01-08
matpower/case6468rte/2017-01-09
matpower/case6468rte/2017-01-10
matpower/case6468rte/2017-01-11
matpower/case6468rte/2017-01-12
matpower/case6468rte/2017-01-13
matpower/case6468rte/2017-01-14
matpower/case6468rte/2017-01-15
matpower/case6468rte/2017-01-16
matpower/case6468rte/2017-01-17
matpower/case6468rte/2017-01-18
matpower/case6468rte/2017-01-19
matpower/case6468rte/2017-01-20
matpower/case6468rte/2017-01-21
matpower/case6468rte/2017-01-22
matpower/case6468rte/2017-01-23
matpower/case6468rte/2017-01-24
matpower/case6468rte/2017-01-25
matpower/case6468rte/2017-01-26
matpower/case6468rte/2017-01-27
matpower/case6468rte/2017-01-28
matpower/case6468rte/2017-01-29
matpower/case6468rte/2017-01-30
matpower/case6468rte/2017-01-31
matpower/case6468rte/2017-02-01
matpower/case6468rte/2017-02-02
matpower/case6468rte/2017-02-03
matpower/case6468rte/2017-02-04
matpower/case6468rte/2017-02-05
matpower/case6468rte/2017-02-06
matpower/case6468rte/2017-02-07
matpower/case6468rte/2017-02-08
matpower/case6468rte/2017-02-09
matpower/case6468rte/2017-02-10
matpower/case6468rte/2017-02-11
matpower/case6468rte/2017-02-12
matpower/case6468rte/2017-02-13
matpower/case6468rte/2017-02-14
matpower/case6468rte/2017-02-15
matpower/case6468rte/2017-02-16
matpower/case6468rte/2017-02-17
matpower/case6468rte/2017-02-18
matpower/case6468rte/2017-02-19
matpower/case6468rte/2017-02-20
matpower/case6468rte/2017-02-21
matpower/case6468rte/2017-02-22
matpower/case6468rte/2017-02-23
matpower/case6468rte/2017-02-24
matpower/case6468rte/2017-02-25
matpower/case6468rte/2017-02-26
matpower/case6468rte/2017-02-27
matpower/case6468rte/2017-02-28
matpower/case6468rte/2017-03-01
test/case14

View File

@@ -1,49 +0,0 @@
#!/bin/bash
#SBATCH --array=1-180
#SBATCH --time=02:00:00
#SBATCH --account=def-alodi
#SBATCH --mem-per-cpu=1G
#SBATCH --cpus-per-task=4
#SBATCH --mail-user=aleksandr.kazachkov@polymtl.ca
#SBATCH --mail-type=BEGIN
#SBATCH --mail-type=END
#SBATCH --mail-type=FAIL
#SBATCH --array=182
#SBATCH --time=00:00:30
#SBATCH --mem-per-cpu=500M
#SBATCH --cpus-per-task=1
#SBATCH --time=01:00:00
#SBATCH --mem-per-cpu=1G
#SBATCH --cpus-per-task=4
MODE="tight"
if [ ! -z $1 ]; then
MODE=$1
fi
#CASE_NUM=`printf %03d $SLURM_ARRAY_TASK_ID`
PROJ_DIR="${REPOS_DIR}/UnitCommitment2.jl"
INST=$(sed -n "${SLURM_ARRAY_TASK_ID}p" ${PROJ_DIR}/scripts/instances.txt)
#DEST="${PROJ_DIR}/benchmark"
DEST="${HOME}/scratch/uc"
RESULTS_DIR="${DEST}/results_${MODE}"
NUM_SAMPLES=1
if [ $MODE == "sparse" ] || [ $MODE == "default" ] || [ $MODE == "tight" ]
then
echo "Running task $SLURM_ARRAY_TASK_ID for instance $INST with results sent to ${RESULTS_DIR}"
else
echo "Unrecognized mode: $1. Exiting."
exit
fi
cd ${PROJ_DIR}/benchmark
mkdir -p $(dirname ${RESULTS_DIR}/${INST})
for i in $(seq ${NUM_SAMPLES}); do
FILE=$INST.$i
#echo "Running $FILE at `date` using command julia --project=${PROJ_DIR}/benchmark --sysimage=${PROJ_DIR}/build/sysimage.so ${PROJ_DIR}/benchmark/run.jl ${FILE} ${MODE} ${RESULTS_DIR} 2&>1 | cat > ${RESULTS_DIR}/${FILE}.log"
#julia --project=${PROJ_DIR}/benchmark --sysimage=${PROJ_DIR}/build/sysimage.so ${PROJ_DIR}/benchmark/run.jl ${FILE} ${MODE} ${RESULTS_DIR} 2&>1 | cat > ${RESULTS_DIR}/${FILE}.log
echo "Running $FILE at `date` using command julia --project=${PROJ_DIR}/benchmark --sysimage=${PROJ_DIR}/build/sysimage.so ${PROJ_DIR}/benchmark/run.jl ${FILE} ${MODE} ${RESULTS_DIR} &> ${RESULTS_DIR}/${FILE}.log"
julia --project=${PROJ_DIR}/benchmark --sysimage=${PROJ_DIR}/build/sysimage.so ${PROJ_DIR}/benchmark/run.jl ${FILE} ${MODE} ${RESULTS_DIR} &> ${RESULTS_DIR}/${FILE}.log
#julia --project=${PROJ_DIR}/benchmark --sysimage=${PROJ_DIR}/build/sysimage.so ${PROJ_DIR}/benchmark/run.jl ${FILE} ${MODE} ${RESULTS_DIR}
done

View File

@@ -4,15 +4,9 @@
module UnitCommitment
include("log.jl")
include("dotdict.jl")
include("instance.jl")
include("screening.jl")
include("components.jl")
include("variables.jl")
include("constraints.jl")
include("formulation.jl")
#include("model.jl")
include("model2.jl")
include("model.jl")
include("sensitivity.jl")
include("validate.jl")
include("convert.jl")

View File

@@ -1,46 +0,0 @@
##################################################
# Component types
abstract type UCComponentType end
abstract type RequiredConstraints <: UCComponentType end
abstract type SystemConstraints <: UCComponentType end
abstract type GenerationLimits <: UCComponentType end
abstract type PiecewiseProduction <: UCComponentType end
abstract type UpDownTime <: UCComponentType end
abstract type ReserveConstraints <: UCComponentType end
abstract type RampLimits <: UCComponentType end
abstract type StartupCosts <: UCComponentType end
abstract type ShutdownCosts <: UCComponentType end
##################################################
# Components
"""
Generic component of the unit commitment problem.
Elements
===
* `name`: name of the component
* `description`: gives a brief summary of what the component adds
* `type`: reference back to the UCComponentType being modeled
* `vars`: required variables
* `constrs`: constraints that are created by this function
* `add_component`: function to add constraints and update the objective to capture this component
* `params`: extra parameters the component might use
"""
mutable struct UCComponent
"Name of the component."
name::String
"Description of what the component adds."
description::String
"Which part of the unit commitment problem is modeled by this component."
type::Type{<:UCComponentType}
"Variables that are needed for the component (subset of `var_list`)."
vars::Union{Array{Symbol},Nothing}
"Equations that are modified for the component (subset of `constr_list`)."
constrs::Union{Array{Symbol},Nothing}
"Function to add constraints and objective coefficients needed for this component to the model. Signature should be (component, mip, model)."
add_component::Union{Function,Nothing}
"Extra parameters for the component."
params::Any
end # struct UCComponent
export UCComponent

View File

@@ -1,31 +0,0 @@
##################################################
# Constraints
"""
List of constraints that the model will potentially have
"""
constr_list =
[
:startup_choose,
:startup_restrict,
:segprod_limit,
:segprod_limita,
:segprod_limitb,
:prod_above_def,
:prod_limit,
:str_prod_limit,
:binary_link,
:switch_on_off,
:ramp_up,
:ramp_down,
:str_ramp_up,
:str_ramp_down,
:startstop_limit,
:startup_limit,
:shutdown_limit,
:min_uptime,
:min_downtime,
:power_balance,
:net_injection_def,
:min_reserve
]

View File

@@ -4,7 +4,7 @@
using DataStructures, JSON, GZip
function read_json(path::String)::OrderedDict
function _read_json(path::String)::OrderedDict
if endswith(path, ".gz")
file = GZip.gzopen(path)
else
@@ -13,8 +13,8 @@ function read_json(path::String)::OrderedDict
return JSON.parse(file, dicttype = () -> DefaultOrderedDict(nothing))
end
function read_egret_solution(path::String)::OrderedDict
egret = read_json(path)
function _read_egret_solution(path::String)::OrderedDict
egret = _read_json(path)
T = length(egret["system"]["time_keys"])
solution = OrderedDict()

View File

@@ -1,28 +0,0 @@
.navbar-default {
border-bottom: 0px;
background-color: #fff;
box-shadow: 0px 0px 15px rgba(0, 0, 0, 0.2);
}
a, .navbar-default a {
color: #06a !important;
font-weight: normal;
}
.disabled > a {
color: #999 !important;
}
.navbar-default a:hover,
.navbar-default .active,
.active > a {
background-color: #f0f0f0 !important;
}
.icon-bar {
background-color: #666 !important;
}
.navbar-collapse {
border-color: #fff !important;
}

File diff suppressed because one or more lines are too long

View File

@@ -1,42 +0,0 @@
# UnitCommitment.jl
**UnitCommitment.jl** (UC.jl) is a Julia optimization package for the Security-Constrained Unit Commitment Problem (SCUC), a fundamental optimization problem in power systems used, for example, to clear the day-ahead electricity markets. The package provides benchmark instances for the problem and Julia/JuMP implementations of state-of-the-art mixed-integer programming formulations.
### Package Components
* **Data Format:** The package proposes an extensible and fully-documented JSON-based data specification format for SCUC, developed in collaboration with Independent System Operators (ISOs), which describes the most important aspects of the problem. The format supports all the most common generator characteristics (including ramping, piecewise-linear production cost curves and time-dependent startup costs), as well as operating reserves, price-sensitive loads, transmission networks and contingencies.
* **Benchmark Instances:** The package provides a diverse collection of large-scale benchmark instances collected from the literature and extended to make them more challenging and realistic.
* **Model Implementation**: The package provides a Julia/JuMP implementation of state-of-the-art formulations and solution methods for SCUC. Our goal is to keep this implementation up-to-date, as new methods are proposed in the literature.
* **Benchmark Tools:** The package provides automated benchmark scripts to accurately evaluate the performance impact of proposed code changes.
### Documentation
* [Usage](usage.md)
* [Data Format](format.md)
* [Instances](instances.md)
### Source code
* [https://github.com/ANL-CEEESA/unitcommitment.jl](https://github.com/ANL-CEEESA/unitcommitment.jl)
### Authors
* **Alinson Santos Xavier** (Argonne National Laboratory)
* **Feng Qiu** (Argonne National Laboratory)
### Acknowledgments
* We would like to thank **Aleksandr M. Kazachkov** (University of Florida), **Yonghong Chen** (Midcontinent Independent System Operator), **Feng Pan** (Pacific Northwest National Laboratory) for valuable feedback on early versions of this package.
* Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy under Contract No. DE-AC02-06CH11357.
### Citing
If you use UnitCommitment.jl in your research, we request that you cite the package as follows:
* Alinson S. Xavier, Feng Qiu, "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment". Zenodo (2020). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874).
If you make use of the provided instances files, we request that you additionally cite the original sources, as described in the [instances page](instances.md).
### License
Released under the modified BSD license. See `LICENSE.md` for more details.

View File

@@ -1,8 +0,0 @@
MathJax.Hub.Config({
"tex2jax": { inlineMath: [ [ '$', '$' ] ] }
});
MathJax.Hub.Config({
config: ["MMLorHTML.js"],
jax: ["input/TeX", "output/HTML-CSS", "output/NativeMML"],
extensions: ["MathMenu.js", "MathZoom.js"]
});

View File

@@ -1,68 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
struct DotDict
inner::Dict
end
DotDict() = DotDict(Dict())
function Base.setproperty!(d::DotDict, key::Symbol, value)
setindex!(getfield(d, :inner), value, key)
end
function Base.getproperty(d::DotDict, key::Symbol)
(key == :inner ? getfield(d, :inner) : d.inner[key])
end
function Base.getindex(d::DotDict, key::Int64)
d.inner[Symbol(key)]
end
function Base.getindex(d::DotDict, key::Symbol)
d.inner[key]
end
function Base.keys(d::DotDict)
keys(d.inner)
end
function Base.values(d::DotDict)
values(d.inner)
end
function Base.iterate(d::DotDict)
iterate(values(d.inner))
end
function Base.iterate(d::DotDict, v::Int64)
iterate(values(d.inner), v)
end
function Base.length(d::DotDict)
length(values(d.inner))
end
function Base.show(io::IO, d::DotDict)
print(io, "DotDict with $(length(keys(d.inner))) entries:\n")
count = 0
for k in keys(d.inner)
count += 1
if count > 10
print(io, " ...\n")
break
end
print(io, " :$(k) => $(d.inner[k])\n")
end
end
function recursive_to_dot_dict(el)
if typeof(el) == Dict{String, Any}
return DotDict(Dict(Symbol(k) => recursive_to_dot_dict(el[k]) for k in keys(el)))
else
return el
end
end
export recursive_to_dot_dict

File diff suppressed because it is too large Load Diff

View File

@@ -11,8 +11,10 @@ Generates feasible initial conditions for the given instance, by constructing
and solving a single-period mixed-integer optimization problem, using the given
optimizer. The instance is modified in-place.
"""
function generate_initial_conditions!(instance::UnitCommitmentInstance,
optimizer)
function generate_initial_conditions!(
instance::UnitCommitmentInstance,
optimizer,
)::Nothing
G = instance.units
B = instance.buses
t = 1
@@ -23,19 +25,17 @@ function generate_initial_conditions!(instance::UnitCommitmentInstance,
@variable(mip, p[G] >= 0)
# Constraint: Minimum power
@constraint(mip,
min_power[g in G],
p[g] >= g.min_power[t] * x[g])
@constraint(mip, min_power[g in G], p[g] >= g.min_power[t] * x[g])
# Constraint: Maximum power
@constraint(mip,
max_power[g in G],
p[g] <= g.max_power[t] * x[g])
@constraint(mip, max_power[g in G], p[g] <= g.max_power[t] * x[g])
# Constraint: Production equals demand
@constraint(mip,
@constraint(
mip,
power_balance,
sum(b.load[t] for b in B) == sum(p[g] for g in G))
sum(b.load[t] for b in B) == sum(p[g] for g in G)
)
# Constraint: Must run
for g in G
@@ -58,9 +58,7 @@ function generate_initial_conditions!(instance::UnitCommitmentInstance,
return c / mw
end
end
@objective(mip,
Min,
sum(p[g] * cost_slope(g) for g in G))
@objective(mip, Min, sum(p[g] * cost_slope(g) for g in G))
JuMP.optimize!(mip)
@@ -73,4 +71,5 @@ function generate_initial_conditions!(instance::UnitCommitmentInstance,
g.initial_status = -24
end
end
return
end

View File

@@ -5,42 +5,35 @@
using Printf
using JSON
using DataStructures
using GZip
import Base: getindex, time
import GZip
abstract type UCElement end
abstract type Time <: UCElement end
mutable struct Bus <: UCElement
mutable struct Bus
name::String
offset::Int
load::Array{Float64}
units::Array
price_sensitive_loads::Array
load::Vector{Float64}
units::Vector
price_sensitive_loads::Vector
end
mutable struct CostSegment <: UCElement
mw::Array{Float64}
cost::Array{Float64}
mutable struct CostSegment
mw::Vector{Float64}
cost::Vector{Float64}
end
mutable struct StartupCategory <: UCElement
mutable struct StartupCategory
delay::Int
cost::Float64
end
mutable struct Unit <: UCElement
mutable struct Unit
name::String
bus::Bus
max_power::Array{Float64}
min_power::Array{Float64}
must_run::Array{Bool}
min_power_cost::Array{Float64}
cost_segments::Array{CostSegment}
max_power::Vector{Float64}
min_power::Vector{Float64}
must_run::Vector{Bool}
min_power_cost::Vector{Float64}
cost_segments::Vector{CostSegment}
min_uptime::Int
min_downtime::Int
ramp_up_limit::Float64
@@ -49,94 +42,106 @@ mutable struct Unit <: UCElement
shutdown_limit::Float64
initial_status::Union{Int,Nothing}
initial_power::Union{Float64,Nothing}
provides_spinning_reserves::Array{Bool}
startup_categories::Array{StartupCategory}
end # Unit
provides_spinning_reserves::Vector{Bool}
startup_categories::Vector{StartupCategory}
end
mutable struct TransmissionLine <: UCElement
mutable struct TransmissionLine
name::String
offset::Int
source::Bus
target::Bus
reactance::Float64
susceptance::Float64
normal_flow_limit::Array{Float64}
emergency_flow_limit::Array{Float64}
flow_limit_penalty::Array{Float64}
normal_flow_limit::Vector{Float64}
emergency_flow_limit::Vector{Float64}
flow_limit_penalty::Vector{Float64}
end
mutable struct Reserves <: UCElement
spinning::Array{Float64}
mutable struct Reserves
spinning::Vector{Float64}
end
mutable struct Contingency <: UCElement
mutable struct Contingency
name::String
lines::Array{TransmissionLine}
units::Array{Unit}
lines::Vector{TransmissionLine}
units::Vector{Unit}
end
mutable struct PriceSensitiveLoad <: UCElement
mutable struct PriceSensitiveLoad
name::String
bus::Bus
demand::Array{Float64}
revenue::Array{Float64}
demand::Vector{Float64}
revenue::Vector{Float64}
end
mutable struct UnitCommitmentInstance
time::Int
power_balance_penalty::Array{Float64}
"Penalty for failing to meet reserve requirement."
shortfall_penalty::Array{Float64}
units::Array{Unit}
buses::Array{Bus}
lines::Array{TransmissionLine}
power_balance_penalty::Vector{Float64}
units::Vector{Unit}
buses::Vector{Bus}
lines::Vector{TransmissionLine}
reserves::Reserves
contingencies::Array{Contingency}
price_sensitive_loads::Array{PriceSensitiveLoad}
contingencies::Vector{Contingency}
price_sensitive_loads::Vector{PriceSensitiveLoad}
end
function Base.show(io::IO, instance::UnitCommitmentInstance)
print(io, "UnitCommitmentInstance with ")
print(io, "UnitCommitmentInstance(")
print(io, "$(length(instance.units)) units, ")
print(io, "$(length(instance.buses)) buses, ")
print(io, "$(length(instance.lines)) lines, ")
print(io, "$(length(instance.contingencies)) contingencies, ")
print(io, "$(length(instance.price_sensitive_loads)) price sensitive loads")
print(
io,
"$(length(instance.price_sensitive_loads)) price sensitive loads, ",
)
print(io, "$(instance.time) time steps")
print(io, ")")
return
end
function read_benchmark(name::AbstractString)::UnitCommitmentInstance
basedir = dirname(@__FILE__)
return UnitCommitment.read("$basedir/../instances/$name.json.gz")
end
function read(path::AbstractString)::UnitCommitmentInstance
if endswith(path, ".gz")
return read(GZip.gzopen(path))
return _read(gzopen(path))
else
return read(open(path))
return _read(open(path))
end
end
function read(file::IO)::UnitCommitmentInstance
return from_json(JSON.parse(file, dicttype=()->DefaultOrderedDict(nothing)))
function _read(file::IO)::UnitCommitmentInstance
return _from_json(
JSON.parse(file, dicttype = () -> DefaultOrderedDict(nothing)),
)
end
function from_json(json; fix=true)
function _from_json(json; repair = true)
units = Unit[]
buses = Bus[]
contingencies = Contingency[]
lines = TransmissionLine[]
loads = PriceSensitiveLoad[]
T = json["Parameters"]["Time (h)"]
function scalar(x; default = nothing)
x !== nothing || return default
return x
end
time_horizon = json["Parameters"]["Time (h)"]
if time_horizon === nothing
time_horizon = json["Parameters"]["Time horizon (h)"]
end
time_horizon !== nothing || error("Missing parameter: Time horizon (h)")
time_step = scalar(json["Parameters"]["Time step (min)"], default = 60)
(60 % time_step == 0) ||
error("Time step $time_step is not a divisor of 60")
time_multiplier = 60 ÷ time_step
T = time_horizon * time_multiplier
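# Illustrative example (not part of the original source): with
# "Time horizon (h)" = 24 and "Time step (min)" = 15, time_multiplier = 4
# and T = 96 periods. Hourly quantities such as "Startup delays (h)" and
# "Minimum uptime (h)" are scaled by the same multiplier further below.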
name_to_bus = Dict{String,Bus}()
name_to_line = Dict{String,TransmissionLine}()
@@ -148,24 +153,21 @@ function from_json(json; fix=true)
return x
end
function scalar(x; default=nothing)
x !== nothing || return default
x
end
# Read parameters
power_balance_penalty = timeseries(json["Parameters"]["Power balance penalty (\$/MW)"],
default=[1000.0 for t in 1:T])
shortfall_penalty = timeseries(json["Parameters"]["Reserve shortfall penalty (\$/MW)"],
default=[0. for t in 1:T])
power_balance_penalty = timeseries(
json["Parameters"]["Power balance penalty (\$/MW)"],
default = [1000.0 for t in 1:T],
)
# Read buses
for (bus_name, dict) in json["Buses"]
bus = Bus(bus_name,
bus = Bus(
bus_name,
length(buses),
timeseries(dict["Load (MW)"]),
Unit[],
PriceSensitiveLoad[])
PriceSensitiveLoad[],
)
name_to_bus[bus_name] = bus
push!(buses, bus)
end
@@ -176,8 +178,12 @@ function from_json(json; fix=true)
# Read production cost curve
K = length(dict["Production cost curve (MW)"])
curve_mw = hcat([timeseries(dict["Production cost curve (MW)"][k]) for k in 1:K]...)
curve_cost = hcat([timeseries(dict["Production cost curve (\$)"][k]) for k in 1:K]...)
curve_mw = hcat(
[timeseries(dict["Production cost curve (MW)"][k]) for k in 1:K]...,
)
curve_cost = hcat(
[timeseries(dict["Production cost curve (\$)"][k]) for k in 1:K]...,
)
min_power = curve_mw[:, 1]
max_power = curve_mw[:, K]
min_power_cost = curve_cost[:, 1]
@@ -191,44 +197,57 @@ function from_json(json; fix=true)
# Read startup costs
startup_delays = scalar(dict["Startup delays (h)"], default = [1])
startup_costs = scalar(dict["Startup costs (\$)"], default=[0.])
startup_costs = scalar(dict["Startup costs (\$)"], default = [0.0])
startup_categories = StartupCategory[]
for k in 1:length(startup_delays)
push!(startup_categories, StartupCategory(startup_delays[k],
startup_costs[k]))
push!(
startup_categories,
StartupCategory(
startup_delays[k] .* time_multiplier,
startup_costs[k],
),
)
end
# Read and validate initial conditions
initial_power = scalar(dict["Initial power (MW)"], default = nothing)
initial_status = scalar(dict["Initial status (h)"], default = nothing)
if initial_power === nothing
initial_status === nothing || error("unit $unit_name has initial status but no initial power")
initial_status === nothing ||
error("unit $unit_name has initial status but no initial power")
else
initial_status !== nothing || error("unit $unit_name has initial power but no initial status")
initial_status != 0 || error("unit $unit_name has invalid initial status")
initial_status !== nothing ||
error("unit $unit_name has initial power but no initial status")
initial_status != 0 ||
error("unit $unit_name has invalid initial status")
if initial_status < 0 && initial_power > 1e-3
error("unit $unit_name has invalid initial power")
end
initial_status *= time_multiplier
end
unit = Unit(unit_name,
unit = Unit(
unit_name,
bus,
max_power,
min_power,
timeseries(dict["Must run?"], default = [false for t in 1:T]),
min_power_cost,
segments,
scalar(dict["Minimum uptime (h)"], default=1),
scalar(dict["Minimum downtime (h)"], default=1),
scalar(dict["Minimum uptime (h)"], default = 1) * time_multiplier,
scalar(dict["Minimum downtime (h)"], default = 1) * time_multiplier,
scalar(dict["Ramp up limit (MW)"], default = 1e6),
scalar(dict["Ramp down limit (MW)"], default = 1e6),
scalar(dict["Startup limit (MW)"], default = 1e6),
scalar(dict["Shutdown limit (MW)"], default = 1e6),
initial_status,
initial_power,
timeseries(dict["Provides spinning reserves?"],
default=[true for t in 1:T]),
startup_categories)
timeseries(
dict["Provides spinning reserves?"],
default = [true for t in 1:T],
),
startup_categories,
)
push!(bus.units, unit)
name_to_unit[unit_name] = unit
push!(units, unit)
@@ -237,25 +256,33 @@ function from_json(json; fix=true)
# Read reserves
reserves = Reserves(zeros(T))
if "Reserves" in keys(json)
reserves.spinning = timeseries(json["Reserves"]["Spinning (MW)"],
default=zeros(T))
reserves.spinning =
timeseries(json["Reserves"]["Spinning (MW)"], default = zeros(T))
end
# Read transmission lines
if "Transmission lines" in keys(json)
for (line_name, dict) in json["Transmission lines"]
line = TransmissionLine(line_name,
line = TransmissionLine(
line_name,
length(lines) + 1,
name_to_bus[dict["Source bus"]],
name_to_bus[dict["Target bus"]],
scalar(dict["Reactance (ohms)"]),
scalar(dict["Susceptance (S)"]),
timeseries(dict["Normal flow limit (MW)"],
default=[1e8 for t in 1:T]),
timeseries(dict["Emergency flow limit (MW)"],
default=[1e8 for t in 1:T]),
timeseries(dict["Flow limit penalty (\$/MW)"],
default=[5000.0 for t in 1:T]))
timeseries(
dict["Normal flow limit (MW)"],
default = [1e8 for t in 1:T],
),
timeseries(
dict["Emergency flow limit (MW)"],
default = [1e8 for t in 1:T],
),
timeseries(
dict["Flow limit penalty (\$/MW)"],
default = [5000.0 for t in 1:T],
),
)
name_to_line[line_name] = line
push!(lines, line)
end
@@ -267,10 +294,12 @@ function from_json(json; fix=true)
affected_units = Unit[]
affected_lines = TransmissionLine[]
if "Affected lines" in keys(dict)
affected_lines = [name_to_line[l] for l in dict["Affected lines"]]
affected_lines =
[name_to_line[l] for l in dict["Affected lines"]]
end
if "Affected units" in keys(dict)
affected_units = [name_to_unit[u] for u in dict["Affected units"]]
affected_units =
[name_to_unit[u] for u in dict["Affected units"]]
end
cont = Contingency(cont_name, affected_lines, affected_units)
push!(contingencies, cont)
@@ -281,7 +310,8 @@ function from_json(json; fix=true)
if "Price-sensitive loads" in keys(json)
for (load_name, dict) in json["Price-sensitive loads"]
bus = name_to_bus[dict["Bus"]]
load = PriceSensitiveLoad(load_name,
load = PriceSensitiveLoad(
load_name,
bus,
timeseries(dict["Demand (MW)"]),
timeseries(dict["Revenue (\$/MW)"]),
@@ -291,22 +321,22 @@ function from_json(json; fix=true)
end
end
instance = UnitCommitmentInstance(T,
instance = UnitCommitmentInstance(
T,
power_balance_penalty,
shortfall_penalty,
units,
buses,
lines,
reserves,
contingencies,
loads)
if fix
UnitCommitment.fix!(instance)
loads,
)
if repair
UnitCommitment.repair!(instance)
end
return instance
end
"""
slice(instance, range)
@@ -322,7 +352,10 @@ Example
modified = UnitCommitment.slice(instance, 1:2)
"""
function slice(instance::UnitCommitmentInstance, range::UnitRange{Int})::UnitCommitmentInstance
function slice(
instance::UnitCommitmentInstance,
range::UnitRange{Int},
)::UnitCommitmentInstance
modified = deepcopy(instance)
modified.time = length(range)
modified.power_balance_penalty = modified.power_balance_penalty[range]
@@ -353,5 +386,4 @@ function slice(instance::UnitCommitmentInstance, range::UnitRange{Int})::UnitCom
return modified
end
export UnitCommitmentInstance
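A short reading and slicing sketch; the file name is hypothetical:
using UnitCommitment
instance = UnitCommitment.read("my_instance.json.gz")
instance.time                                   # number of time periods
day_one = UnitCommitment.slice(instance, 1:24)  # keep only the first 24 periods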

View File

@@ -8,8 +8,8 @@ using Base.CoreLogging, Logging, Printf
struct TimeLogger <: AbstractLogger
initial_time::Float64
file::Union{Nothing,IOStream}
screen_log_level
io_log_level
screen_log_level::Any
io_log_level::Any
end
function TimeLogger(;
@@ -24,7 +24,8 @@ end
min_enabled_level(logger::TimeLogger) = logger.io_log_level
shouldlog(logger::TimeLogger, level, _module, group, id) = true
function handle_message(logger::TimeLogger,
function handle_message(
logger::TimeLogger,
level,
message,
_module,
@@ -32,11 +33,21 @@ function handle_message(logger::TimeLogger,
id,
filepath,
line;
kwargs...)
kwargs...,
)
elapsed_time = time() - logger.initial_time
time_string = @sprintf("[%12.3f] ", elapsed_time)
if level >= Logging.Error
color = :light_red
elseif level >= Logging.Warn
color = :light_yellow
else
color = :light_green
end
if level >= logger.screen_log_level
print(time_string)
printstyled(time_string, color = color)
println(message)
end
if logger.file !== nothing && level >= logger.io_log_level
@@ -47,4 +58,7 @@ function handle_message(logger::TimeLogger,
end
end
export TimeLogger
function _setup_logger()
initial_time = time()
return global_logger(TimeLogger(initial_time = initial_time))
end
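A hedged sketch of how the logger is used; the messages are illustrative:
using Logging, UnitCommitment
global_logger(TimeLogger(initial_time = time()))
@info "Building model..."    # prefixed with elapsed time, shown in light green
@warn "Something looks off"  # shown in light yellow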

File diff suppressed because it is too large

View File

@@ -1,475 +0,0 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
# Written by Alinson S. Xavier <axavier@anl.gov>
using JuMP, MathOptInterface, DataStructures
import JuMP: value, fix, set_name
# Extend some JuMP functions so that decision variables can be safely replaced by
# (constant) floating point numbers.
function value(x::Float64)
x
end
function fix(x::Float64, v::Float64; force)
abs(x - v) < 1e-6 || error("Value mismatch: $x != $v")
end
function set_name(x::Float64, n::String)
# nop
end
"""
Create a JuMP model using the variables and constraints defined by
the collection of `UCComponent`s in `formulation`.
Parameters
===
* `isf`: injection shift factors
* `lodf`: line outage distribution factors
"""
function build_model(;
filename::Union{String, Nothing}=nothing,
instance::Union{UnitCommitmentInstance, Nothing}=nothing,
isf::Union{Array{Float64,2}, Nothing}=nothing,
lodf::Union{Array{Float64,2}, Nothing}=nothing,
isf_cutoff::Float64=0.005,
lodf_cutoff::Float64=0.001,
optimizer=nothing,
model=nothing,
variable_names::Bool=false,
formulation::Vector{UCComponent} = UnitCommitment.DefaultFormulation,
) :: UnitCommitmentModel2
if (filename == nothing) && (instance == nothing)
error("Either filename or instance must be specified")
end
if filename != nothing
@info "Reading: $(filename)"
time_read = @elapsed begin
instance = UnitCommitment.read(filename)
end
@info @sprintf("Read problem in %.2f seconds", time_read)
end
if length(instance.buses) == 1
isf = zeros(0, 0)
lodf = zeros(0, 0)
else
if isf == nothing
@info "Computing injection shift factors..."
time_isf = @elapsed begin
isf = UnitCommitment.injection_shift_factors(lines=instance.lines,
buses=instance.buses)
end
@info @sprintf("Computed ISF in %.2f seconds", time_isf)
@info "Computing line outage factors..."
time_lodf = @elapsed begin
lodf = UnitCommitment.line_outage_factors(lines=instance.lines,
buses=instance.buses,
isf=isf)
end
@info @sprintf("Computed LODF in %.2f seconds", time_lodf)
@info @sprintf("Applying PTDF and LODF cutoffs (%.5f, %.5f)", isf_cutoff, lodf_cutoff)
isf[abs.(isf) .< isf_cutoff] .= 0
lodf[abs.(lodf) .< lodf_cutoff] .= 0
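# Entries with magnitude below the cutoffs are rounded to zero, which
# sparsifies the transmission constraints at a small cost in accuracy.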
end
end
@info "Building model..."
time_model = @elapsed begin
if model == nothing
if optimizer == nothing
mip = Model()
else
mip = Model(optimizer)
end
else
mip = model
end
@info "About to build model"
model = UnitCommitmentModel2(mip, # JuMP.Model
DotDict(), # vars
DotDict(), # eqs
DotDict(), # exprs
instance, # UnitCommitmentInstance
isf, # injection shift factors
lodf, # line outage distribution factors
AffExpr(), # obj
formulation, # formulation
)
# Prepare variables
for var in get_required_variables(formulation)
add_variable(mip, model, instance, UnitCommitment.var_list[var])
end # prepare variables
# Prepare constraints
for constr in get_required_constraints(formulation)
add_constraint(mip, model, instance, constr)
end # prepare constraints
# Prepare expressions (in this case, affine expressions that are later used as part of constraints or objective)
# * :startup_cost => contribution to objective of startup costs
for field in [:startup_cost] #[:net_injection]
setproperty!(model.exprs, field, OrderedDict())
end
# Add components to mip
for c in formulation
c.add_component(c, mip, model)
end
# Add objective function
build_obj_function!(model)
end # end timing of building model
@info @sprintf("Built model in %.2f seconds", time_model)
if variable_names
set_variable_names!(model)
end
return model
end # build_model
"""
Add a particular variable to `model.vars`.
"""
function add_variable(mip::JuMP.Model,
model::UnitCommitmentModel2,
instance::UnitCommitmentInstance,
var::UCVariable)
setproperty!(model.vars, var.name, OrderedDict())
x = getproperty(model.vars, var.name)
if !isnothing(var.add_variable)
var.add_variable(var, x, mip, instance)
return
end
# The following is a bit complex-looking, but the idea is ultimately straightforward
# We want to loop over the possible index values for var,
# for every dimension of var (e.g., looping over units and time)
# The OrderedDict `ind_to_field` maps a UCElement to the corresponding field name within a UnitCommitmentInstance
# NB: this can be an array of field names, such as [:x, :y], which means we want to access instance.x.y
# Furthermore, `var` has an array `indices` of UCElement values, describing which index loops over
# So all we want is to extract the _length_ of the corresponding field of `instance`
# We create a Tuple so we can feed it to CartesianIndices
fields = UnitCommitment.ind_to_field(var.indices)
num_indices = UnitCommitment.num_indices(fields)
# There is some really complicated logic below that one day needs to be improved
# (we need to handle nested indices, and this is one way that hopefully works, but it is definitely not intuitive)
loop_primitive = UnitCommitment.loop_over_indices(UnitCommitment.get_indices_tuple(instance, fields))
indices = UnitCommitment.get_indices(loop_primitive) # returns an array of tuples? or a unit range maybe.
for ind in indices
# For each of the indices, check if the field corresponding to that index has a name
# Then we will index the variable by that name instead of the integer
curr_tuple = Tuple(ind)
new_tuple = ()
for i in 1:num_indices
curr_field = UnitCommitment.get_nested_field(instance, fields, i, curr_tuple)
if :name in propertynames(curr_field)
new_tuple = (new_tuple..., curr_field.name)
else
new_tuple = (new_tuple..., curr_tuple[i])
end
end
name = string(var.name, "[")
for (i,val) in enumerate(new_tuple)
name = string(name, val, i < num_indices ? "," : "")
end
name = string(name, "]")
if num_indices == 1
new_tuple = new_tuple[1]
end
x[new_tuple] = @variable(mip,
lower_bound=var.lb,
upper_bound=var.ub,
integer=var.integer,
base_name=name)
end
### DEBUG
#if var.name == :reserve_shortfall
# @show var.name, num_indices, loop_primitive, indices, x
# #@show JuMP.all_variables(mip)
#end
### DEBUG
end # add_variable
"""
Add constraint to `model.eqs` (set of affine expressions represent left-hand side of constraints).
"""
function add_constraint(mip::JuMP.Model,
model::UnitCommitmentModel2,
instance::UnitCommitmentInstance,
constr::Symbol)
setproperty!(model.eqs, constr, OrderedDict())
end # add_constraint
"""
Components of the objective include, summed over time:
* production cost above minimum
* minimum production cost if generator is on
* startup cost
* shutdown cost
* reserve shortfall penalty
* penalty for unmet or excess load (using the curtail variable)
"""
function build_obj_function!(model::UnitCommitmentModel2)
@objective(model.mip, Min, model.obj)
end # build_obj_function
function enforce_transmission(;
model::UnitCommitmentModel2,
violation::Violation,
isf::Array{Float64,2},
lodf::Array{Float64,2})::Nothing
instance, mip, vars = model.instance, model.mip, model.vars
limit::Float64 = 0.0
if violation.outage_line == nothing
limit = violation.monitored_line.normal_flow_limit[violation.time]
@info @sprintf(" %8.3f MW overflow in %-5s time %3d (pre-contingency)",
violation.amount,
violation.monitored_line.name,
violation.time)
else
limit = violation.monitored_line.emergency_flow_limit[violation.time]
@info @sprintf(" %8.3f MW overflow in %-5s time %3d (outage: line %s)",
violation.amount,
violation.monitored_line.name,
violation.time,
violation.outage_line.name)
end
fm = violation.monitored_line.name
t = violation.time
flow = @variable(mip, base_name="flow[$fm,$t]")
# |flow| <= limit + overflow
overflow = vars.overflow[violation.monitored_line.name, violation.time]
@constraint(mip, flow <= limit + overflow)
@constraint(mip, -flow <= limit + overflow)
if violation.outage_line == nothing
@constraint(mip, flow == sum(vars.net_injection[b.name, violation.time] *
isf[violation.monitored_line.offset, b.offset]
for b in instance.buses
if b.offset > 0))
else
@constraint(mip, flow == sum(vars.net_injection[b.name, violation.time] * (
isf[violation.monitored_line.offset, b.offset] + (
lodf[violation.monitored_line.offset, violation.outage_line.offset] *
isf[violation.outage_line.offset, b.offset]
)
)
for b in instance.buses
if b.offset > 0))
end
nothing
end # enforce_transmission
function set_variable_names!(model::UnitCommitmentModel2)
@info "Setting variable and constraint names..."
time_varnames = @elapsed begin
#set_jump_names!(model.vars) # amk: already set
set_jump_names!(model.eqs)
end
@info @sprintf("Set names in %.2f seconds", time_varnames)
end # set_variable_names
function set_jump_names!(dict)
for name in keys(dict)
for idx in keys(dict[name])
idx_str = isa(idx, Tuple) ? join(map(string, idx), ",") : idx
set_name(dict[name][idx], "$name[$idx_str]")
end
end
end # set_jump_names
function get_solution(model::UnitCommitmentModel2)
instance, T = model.instance, model.instance.time
function timeseries(vars, collection)
return OrderedDict(b.name => [round(value(vars[b.name, t]), digits=5) for t in 1:T]
for b in collection)
end
function production_cost(g)
return [value(model.vars.is_on[g.name, t]) * g.min_power_cost[t] +
sum(Float64[value(model.vars.segprod[g.name, k, t]) * g.cost_segments[k].cost[t]
for k in 1:length(g.cost_segments)])
for t in 1:T]
end
function production(g)
return [value(model.vars.is_on[g.name, t]) * g.min_power[t] +
sum(Float64[value(model.vars.segprod[g.name, k, t])
for k in 1:length(g.cost_segments)])
for t in 1:T]
end
function startup_cost(g)
#S = length(g.startup_categories)
#return [sum(g.startup_categories[s].cost * value(model.vars.startup[g.name, s, t])
# for s in 1:S)
# for t in 1:T]
return [ value.(model.exprs.startup_cost[g.name, t]) for t in 1:T ]
end
sol = OrderedDict()
sol["Production (MW)"] = OrderedDict(g.name => production(g) for g in instance.units)
sol["Production cost (\$)"] = OrderedDict(g.name => production_cost(g) for g in instance.units)
sol["Startup cost (\$)"] = OrderedDict(g.name => startup_cost(g) for g in instance.units)
sol["Is on"] = timeseries(model.vars.is_on, instance.units)
sol["Switch on"] = timeseries(model.vars.switch_on, instance.units)
sol["Switch off"] = timeseries(model.vars.switch_off, instance.units)
sol["Reserve (MW)"] = timeseries(model.vars.reserve, instance.units)
sol["Net injection (MW)"] = timeseries(model.vars.net_injection, instance.buses)
sol["Load curtail (MW)"] = timeseries(model.vars.curtail, instance.buses)
if !isempty(instance.lines)
sol["Line overflow (MW)"] = timeseries(model.vars.overflow, instance.lines)
end
if !isempty(instance.price_sensitive_loads)
sol["Price-sensitive loads (MW)"] = timeseries(model.vars.loads, instance.price_sensitive_loads)
end
return sol
end # get_solution
function fix!(model::UnitCommitmentModel2, solution)::Nothing
vars, instance, T = model.vars, model.instance, model.instance.time
for g in instance.units
for t in 1:T
is_on = round(solution["Is on"][g.name][t])
production = round(solution["Production (MW)"][g.name][t], digits=5)
reserve = round(solution["Reserve (MW)"][g.name][t], digits=5)
JuMP.fix(vars.is_on[g.name, t], is_on, force=true)
JuMP.fix(vars.prod_above[g.name, t], production - is_on * g.min_power[t], force=true)
JuMP.fix(vars.reserve[g.name, t], reserve, force=true)
end
end
end # fix!
function set_warm_start!(model::UnitCommitmentModel2, solution)::Nothing
vars, instance, T = model.vars, model.instance, model.instance.time
for g in instance.units
for t in 1:T
JuMP.set_start_value(vars.is_on[g.name, t], solution["Is on"][g.name][t])
JuMP.set_start_value(vars.switch_on[g.name, t], solution["Switch on"][g.name][t])
JuMP.set_start_value(vars.switch_off[g.name, t], solution["Switch off"][g.name][t])
end
end
end # set_warm_start
function optimize!(model::UnitCommitmentModel2;
time_limit=3600,
gap_limit=1e-4,
two_phase_gap=true,
)::Nothing
function set_gap(gap)
try
JuMP.set_optimizer_attribute(model.mip, "MIPGap", gap)
@info @sprintf("MIP gap tolerance set to %f", gap)
catch
@warn "Could not change MIP gap tolerance"
end
end
instance = model.instance
initial_time = time()
large_gap = false
has_transmission = (length(model.isf) > 0)
if has_transmission && two_phase_gap
set_gap(1e-2)
large_gap = true
else
set_gap(gap_limit)
end
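# Two-phase strategy: when transmission constraints are present, solve first
# with a loose 1% gap; once no violations remain, tighten to gap_limit and
# solve again before terminating.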
while true
time_elapsed = time() - initial_time
time_remaining = time_limit - time_elapsed
if time_remaining < 0
@info "Time limit exceeded"
break
end
@info @sprintf("Setting MILP time limit to %.2f seconds", time_remaining)
JuMP.set_time_limit_sec(model.mip, time_remaining)
@info "Solving MILP..."
JuMP.optimize!(model.mip)
has_transmission || break
violations = find_violations(model)
if isempty(violations)
@info "No violations found"
if large_gap
large_gap = false
set_gap(gap_limit)
else
break
end
else
enforce_transmission(model, violations)
end
end
nothing
end # optimize!
"""
Identify which transmission lines are violated.
See find_violations description from screening.jl.
"""
function find_violations(model::UnitCommitmentModel2)
instance, vars = model.instance, model.vars
length(instance.buses) > 1 || return []
violations = []
@info "Verifying transmission limits..."
time_screening = @elapsed begin
non_slack_buses = [b for b in instance.buses if b.offset > 0]
net_injections = [value(vars.net_injection[b.name, t])
for b in non_slack_buses, t in 1:instance.time]
overflow = [value(vars.overflow[lm.name, t])
for lm in instance.lines, t in 1:instance.time]
violations = UnitCommitment.find_violations(instance=instance,
net_injections=net_injections,
overflow=overflow,
isf=model.isf,
lodf=model.lodf)
end
@info @sprintf("Verified transmission limits in %.2f seconds", time_screening)
return violations
end # find_violations
function enforce_transmission(model::UnitCommitmentModel2, violations::Array{Violation, 1})
for v in violations
enforce_transmission(model=model,
violation=v,
isf=model.isf,
lodf=model.lodf)
end
end # enforce_transmission
export UnitCommitmentModel2, build_model, get_solution, optimize!
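For context, a hedged sketch of the workflow this (now removed) file supported; the file name and the Cbc solver are assumptions:
using UnitCommitment, Cbc
model = UnitCommitment.build_model(
    filename = "my_instance.json.gz",
    optimizer = Cbc.Optimizer,
)
UnitCommitment.optimize!(model, time_limit = 3600)
solution = UnitCommitment.get_solution(model)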

View File

@@ -4,11 +4,9 @@
# Copyright (C) 2019 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>
using DataStructures
using Base.Threads
struct Violation
time::Int
monitored_line::TransmissionLine
@@ -16,7 +14,6 @@ struct Violation
amount::Float64 # Violation amount (in MW)
end
function Violation(;
time::Int,
monitored_line::TransmissionLine,
@@ -26,14 +23,12 @@ function Violation(;
return Violation(time, monitored_line, outage_line, amount)
end
mutable struct ViolationFilter
max_per_line::Int
max_total::Int
queues::Dict{Int,PriorityQueue{Violation,Float64}}
end
function ViolationFilter(;
max_per_line::Int = 1,
max_total::Int = 5,
@@ -41,10 +36,10 @@ function ViolationFilter(;
return ViolationFilter(max_per_line, max_total, Dict())
end
function offer(filter::ViolationFilter, v::Violation)::Nothing
function _offer(filter::ViolationFilter, v::Violation)::Nothing
if v.monitored_line.offset keys(filter.queues)
filter.queues[v.monitored_line.offset] = PriorityQueue{Violation, Float64}()
filter.queues[v.monitored_line.offset] =
PriorityQueue{Violation,Float64}()
end
q::PriorityQueue{Violation,Float64} = filter.queues[v.monitored_line.offset]
if length(q) < filter.max_per_line
@@ -55,11 +50,10 @@ function offer(filter::ViolationFilter, v::Violation)::Nothing
enqueue!(q, v => v.amount)
end
end
nothing
return nothing
end
function query(filter::ViolationFilter)::Array{Violation, 1}
function _query(filter::ViolationFilter)::Array{Violation,1}
violations = Array{Violation,1}()
time_queue = PriorityQueue{Violation,Float64}()
for l in keys(filter.queues)
@@ -82,10 +76,10 @@ function query(filter::ViolationFilter)::Array{Violation, 1}
return violations
end
"""
function find_violations(instance::UnitCommitmentInstance,
function _find_violations(
instance::UnitCommitmentInstance,
net_injections::Array{Float64, 2};
isf::Array{Float64,2},
lodf::Array{Float64,2},
@@ -93,15 +87,17 @@ end
max_per_period::Int = 5,
)::Array{Violation, 1}
Find transmission constraint violations (both pre-contingency, as well as post-contingency).
Find transmission constraint violations (both pre-contingency and
post-contingency).
The argument `net_injection` should be a (B-1) x T matrix, where B is the number of buses
and T is the number of time periods. The arguments `isf` and `lodf` can be computed using
UnitCommitment.injection_shift_factors and UnitCommitment.line_outage_factors.
The argument `overflow` specifies how much flow above the transmission limits (in MW) is allowed.
It should be an L x T matrix, where L is the number of transmission lines.
The argument `net_injection` should be a (B-1) x T matrix, where B is the
number of buses and T is the number of time periods. The arguments `isf` and
`lodf` can be computed using UnitCommitment.injection_shift_factors and
UnitCommitment.line_outage_factors. The argument `overflow` specifies how much
flow above the transmission limits (in MW) is allowed. It should be an L x T
matrix, where L is the number of transmission lines.
"""
function find_violations(;
function _find_violations(;
instance::UnitCommitmentInstance,
net_injections::Array{Float64,2},
overflow::Array{Float64,2},
@@ -110,7 +106,6 @@ function find_violations(;
max_per_line::Int = 1,
max_per_period::Int = 5,
)::Array{Violation,1}
B = length(instance.buses) - 1
L = length(instance.lines)
T = instance.time
@@ -120,20 +115,27 @@ function find_violations(;
size(isf) == (L, B) || error("isf has incorrect size")
size(lodf) == (L, L) || error("lodf has incorrect size")
filters = Dict(t => ViolationFilter(max_total=max_per_period,
max_per_line=max_per_line)
for t in 1:T)
filters = Dict(
t => ViolationFilter(
max_total = max_per_period,
max_per_line = max_per_line,
) for t in 1:T
)
pre_flow::Array{Float64} = zeros(L, K) # pre_flow[lm, thread]
post_flow::Array{Float64} = zeros(L, L, K) # post_flow[lm, lc, thread]
pre_v::Array{Float64} = zeros(L, K) # pre_v[lm, thread]
post_v::Array{Float64} = zeros(L, L, K) # post_v[lm, lc, thread]
normal_limits::Array{Float64,2} = [l.normal_flow_limit[t] + overflow[l.offset, t]
for l in instance.lines, t in 1:T]
normal_limits::Array{Float64,2} = [
l.normal_flow_limit[t] + overflow[l.offset, t] for
l in instance.lines, t in 1:T
]
emergency_limits::Array{Float64,2} = [l.emergency_flow_limit[t] + overflow[l.offset, t]
for l in instance.lines, t in 1:T]
emergency_limits::Array{Float64,2} = [
l.emergency_flow_limit[t] + overflow[l.offset, t] for
l in instance.lines, t in 1:T
]
is_vulnerable::Array{Bool} = zeros(Bool, L)
for c in instance.contingencies
@@ -148,51 +150,63 @@ function find_violations(;
# Post-contingency flows
for lc in 1:L, lm in 1:L
post_flow[lm, lc, k] = pre_flow[lm, k] + pre_flow[lc, k] * lodf[lm, lc]
post_flow[lm, lc, k] =
pre_flow[lm, k] + pre_flow[lc, k] * lodf[lm, lc]
end
# Pre-contingency violations
for lm in 1:L
pre_v[lm, k] = max(0.0,
pre_v[lm, k] = max(
0.0,
pre_flow[lm, k] - normal_limits[lm, t],
- pre_flow[lm, k] - normal_limits[lm, t])
-pre_flow[lm, k] - normal_limits[lm, t],
)
end
# Post-contingency violations
for lc in 1:L, lm in 1:L
post_v[lm, lc, k] = max(0.0,
post_v[lm, lc, k] = max(
0.0,
post_flow[lm, lc, k] - emergency_limits[lm, t],
- post_flow[lm, lc, k] - emergency_limits[lm, t])
-post_flow[lm, lc, k] - emergency_limits[lm, t],
)
end
# Offer pre-contingency violations
for lm in 1:L
if pre_v[lm, k] > 1e-5
offer(filters[t], Violation(time=t,
_offer(
filters[t],
Violation(
time = t,
monitored_line = instance.lines[lm],
outage_line = nothing,
amount=pre_v[lm, k]))
amount = pre_v[lm, k],
),
)
end
end
# Offer post-contingency violations
for lm in 1:L, lc in 1:L
if post_v[lm, lc, k] > 1e-5 && is_vulnerable[lc]
offer(filters[t], Violation(time=t,
_offer(
filters[t],
Violation(
time = t,
monitored_line = instance.lines[lm],
outage_line = instance.lines[lc],
amount=post_v[lm, lc, k]))
amount = post_v[lm, lc, k],
),
)
end
end
end
violations = Violation[]
for t in 1:instance.time
append!(violations, query(filters[t]))
append!(violations, _query(filters[t]))
end
return violations
end
export Violation, ViolationFilter, offer, query, find_violations
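A hedged sketch of the screening step; `instance`, `net_injections`, and `overflow` are assumed to already exist (the latter two come from a solved model), and the underscore-prefixed helpers are internal:
isf = UnitCommitment._injection_shift_factors(
    lines = instance.lines,
    buses = instance.buses,
)
lodf = UnitCommitment._line_outage_factors(
    lines = instance.lines,
    buses = instance.buses,
    isf = isf,
)
violations = UnitCommitment._find_violations(
    instance = instance,
    net_injections = net_injections,  # (B-1) x T matrix
    overflow = overflow,              # L x T matrix
    isf = isf,
    lodf = lodf,
)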

View File

@@ -5,31 +5,38 @@
using SparseArrays, Base.Threads, LinearAlgebra, JuMP
"""
injection_shift_factors(; buses, lines)
_injection_shift_factors(; buses, lines)
Returns a (B-1)xL matrix M, where B is the number of buses and L is the number of transmission
lines. For a given bus b and transmission line l, the entry M[l.offset, b.offset] indicates
the amount of power (in MW) that flows through transmission line l when 1 MW of power is
injected at the slack bus (the bus that has offset zero) and withdrawn from b.
Returns an L x (B-1) matrix M, where B is the number of buses and L is the
number of transmission lines. For a given bus b and transmission line l, the
entry M[l.offset, b.offset] indicates the amount of power (in MW) that flows
through transmission line l when 1 MW of power is injected at the slack bus
(the bus that has offset zero) and withdrawn from b.
"""
function injection_shift_factors(; buses, lines)
susceptance = susceptance_matrix(lines)
incidence = reduced_incidence_matrix(lines = lines, buses = buses)
function _injection_shift_factors(;
buses::Array{Bus},
lines::Array{TransmissionLine},
)
susceptance = _susceptance_matrix(lines)
incidence = _reduced_incidence_matrix(lines = lines, buses = buses)
laplacian = transpose(incidence) * susceptance * incidence
isf = susceptance * incidence * inv(Array(laplacian))
return isf
end
"""
reduced_incidence_matrix(; buses::Array{Bus}, lines::Array{TransmissionLine})
_reduced_incidence_matrix(; buses::Array{Bus}, lines::Array{TransmissionLine})
Returns the incidence matrix for the network, with the column corresponding to the slack
bus is removed. More precisely, returns a (B-1) x L matrix, where B is the number of buses
and L is the number of lines. For each row, there is a 1 element and a -1 element, indicating
the source and target buses, respectively, for that line.
Returns the incidence matrix for the network, with the column corresponding to
the slack bus removed. More precisely, returns an L x (B-1) matrix, where B is
the number of buses and L is the number of lines. Each row contains a 1 element
and a -1 element, indicating the source and target buses, respectively, of that
line.
"""
function reduced_incidence_matrix(; buses::Array{Bus}, lines::Array{TransmissionLine})
function _reduced_incidence_matrix(;
buses::Array{Bus},
lines::Array{TransmissionLine},
)
matrix = spzeros(Float64, length(lines), length(buses) - 1)
for line in lines
if line.source.offset > 0
@@ -39,37 +46,34 @@ function reduced_incidence_matrix(; buses::Array{Bus}, lines::Array{Transmission
matrix[line.offset, line.target.offset] = -1
end
end
matrix
return matrix
end
"""
susceptance_matrix(lines::Array{TransmissionLine})
_susceptance_matrix(lines::Array{TransmissionLine})
Returns a LxL diagonal matrix, where each diagonal entry is the susceptance of the
corresponding transmission line.
Returns a LxL diagonal matrix, where each diagonal entry is the susceptance of
the corresponding transmission line.
"""
function susceptance_matrix(lines::Array{TransmissionLine})
function _susceptance_matrix(lines::Array{TransmissionLine})
return Diagonal([l.susceptance for l in lines])
end
"""
line_outage_factors(; buses, lines, isf)
_line_outage_factors(; buses, lines, isf)
Returns a LxL matrix containing the Line Outage Distribution Factors (LODFs) for the
given network. This matrix how does the pre-contingency flow change when each individual
transmission line is removed.
Returns a LxL matrix containing the Line Outage Distribution Factors (LODFs)
for the given network. This matrix indicates how the pre-contingency flows
change when each individual transmission line is removed.
"""
function line_outage_factors(;
function _line_outage_factors(;
buses::Array{Bus,1},
lines::Array{TransmissionLine,1},
isf::Array{Float64,2},
)::Array{Float64,2}
n_lines, n_buses = size(isf)
incidence = Array(reduced_incidence_matrix(lines=lines,
buses=buses))
incidence = Array(_reduced_incidence_matrix(lines = lines, buses = buses))
lodf::Array{Float64,2} = isf * transpose(incidence)
m, n = size(lodf)
for i in 1:n

View File

@@ -10,14 +10,11 @@ using JuMP
using MathOptInterface
using SparseArrays
pkg = [:DataStructures,
:JSON,
:JuMP,
:MathOptInterface,
:SparseArrays,
]
pkg = [:DataStructures, :JSON, :JuMP, :MathOptInterface, :SparseArrays]
@info "Building system image..."
create_sysimage(pkg,
create_sysimage(
pkg,
precompile_statements_file = "build/precompile.jl",
sysimage_path="build/sysimage.so")
sysimage_path = "build/sysimage.so",
)

View File

@@ -7,15 +7,16 @@ using Printf
bin(x) = [xi > 0.5 for xi in x]
"""
fix!(instance)
repair!(instance)
Verifies that the given unit commitment instance is valid and automatically fixes
some validation errors if possible, issuing a warning for each error found.
If a validation error cannot be automatically fixed, issues an exception.
Verifies that the given unit commitment instance is valid and automatically
fixes some validation errors if possible, issuing a warning for each error
found. If a validation error cannot be automatically fixed, throws an
exception.
Returns the number of validation errors found.
"""
function fix!(instance::UnitCommitmentInstance)::Int
function repair!(instance::UnitCommitmentInstance)::Int
n_errors = 0
for g in instance.units
@@ -39,7 +40,6 @@ function fix!(instance::UnitCommitmentInstance)::Int
g.startup_categories[s].cost = new_value
n_errors += 1
end
end
for t in 1:instance.time
@@ -67,18 +67,15 @@ function fix!(instance::UnitCommitmentInstance)::Int
end
end
return n_errors
end
function validate(instance_filename::String, solution_filename::String)
instance = UnitCommitment.read(instance_filename)
solution = JSON.parse(open(solution_filename))
return validate(instance, solution)
end
"""
validate(instance, solution)::Bool
@@ -86,16 +83,18 @@ Verifies that the given solution is feasible for the problem. If feasible,
silently returns true. If infeasible, returns false and prints the validation
errors to the screen.
This function is implemented independently from the optimization model in `model.jl`, and
therefore can be used to verify that the model is indeed producing valid solutions. It
can also be used to verify the solutions produced by other optimization packages.
This function is implemented independently from the optimization model in
`model.jl`, and therefore can be used to verify that the model is indeed
producing valid solutions. It can also be used to verify the solutions produced
by other optimization packages.
"""
function validate(instance::UnitCommitmentInstance,
solution::Union{Dict,OrderedDict};
function validate(
instance::UnitCommitmentInstance,
solution::Union{Dict,OrderedDict},
)::Bool
err_count = 0
err_count += validate_units(instance, solution)
err_count += validate_reserve_and_demand(instance, solution)
err_count += _validate_units(instance, solution)
err_count += _validate_reserve_and_demand(instance, solution)
if err_count > 0
@error "Found $err_count validation errors"
@@ -105,8 +104,7 @@ function validate(instance::UnitCommitmentInstance,
return true
end
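# A hedged usage sketch (file names are hypothetical):
#
#     instance = UnitCommitment.read("my_instance.json.gz")
#     solution = JSON.parse(open("my_solution.json"))
#     UnitCommitment.validate(instance, solution)  # true if feasible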
function validate_units(instance, solution; tol=0.01)
function _validate_units(instance, solution; tol = 0.01)
err_count = 0
for unit in instance.units
@@ -115,21 +113,20 @@ function validate_units(instance, solution; tol=0.01)
actual_production_cost = solution["Production cost (\$)"][unit.name]
actual_startup_cost = solution["Startup cost (\$)"][unit.name]
is_on = bin(solution["Is on"][unit.name])
switch_off = bin(solution["Switch off"][unit.name]) # some formulations may not use this
for t in 1:instance.time
# Auxiliary variables
if t == 1
is_starting_up = (unit.initial_status < 0) && is_on[t]
is_shutting_down = (unit.initial_status > 0) && !is_on[t]
ramp_up = max(0, production[t] + reserve[t] - unit.initial_power)
ramp_up =
max(0, production[t] + reserve[t] - unit.initial_power)
ramp_down = max(0, unit.initial_power - production[t])
else
is_starting_up = !is_on[t-1] && is_on[t]
is_shutting_down = is_on[t-1] && !is_on[t]
ramp_up = max(0, production[t] + reserve[t] - production[t-1])
#ramp_down = max(0, production[t-1] - production[t])
ramp_down = max(0, production[t-1] + reserve[t-1] - production[t])
ramp_down = max(0, production[t-1] - production[t])
end
# Compute production costs
@@ -146,78 +143,119 @@ function validate_units(instance, solution; tol=0.01)
# Production should be non-negative
if production[t] < -tol
@error @sprintf("Unit %s produces negative amount of power at time %d (%.2f)",
unit.name, t, production[t])
@error @sprintf(
"Unit %s produces negative amount of power at time %d (%.2f)",
unit.name,
t,
production[t]
)
err_count += 1
end
# Verify must-run
if !is_on[t] && unit.must_run[t]
@error @sprintf("Must-run unit %s is offline at time %d",
unit.name, t)
@error @sprintf(
"Must-run unit %s is offline at time %d",
unit.name,
t
)
err_count += 1
end
# Verify reserve eligibility
if !unit.provides_spinning_reserves[t] && reserve[t] > tol
@error @sprintf("Unit %s is not eligible to provide spinning reserves at time %d",
unit.name, t)
@error @sprintf(
"Unit %s is not eligible to provide spinning reserves at time %d",
unit.name,
t
)
err_count += 1
end
# If unit is on, must produce at least its minimum power
if is_on[t] && (production[t] < unit.min_power[t] - tol)
@error @sprintf("Unit %s produces below its minimum limit at time %d (%.2f < %.2f)",
unit.name, t, production[t], unit.min_power[t])
@error @sprintf(
"Unit %s produces below its minimum limit at time %d (%.2f < %.2f)",
unit.name,
t,
production[t],
unit.min_power[t]
)
err_count += 1
end
# If unit is on, must produce at most its maximum power
if is_on[t] && (production[t] + reserve[t] > unit.max_power[t] + tol)
@error @sprintf("Unit %s produces above its maximum limit at time %d (%.2f + %.2f> %.2f)",
unit.name, t, production[t], reserve[t], unit.max_power[t])
if is_on[t] &&
(production[t] + reserve[t] > unit.max_power[t] + tol)
@error @sprintf(
"Unit %s produces above its maximum limit at time %d (%.2f + %.2f> %.2f)",
unit.name,
t,
production[t],
reserve[t],
unit.max_power[t]
)
err_count += 1
end
# If unit is off, must produce zero
if !is_on[t] && production[t] + reserve[t] > tol
@error @sprintf("Unit %s produces power at time %d while off",
unit.name, t)
@error @sprintf(
"Unit %s produces power at time %d while off",
unit.name,
t
)
err_count += 1
end
# Startup limit
if is_starting_up && (ramp_up > unit.startup_limit + tol)
@error @sprintf("Unit %s exceeds startup limit at time %d (%.2f > %.2f)",
unit.name, t, ramp_up, unit.startup_limit)
@error @sprintf(
"Unit %s exceeds startup limit at time %d (%.2f > %.2f)",
unit.name,
t,
ramp_up,
unit.startup_limit
)
err_count += 1
end
# Shutdown limit
if is_shutting_down && (ramp_down > unit.shutdown_limit + tol)
@error @sprintf("Unit %s exceeds shutdown limit at time %d (%.2f > %.2f)\n\tproduction[t-1] = %.2f\n\treserve[t-1] = %.2f\n\tproduction[t] = %.2f\n\treserve[t] = %.2f\n\tis_on[t-1] = %d\n\tis_on[t] = %d",
unit.name, t, ramp_down, unit.shutdown_limit,
(t == 1 ? unit.initial_power : production[t-1]), production[t],
(t == 1 ? 0. : reserve[t-1]), reserve[t],
(t == 1 ? unit.initial_status != nothing && unit.initial_status > 0 : is_on[t-1]), is_on[t]
@error @sprintf(
"Unit %s exceeds shutdown limit at time %d (%.2f > %.2f)",
unit.name,
t,
ramp_down,
unit.shutdown_limit
)
err_count += 1
end
# Ramp-up limit
if !is_starting_up && !is_shutting_down && (ramp_up > unit.ramp_up_limit + tol)
@error @sprintf("Unit %s exceeds ramp up limit at time %d (%.2f > %.2f)",
unit.name, t, ramp_up, unit.ramp_up_limit)
if !is_starting_up &&
!is_shutting_down &&
(ramp_up > unit.ramp_up_limit + tol)
@error @sprintf(
"Unit %s exceeds ramp up limit at time %d (%.2f > %.2f)",
unit.name,
t,
ramp_up,
unit.ramp_up_limit
)
err_count += 1
end
# Ramp-down limit
if !is_starting_up && !is_shutting_down && (ramp_down > unit.ramp_down_limit + tol)
@error @sprintf("Unit %s exceeds ramp down limit at time %d (%.2f > %.2f)\n\tproduction[t-1] = %.2f\n\treserve[t-1] = %.2f\n\tproduction[t] = %.2f\n\treserve[t] = %.2f\n\tis_on[t-1] = %d\n\tis_on[t] = %d",
unit.name, t, ramp_down, unit.ramp_down_limit,
(t == 1 ? unit.initial_power : production[t-1]), production[t],
(t == 1 ? 0. : reserve[t-1]), reserve[t],
(t == 1 ? unit.initial_status != nothing && unit.initial_status > 0 : is_on[t-1]), is_on[t]
if !is_starting_up &&
!is_shutting_down &&
(ramp_down > unit.ramp_down_limit + tol)
@error @sprintf(
"Unit %s exceeds ramp down limit at time %d (%.2f > %.2f)",
unit.name,
t,
ramp_down,
unit.ramp_down_limit
)
err_count += 1
end
@@ -234,10 +272,7 @@ function validate_units(instance, solution; tol=0.01)
break
end
end
if t == time_down + 1 && !switch_off[1]
# If unit has always been off, then the correct startup cost depends on how long was it off before t = 1
# Absent known initial conditions, we assume it was off for the minimum downtime
# TODO: verify the formulations are making the same assumption...
if t == time_down + 1
initial_down = unit.min_downtime
if unit.initial_status < 0
initial_down = -unit.initial_status
@@ -254,8 +289,11 @@ function validate_units(instance, solution; tol=0.01)
# Check minimum downtime
if time_down < unit.min_downtime
@error @sprintf("Unit %s violates minimum downtime at time %d",
unit.name, t)
@error @sprintf(
"Unit %s violates minimum downtime at time %d",
unit.name,
t
)
err_count += 1
end
end
@@ -286,62 +324,92 @@ function validate_units(instance, solution; tol=0.01)
# Check minimum uptime
if time_up < unit.min_uptime
@error @sprintf("Unit %s violates minimum uptime at time %d",
unit.name, t)
@error @sprintf(
"Unit %s violates minimum uptime at time %d",
unit.name,
t
)
err_count += 1
end
end
# Verify production costs
if abs(actual_production_cost[t] - production_cost) > 1.00
@error @sprintf("Unit %s has unexpected production cost at time %d (%.2f should be %.2f)",
unit.name, t, actual_production_cost[t], production_cost)
@error @sprintf(
"Unit %s has unexpected production cost at time %d (%.2f should be %.2f)",
unit.name,
t,
actual_production_cost[t],
production_cost
)
err_count += 1
end
# Verify startup costs
if abs(actual_startup_cost[t] - startup_cost) > 1.00
@error @sprintf("Unit %s has unexpected startup cost at time %d (%.2f should be %.2f)",
unit.name, t, actual_startup_cost[t], startup_cost)
@error @sprintf(
"Unit %s has unexpected startup cost at time %d (%.2f should be %.2f)",
unit.name,
t,
actual_startup_cost[t],
startup_cost
)
err_count += 1
end
end
end
return err_count
end
function validate_reserve_and_demand(instance, solution, tol=0.01)
function _validate_reserve_and_demand(instance, solution, tol = 0.01)
err_count = 0
for t in 1:instance.time
load_curtail = 0
fixed_load = sum(b.load[t] for b in instance.buses)
production = sum(solution["Production (MW)"][g.name][t]
for g in instance.units)
if "Load curtail (MW)" in keys(solution)
load_curtail = sum(solution["Load curtail (MW)"][b.name][t]
for b in instance.buses)
ps_load = 0
if length(instance.price_sensitive_loads) > 0
ps_load = sum(
solution["Price-sensitive loads (MW)"][ps.name][t] for
ps in instance.price_sensitive_loads
)
end
balance = fixed_load - load_curtail - production
production =
sum(solution["Production (MW)"][g.name][t] for g in instance.units)
if "Load curtail (MW)" in keys(solution)
load_curtail = sum(
solution["Load curtail (MW)"][b.name][t] for
b in instance.buses
)
end
balance = fixed_load - load_curtail - production + ps_load
# Verify that production equals demand
if abs(balance) > tol
@error @sprintf("Non-zero power balance at time %d (%.2f - %.2f - %.2f != 0)",
t, fixed_load, load_curtail, production)
@error @sprintf(
"Non-zero power balance at time %d (%.2f + %.2f - %.2f - %.2f != 0)",
t,
fixed_load,
ps_load,
load_curtail,
production,
)
err_count += 1
end
# Verify spinning reserves
reserve = sum(solution["Reserve (MW)"][g.name][t] for g in instance.units)
reserve =
sum(solution["Reserve (MW)"][g.name][t] for g in instance.units)
if reserve < instance.reserves.spinning[t] - tol
@error @sprintf("Insufficient spinning reserves at time %d (%.2f should be %.2f)",
t, reserve, instance.reserves.spinning[t])
@error @sprintf(
"Insufficient spinning reserves at time %d (%.2f should be %.2f)",
t,
reserve,
instance.reserves.spinning[t],
)
err_count += 1
end
end
return err_count
end

View File

@@ -1,471 +0,0 @@
using DataStructures # for OrderedDict
using JuMP
##################################################
# Variables
#mutable struct UCVariable
# "Name of the variable."
# name::Symbol
# "What does the variable represent?"
# description::String
# "Global lower bound for the variable (may be adjusted later)."
# lb::Float64
# "Global upper bound for the variable (may be adjusted later)."
# ub::Float64
# "Is the variable integer-restricted?"
# integer::Bool
# "What are we indexing over?"*
# " Recursive structure, e.g., [X,Y] means Y is a field in X,"*
# " and [X,[Y1,Z],Y2] means Y1 and Y2 are fields in X and Z is a field in Y1.\n"*
# " [ X, [Y,A,B], [Y,A,A], [Z,[D,E],F], T ]\n"*
# " => [x, y1, y1.a, y1.b, y2, y2.a1, y2.a2, z, z.d, z.d.e, z.f, t]."
# indices::Vector
# "Function to add the variable; if this is missing, we will attempt to add the variable automatically using the `indices`. Signature should be (variable, model.vars.familyname, mip, instance)."
# add_variable::Union{Function,Nothing}
#end # UCVariable
# TODO Above did not work for some reason
mutable struct UCVariable
name::Symbol
description::String
lb::Float64
ub::Float64
integer::Bool
indices::Vector
add_variable::Union{Function,Nothing}
end
"""
It holds that x(t,t') = 0 if t' does not belong to 𝒢 = [t+DT, t+TC-1].
This is because DT is the minimum downtime, so x(t,t') = 1 is impossible for t' < t+DT,
and TC is the "time until cold": a generator that starts at t' >= t+TC always incurs the maximum (cold) startup cost.
"""
function add_downtime_arcs(var::UCVariable,
x::OrderedDict,
mip::JuMP.Model,
instance::UnitCommitmentInstance)
T = instance.time
for g in instance.units
S = length(g.startup_categories)
if S == 0
continue
end
DT = g.min_downtime # minimum time offline
TC = g.startup_categories[S].delay # time offline until totally cold
for t1 = 1:T-1
for t2 = t1+1:T
# It holds that x(t,t') = 0 if t' does not belong to 𝒢 = [t+DT, t+TC-1]
# This is because DT is the minimum downtime, so there is no way x(t,t')=1 for t'<t+DT
# and TC is the "time until cold" => if the generator starts afterwards, always has max cost
if (t2 < t1 + DT) || (t2 >= t1 + TC)
continue
end
name = string(var.name, "[", g.name, ",", t1, ",", t2, "]")
x[g.name, t1, t2] = @variable(mip,
lower_bound=var.lb,
upper_bound=var.ub,
integer=var.integer,
base_name=name)
end # loop over time 2
end # loop over time 1
end # loop over units
end # add_downtime_arcs
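# Illustrative example (values are hypothetical): for a generator with
# min_downtime DT = 4 and coldest startup delay TC = 8, the loop above creates
# x[g.name, 1, t2] only for t2 in 5:8, i.e., t2 in [t1 + DT, t1 + TC - 1].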
"""
If there is a penalty specified for not meeting the reserve, then we add a reserve shortfall variable.
"""
function add_reserve_shortfall(var::UCVariable,
x::OrderedDict,
mip::JuMP.Model,
instance::UnitCommitmentInstance)
T = instance.time
for t = 1:T
if instance.shortfall_penalty[t] > 1e-7
name = string(var.name, "[", t, "]")
x[t] = @variable(mip,
lower_bound=var.lb,
upper_bound=var.ub,
integer=var.integer,
base_name=name)
end
end # loop over time
end # add_reserve_shortfall
"""
Variables that the model may (or may not) use.
Note the relationship
r_g(t) = bar{p}_g(t) - p_g(t)
= bar{p}'_g(t) - p'_g(t)
"""
var_list = OrderedDict{Symbol,UCVariable}(
:prod
=> UCVariable(:prod,
"[gen, t]; power from generator gen at time t; p_g(t) = p'_g(t) + g.min_power[t] * u_g(t)",
0., Inf, false,
[Unit, Time], nothing),
:prod_above
=> UCVariable(:prod_above,
"[gen, t]; production above minimum required level; p'_g(t)",
0., Inf, false,
[Unit, Time], nothing ),
:max_power_avail
=> UCVariable(:max_power_avail,
"[gen, t]; maximum power available from generator gen at time t; bar{p}_g(t) = p_g(t) + r_g(t)",
0., Inf, false,
[Unit, Time], nothing),
:max_power_avail_above
=> UCVariable(:max_power_avail_above,
"[gen, t]; maximum power available above minimum from generator gen at time t; bar{p}'_g(t)",
0., Inf, false,
[Unit, Time], nothing),
:segprod
=> UCVariable(:segprod,
"[gen, seg, t]; how much generator gen produces on segment seg in time t; p_g^l(t)",
0., Inf, false,
[ [Unit, CostSegment], Time], nothing),
:reserve
=> UCVariable(:reserve,
"[gen, t]; reserves provided by gen at t; r_g(t)",
0., Inf, false,
[Unit, Time], nothing),
:reserve_shortfall
=> UCVariable(:reserve_shortfall,
"[t]; reserve shortfall at gen at t; s_R(t)",
0., Inf, false,
[Time], add_reserve_shortfall),
:is_on
=> UCVariable(:is_on,
"[gen, t]; is gen on at t; u_g(t)",
0., 1., true,
[Unit, Time], nothing),
:switch_on
=> UCVariable(:switch_on,
"[gen, t]; indicator that gen will be turned on at t; v_g(t)",
0., 1., true,
[Unit, Time], nothing),
:switch_off
=> UCVariable(:switch_off,
"[gen, t]; indicator that gen will be turned off at t; w_g(t)",
0., 1., true,
[Unit, Time], nothing),
:net_injection
=> UCVariable(:net_injection,
"[bus.name, t]",
-1e100, Inf, false,
[Bus, Time], nothing),
:curtail
=> UCVariable(:curtail,
"[bus.name, t]; upper bound is max load at the bus at time t",
0., Inf, false,
[Bus, Time], nothing),
:flow
=> UCVariable(:flow,
"[violation.monitored_line.name, t]",
-1e100, Inf, false,
[Violation, Time], nothing),
:overflow
=> UCVariable(:overflow,
"[transmission_line.name, t]; how much flow above the transmission limits (in MW) is allowed",
0., Inf, false,
[TransmissionLine, Time], nothing),
:loads
=> UCVariable(:loads,
"[price_sensitive_load.name, t]; production to meet demand at a set price, if it is economically sensible, independent of the rest of the demand; upper bound is demand at this price at time t",
0., Inf, false,
[PriceSensitiveLoad, Time], nothing),
:startup
=> UCVariable(:startup,
"[gen, startup_category, t]; indicator that generator g starts up in startup_category at time t; 𝛿_g^s(t)",
0., 1., true,
[ [Unit, StartupCategory], Time], nothing),
:downtime_arc
=> UCVariable(:downtime_arc,
"[gen, t, t']; indicator for shutdown at t and starting at t'",
0., 1., true,
[Unit, Time, Time], add_downtime_arcs),
) # var_list
#var_symbol_list =
# [
# :prod_above, # [gen, t], ≥ 0
# :segprod, # [gen, t, segment], ≥ 0
# :reserve, # [gen, t], ≥ 0
# :is_on, # [gen, t], binary
# :switch_on, # [gen, t], binary
# :switch_off, # [gen, t], binary
# :net_injection, # [bus.name, t], urs?
# :curtail, # [bus.name, t], domain [0, b.load[t]]
# :overflow, # [transmission_line.name, t], ≥ 0
# :loads, # [price_sensitive_load.name, t], domain [0, ps.demand[t]]
# :startup # [gen, t, startup_category], binary
# ]
"""
Maps each UCElement type to the corresponding field of UnitCommitmentInstance.
This is used to determine indexing and ranges, e.g., `is_on` is indexed over Unit and Time,
so the variable `is_on` ranges in the first index from 1 to length(instance.units)
and in the second index from 1 to instance.time.
"""
ind_to_field_dict = OrderedDict{Type{<:UCElement},Symbol}(
Time => :time,
Bus => :buses,
Unit => :units,
TransmissionLine => :lines,
PriceSensitiveLoad => :price_sensitive_loads,
CostSegment => :cost_segments,
StartupCategory => :startup_categories,
) # ind_to_field_dict
"""
Take indices and convert them to fields of UnitCommitmentInstance.
"""
function ind_to_field(index::Union{Vector,Type{<:UCElement}}) :: Union{Vector,Symbol}
if isa(index, Type{<:UCElement})
return ind_to_field_dict[index]
else
return [ ind_to_field(t) for t in index ]
end
end # ind_to_field
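# Illustration (not in the original source): ind_to_field([Unit, Time]) returns
# [:units, :time], while ind_to_field([[Unit, CostSegment], Time]) returns
# [[:units, :cost_segments], :time]; the latter is how :segprod ends up with one
# variable per (unit, cost segment, time period) triple.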
function num_indices(v) :: Int64
if !isa(v, Array)
return 1
else
return sum(num_indices(v[i]) for i in 1:length(v))
end
end # num_indices
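# num_indices counts the scalar indices in a (possibly nested) index
# specification, e.g.
#     num_indices(Unit)                         # -> 1
#     num_indices([Unit, Time])                 # -> 2
#     num_indices([[Unit, CostSegment], Time])  # -> 3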
"""
Can return
* UnitRange -> iterate over this range
* Array{UnitRange} -> cross product of the ranges in the array
* Tuple(UnitRange, Array{UnitRange}) -> the array length should be the same as the range of the UnitRange
"""
function get_indices_tuple(obj::Any, fields::Union{Symbol,Vector,Nothing} = nothing)
if isa(fields, Symbol)
return get_indices_tuple(getfield(obj,fields))
end
if fields === nothing || (isa(fields, Array) && isempty(fields))
if isa(obj, Array)
return UnitRange(1,length(obj))
elseif isa(obj, Int)
return UnitRange(1,obj)
else
return UnitRange{Int64}(0:-1)
#return UnitRange(1,1)
end
end
if isa(obj,Array)
indices = (
UnitRange(1,length(obj)),
([
isa(f,Array) ? get_indices_tuple(getfield(x, f[1]), f[2:end]) : get_indices_tuple(getfield(x, f))
for x in obj
] for f in fields)...
)
else
indices = ()
for f in fields
if isa(f,Array)
indices = (indices..., get_indices_tuple(getfield(obj, f[1]), f[2:end]))
else
indices = (indices..., get_indices_tuple(obj,f))
end
end
end # check if obj is Array or not
return indices
end # get_indices_tuple
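# Sketch of the shapes returned, assuming `instance` is a UnitCommitmentInstance
# loaded elsewhere (e.g. via UnitCommitment.read_benchmark):
#     get_indices_tuple(instance, :units)          # -> 1:length(instance.units)
#     get_indices_tuple(instance, :time)           # -> 1:instance.time
#     get_indices_tuple(instance, [:units, :time]) # -> (1:length(instance.units), 1:instance.time)
#     get_indices_tuple(instance, [[:units, :cost_segments], :time])
#     # -> ((1:length(instance.units), [1:length(g.cost_segments) for g in instance.units]),
#     #     1:instance.time)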
function loop_over_indices(indices::Any)
loop = nothing
should_print = false
if isa(indices, UnitRange)
loop = indices
elseif isa(indices, Array{UnitRange{Int64}}) || isa(indices, Tuple{Int, UnitRange})
loop = Base.Iterators.product(Tuple(indices)...)
elseif isa(indices, Tuple{UnitRange, Array})
loop = ()
for t in zip(indices...)
loop = (loop..., loop_over_indices(t)...)
end
elseif isa(indices,Tuple)
loop = ()
for i in indices
loop = (loop..., loop_over_indices(i))
end
loop = Base.Iterators.product(loop...)
else
error("Unexpected index structure of type ", typeof(indices))
end
if should_print
for i in loop
@show i
end
end
return loop
end # loop_over_indices
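# Sketch: loop_over_indices turns an index description into an iterable of
# index tuples; e.g. for a variable indexed over [Unit, Time] with 3 units and
# 2 time periods,
#     collect(loop_over_indices((1:3, 1:2)))
# yields a 3x2 array containing (1, 1), (2, 1), (3, 1), (1, 2), (2, 2), (3, 2).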
function expand_tuple(x::Tuple)
y = ()
for i in x
if isa(i, Tuple)
y = (y..., expand_tuple(i)...)
else
y = (y..., i)
end
end
return y
end # expand_tuple
function expand_tuple(X::Array{<:Tuple})
return [ expand_tuple(x) for x in X ]
end # expand_tuple
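# expand_tuple flattens nested index tuples produced by loop_over_indices, e.g.
#     expand_tuple((1, (2, 3)))             # -> (1, 2, 3)
#     expand_tuple([(1, (2, 3)), (4, 5)])   # -> [(1, 2, 3), (4, 5)]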
function get_indices(x::Array)
return expand_tuple(x)
end
function get_indices(x::Base.Iterators.ProductIterator)
return get_indices(collect(x))
end
"""
Access `t.f`, special terminal case of `get_nested_field`.
"""
function get_nested_field(t::Any, f::Symbol)
return getfield(t,f)
end # get_nested_field
"""
Access `t.f`, where `f` could be a subfield.
"""
function get_nested_field(t::Any, f::Vector{Symbol})
if length(f) > 1
return get_nested_field(getfield(t,f[1]), f[2:end])
else
return getfield(t,f[1])
end
end # get_nested_field
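# For example, assuming `g` is a Unit with fields `name` and `bus`:
#     get_nested_field(g, :name)          # -> g.name
#     get_nested_field(g, [:bus, :name])  # -> g.bus.name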
"""
Given a set of indices of UCVariable, e.g., [[X,Y],T],
and a UnitCommitmentInstance instance,
if we want to access the field corresponding to Y,
then we call get_nested_field(instance, [[X,Y],T], 2, (4,3)),
which will return instance.X[4].Y[3] if Y is a vector,
and just instance.X[4].Y otherwise.
===
Termination Conditions
If `i` <= 0, then we only care about instance, and not the field.
If `field` is a Symbol and `i` >= 1, then we want to explore instance.field (index t[i] or t).
Note that the termination branch can only be reached with `i` >= 1 when `field` is a Symbol.
If `i` == 1, then `t` can be an Int.
===
Parameters
* instance::Any --> all fields will be from instance, or nested fields of fields of instance.
* field::Union{Vector,Symbol,Nothing} --> either the field we want to access, or a vector of fields, and we will want field[i].
* i::Int --> which field to access.
* t::Tuple --> how to go through the fields of instance to get the right field, length needs to be at least `i`.
"""
function get_nested_field(instance::Any, field::Union{Vector,Symbol,Nothing}, i::Int, t::Union{Tuple, Int})
# Check data
if isa(field, Vector)
if i >= 2 && (!isa(t,Tuple) || length(t) < i)
error("Tuple of indices to get nested field needs to be at least the length of the index we want to get.")
end
end
if isa(field, Symbol) || i <= 0
# i = 0 can happen in the recursive call
# What it means is that we do not want a field of the instance, but the instance itself
# TODO handle other iterable types and empty arrays
f = (isa(field, Symbol) && i >= 1) ? getfield(instance, field) : instance
if isa(f,Vector)
if length(f) == 0
error("Trying to iterate over empty field!")
else
return isa(t,Int) ? f[t] : f[t[i]]
end
else
return f
end
end # check termination conditions (f is field or i <= 0)
# Loop over the fields until we find where index i is located
# It may be nested inside an array, so that is why we recurse
start_ind = 0
for f in field
curr_len = isa(f, Vector) ? length(f) : 1
if start_ind + curr_len >= i
if isa(f, Vector)
new_field_is_iterable = isa(getfield(instance, f[1]), Vector)
if new_field_is_iterable
return get_nested_field(getfield(instance, f[1])[t[start_ind+1]], f[2:end], i - start_ind - 1, isa(t,Tuple) ? t[start_ind+2:end] : t)
else
return get_nested_field(getfield(instance, f[1]), f[2:end], i - start_ind - 1, isa(t,Tuple) ? t[start_ind+2:end] : t)
end
else
# f is a Symbol here (non-nested index)
return get_nested_field(instance, f, 1, isa(t,Tuple) ? t[start_ind+1] : t)
end
end
start_ind += curr_len
end
return nothing
end # get_nested_field
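# Worked example, mirroring the docstring above: for the index specification
# that ind_to_field produces for segprod, [[:units, :cost_segments], :time],
#     get_nested_field(instance, [[:units, :cost_segments], :time], 2, (4, 3, 1))
# descends into instance.units[4] and returns instance.units[4].cost_segments[3],
# while asking for index 3 (the :time position) returns instance.time itself,
# since time is an Int rather than a vector. (`instance` is assumed to be a
# UnitCommitmentInstance loaded elsewhere.)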
#"""
#Get ranges for the indices of a UCVariable along dimension `i`,
#making sure that the right fields ranges are calculated via `get_nested_field` and `ind_to_field`.
#"""
#function get_range(arr::UCVariable, instance::UnitCommitmentInstance, i::Int) :: UnitRange
# arr = ind_to_field[var.indices[i]]
# f = get_nested_field(instance, arr)
# if isa(f, Array)
# return 1:length(f)
# elseif isa(f, Int)
# return 1:f
# else
# error("Unknown type to generate UnitRange from: ", typeof(f))
# end
#end # get_range
export UCVariable

View File

@@ -6,14 +6,17 @@ using UnitCommitment
@testset "convert" begin
@testset "EGRET solution" begin
solution = UnitCommitment.read_egret_solution("fixtures/egret_output.json.gz")
solution =
UnitCommitment._read_egret_solution("fixtures/egret_output.json.gz")
for attr in ["Is on", "Production (MW)", "Production cost (\$)"]
@test attr in keys(solution)
@test "115_STEAM_1" in keys(solution[attr])
@test length(solution[attr]["115_STEAM_1"]) == 48
end
@test solution["Production cost (\$)"]["315_CT_6"][15:20] == [0., 0., 884.44, 1470.71, 1470.71, 884.44]
@test solution["Startup cost (\$)"]["315_CT_6"][15:20] == [0., 0., 5665.23, 0., 0., 0.]
@test solution["Production cost (\$)"]["315_CT_6"][15:20] ==
[0.0, 0.0, 884.44, 1470.71, 1470.71, 884.44]
@test solution["Startup cost (\$)"]["315_CT_6"][15:20] ==
[0.0, 0.0, 5665.23, 0.0, 0.0, 0.0]
@test length(keys(solution["Is on"])) == 154
end
end

View File

@@ -44,7 +44,7 @@ using UnitCommitment, LinearAlgebra, Cbc, JuMP, JSON, GZip
@test unit.startup_limit == 1e6
@test unit.shutdown_limit == 1e6
@test unit.must_run == [false for t in 1:4]
@test unit.min_power_cost == [1400. for t in 1:4]
@test unit.min_power_cost == [1400.0 for t in 1:4]
@test unit.min_uptime == 1
@test unit.min_downtime == 1
@test unit.provides_spinning_reserves == [true for t in 1:4]
@@ -76,7 +76,7 @@ using UnitCommitment, LinearAlgebra, Cbc, JuMP, JSON, GZip
@test unit.startup_limit == 70.0
@test unit.shutdown_limit == 70.0
@test unit.must_run == [true for t in 1:4]
@test unit.min_power_cost == [0. for t in 1:4]
@test unit.min_power_cost == [0.0 for t in 1:4]
@test unit.min_uptime == 1
@test unit.min_downtime == 1
@test unit.provides_spinning_reserves == [true for t in 1:4]
@@ -97,8 +97,22 @@ using UnitCommitment, LinearAlgebra, Cbc, JuMP, JSON, GZip
load = instance.price_sensitive_loads[1]
@test load.name == "ps1"
@test load.bus.name == "b3"
@test load.revenue == [100. for t in 1:4]
@test load.demand == [50. for t in 1:4]
@test load.revenue == [100.0 for t in 1:4]
@test load.demand == [50.0 for t in 1:4]
end
@testset "read sub-hourly" begin
instance = UnitCommitment.read_benchmark("test/case14-sub-hourly")
@test instance.time == 4
unit = instance.units[1]
@test unit.name == "g1"
@test unit.min_uptime == 2
@test unit.min_downtime == 2
@test length(unit.startup_categories) == 3
@test unit.startup_categories[1].delay == 2
@test unit.startup_categories[2].delay == 4
@test unit.startup_categories[3].delay == 6
@test unit.initial_status == -200
end
@testset "slice" begin
@@ -135,8 +149,10 @@ using UnitCommitment, LinearAlgebra, Cbc, JuMP, JSON, GZip
# Should be able to build model without errors
optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
model = build_model(instance=modified,
model = build_model(
instance = modified,
optimizer = optimizer,
variable_names=true)
variable_names = true,
)
end
end

View File

@@ -2,45 +2,31 @@
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using UnitCommitment, LinearAlgebra, JuMP
USE_GUROBI = (Base.find_package("Gurobi") != nothing)
USE_CBC = !USE_GUROBI
if USE_GUROBI
using Gurobi
else
using Cbc
end
NUM_THREADS = 4
LOG_LEVEL = 1
using UnitCommitment, LinearAlgebra, Cbc, JuMP
@testset "Model" begin
@testset "Run" begin
instance = UnitCommitment.read_benchmark("test/case14")
#instance = UnitCommitment.read_benchmark("matpower/case3375wp/2017-02-01")
#instance = UnitCommitment.read_benchmark("matpower/case1888rte/2017-02-01")
for line in instance.lines, t in 1:4
line.normal_flow_limit[t] = 10.0
end
#for formulation in [UnitCommitment.DefaultFormulation, UnitCommitment.TightFormulation]
for formulation in [UnitCommitment.TightFormulation]
@info string("Running test of ", formulation)
if USE_CBC
optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => LOG_LEVEL)
end
if USE_GUROBI
optimizer = optimizer_with_attributes(Gurobi.Optimizer, "Threads" => NUM_THREADS)
end
model = build_model(instance=instance,
optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
model = build_model(
instance = instance,
optimizer = optimizer,
variable_names = true,
formulation=formulation)
JuMP.write_to_file(model.mip, "test.mps")
)
@test name(model[:is_on]["g1", 1]) == "is_on[g1,1]"
# Optimize and retrieve solution
UnitCommitment.optimize!(model)
solution = get_solution(model)
solution = UnitCommitment.solution(model)
# Write solution to a file
filename = tempname()
UnitCommitment.write(filename, solution)
loaded = JSON.parsefile(filename)
@test length(loaded["Is on"]) == 6
# Verify solution
@test UnitCommitment.validate(instance, solution)
@@ -49,8 +35,5 @@ LOG_LEVEL = 1
UnitCommitment.fix!(model, solution)
UnitCommitment.optimize!(model)
@test UnitCommitment.validate(instance, solution)
#@show solution
end # loop over components
end # end testset Run
end # end test
end
end

View File

@@ -3,6 +3,9 @@
# Released under the modified BSD license. See COPYING.md for more details.
using Test
using UnitCommitment
UnitCommitment._setup_logger()
@testset "UnitCommitment" begin
include("instance_test.jl")

View File

@@ -3,51 +3,83 @@
# Released under the modified BSD license. See COPYING.md for more details.
using UnitCommitment, Test, LinearAlgebra
import UnitCommitment: Violation, _offer, _query
@testset "Screening" begin
@testset "Violation filter" begin
instance = UnitCommitment.read_benchmark("test/case14")
filter = ViolationFilter(max_per_line=1, max_total=2)
filter = UnitCommitment.ViolationFilter(max_per_line = 1, max_total = 2)
offer(filter, Violation(time=1,
_offer(
filter,
Violation(
time = 1,
monitored_line = instance.lines[1],
outage_line = nothing,
amount=100.))
offer(filter, Violation(time=1,
amount = 100.0,
),
)
_offer(
filter,
Violation(
time = 1,
monitored_line = instance.lines[1],
outage_line = instance.lines[1],
amount=300.))
offer(filter, Violation(time=1,
amount = 300.0,
),
)
_offer(
filter,
Violation(
time = 1,
monitored_line = instance.lines[1],
outage_line = instance.lines[5],
amount=500.))
offer(filter, Violation(time=1,
amount = 500.0,
),
)
_offer(
filter,
Violation(
time = 1,
monitored_line = instance.lines[1],
outage_line = instance.lines[4],
amount=400.))
offer(filter, Violation(time=1,
amount = 400.0,
),
)
_offer(
filter,
Violation(
time = 1,
monitored_line = instance.lines[2],
outage_line = instance.lines[1],
amount=200.))
offer(filter, Violation(time=1,
amount = 200.0,
),
)
_offer(
filter,
Violation(
time = 1,
monitored_line = instance.lines[2],
outage_line = instance.lines[8],
amount=100.))
amount = 100.0,
),
)
actual = query(filter)
expected = [Violation(time=1,
actual = _query(filter)
expected = [
Violation(
time = 1,
monitored_line = instance.lines[2],
outage_line = instance.lines[1],
amount=200.),
Violation(time=1,
amount = 200.0,
),
Violation(
time = 1,
monitored_line = instance.lines[1],
outage_line = instance.lines[5],
amount=500.)]
amount = 500.0,
),
]
@test actual == expected
end
@@ -57,19 +89,24 @@ using UnitCommitment, Test, LinearAlgebra
line.normal_flow_limit[t] = 1.0
line.emergency_flow_limit[t] = 1.0
end
isf = UnitCommitment.injection_shift_factors(lines=instance.lines,
buses=instance.buses)
lodf = UnitCommitment.line_outage_factors(lines=instance.lines,
isf = UnitCommitment._injection_shift_factors(
lines = instance.lines,
buses = instance.buses,
isf=isf)
)
lodf = UnitCommitment._line_outage_factors(
lines = instance.lines,
buses = instance.buses,
isf = isf,
)
inj = [1000.0 for b in 1:13, t in 1:instance.time]
overflow = [0.0 for l in instance.lines, t in 1:instance.time]
violations = UnitCommitment.find_violations(instance=instance,
violations = UnitCommitment._find_violations(
instance = instance,
net_injections = inj,
overflow = overflow,
isf = isf,
lodf=lodf)
lodf = lodf,
)
@test length(violations) == 20
end
end

View File

@@ -7,19 +7,39 @@ using UnitCommitment, Test, LinearAlgebra
@testset "Sensitivity" begin
@testset "Susceptance matrix" begin
instance = UnitCommitment.read_benchmark("test/case14")
actual = UnitCommitment.susceptance_matrix(instance.lines)
actual = UnitCommitment._susceptance_matrix(instance.lines)
@test size(actual) == (20, 20)
expected = Diagonal([29.5, 7.83, 8.82, 9.9, 10.04,
10.2, 41.45, 8.35, 3.14, 6.93,
8.77, 6.82, 13.4, 9.91, 15.87,
20.65, 6.46, 9.09, 8.73, 5.02])
expected = Diagonal([
29.5,
7.83,
8.82,
9.9,
10.04,
10.2,
41.45,
8.35,
3.14,
6.93,
8.77,
6.82,
13.4,
9.91,
15.87,
20.65,
6.46,
9.09,
8.73,
5.02,
])
@test round.(actual, digits = 2) == expected
end
@testset "Reduced incidence matrix" begin
instance = UnitCommitment.read_benchmark("test/case14")
actual = UnitCommitment.reduced_incidence_matrix(lines=instance.lines,
buses=instance.buses)
actual = UnitCommitment._reduced_incidence_matrix(
lines = instance.lines,
buses = instance.buses,
)
@test size(actual) == (20, 13)
@test actual[1, 1] == -1.0
@test actual[3, 1] == 1.0
@@ -63,49 +83,59 @@ using UnitCommitment, Test, LinearAlgebra
@testset "Injection Shift Factors (ISF)" begin
instance = UnitCommitment.read_benchmark("test/case14")
actual = UnitCommitment.injection_shift_factors(lines=instance.lines,
buses=instance.buses)
actual = UnitCommitment._injection_shift_factors(
lines = instance.lines,
buses = instance.buses,
)
@test size(actual) == (20, 13)
@test round.(actual, digits = 2) == [
-0.84 -0.75 -0.67 -0.61 -0.63 -0.66 -0.66 -0.65 -0.65 -0.64 -0.63 -0.63 -0.64;
-0.16 -0.25 -0.33 -0.39 -0.37 -0.34 -0.34 -0.35 -0.35 -0.36 -0.37 -0.37 -0.36;
0.03 -0.53 -0.15 -0.1 -0.12 -0.14 -0.14 -0.14 -0.13 -0.13 -0.12 -0.12 -0.13;
0.06 -0.14 -0.32 -0.22 -0.25 -0.3 -0.3 -0.29 -0.28 -0.27 -0.25 -0.26 -0.27;
0.08 -0.07 -0.2 -0.29 -0.26 -0.22 -0.22 -0.22 -0.23 -0.25 -0.26 -0.26 -0.24;
0.03 0.47 -0.15 -0.1 -0.12 -0.14 -0.14 -0.14 -0.13 -0.13 -0.12 -0.12 -0.13;
0.08 0.31 0.5 -0.3 -0.03 0.36 0.36 0.28 0.23 0.1 -0.0 0.02 0.17;
0.0 0.01 0.02 -0.01 -0.22 -0.63 -0.63 -0.45 -0.41 -0.32 -0.24 -0.25 -0.36;
0.0 0.01 0.01 -0.01 -0.12 -0.17 -0.17 -0.26 -0.24 -0.18 -0.14 -0.14 -0.21;
-0.0 -0.02 -0.03 0.02 -0.66 -0.2 -0.2 -0.29 -0.36 -0.5 -0.63 -0.61 -0.43;
-0.0 -0.01 -0.02 0.01 0.21 -0.12 -0.12 -0.17 -0.28 -0.53 0.18 0.15 -0.03;
-0.0 -0.0 -0.0 0.0 0.03 -0.02 -0.02 -0.03 -0.02 0.01 -0.52 -0.17 -0.09;
-0.0 -0.01 -0.01 0.01 0.11 -0.06 -0.06 -0.09 -0.05 0.02 -0.28 -0.59 -0.31;
-0.0 -0.0 -0.0 -0.0 -0.0 -0.0 -1.0 -0.0 -0.0 -0.0 -0.0 -0.0 0.0 ;
0.0 0.01 0.02 -0.01 -0.22 0.37 0.37 -0.45 -0.41 -0.32 -0.24 -0.25 -0.36;
0.0 0.01 0.02 -0.01 -0.21 0.12 0.12 0.17 -0.72 -0.47 -0.18 -0.15 0.03;
0.0 0.01 0.01 -0.01 -0.14 0.08 0.08 0.12 0.07 -0.03 -0.2 -0.24 -0.6 ;
0.0 0.01 0.02 -0.01 -0.21 0.12 0.12 0.17 0.28 -0.47 -0.18 -0.15 0.03;
-0.0 -0.0 -0.0 0.0 0.03 -0.02 -0.02 -0.03 -0.02 0.01 0.48 -0.17 -0.09;
-0.0 -0.01 -0.01 0.01 0.14 -0.08 -0.08 -0.12 -0.07 0.03 0.2 0.24 -0.4 ]
-0.84 -0.75 -0.67 -0.61 -0.63 -0.66 -0.66 -0.65 -0.65 -0.64 -0.63 -0.63 -0.64
-0.16 -0.25 -0.33 -0.39 -0.37 -0.34 -0.34 -0.35 -0.35 -0.36 -0.37 -0.37 -0.36
0.03 -0.53 -0.15 -0.1 -0.12 -0.14 -0.14 -0.14 -0.13 -0.13 -0.12 -0.12 -0.13
0.06 -0.14 -0.32 -0.22 -0.25 -0.3 -0.3 -0.29 -0.28 -0.27 -0.25 -0.26 -0.27
0.08 -0.07 -0.2 -0.29 -0.26 -0.22 -0.22 -0.22 -0.23 -0.25 -0.26 -0.26 -0.24
0.03 0.47 -0.15 -0.1 -0.12 -0.14 -0.14 -0.14 -0.13 -0.13 -0.12 -0.12 -0.13
0.08 0.31 0.5 -0.3 -0.03 0.36 0.36 0.28 0.23 0.1 -0.0 0.02 0.17
0.0 0.01 0.02 -0.01 -0.22 -0.63 -0.63 -0.45 -0.41 -0.32 -0.24 -0.25 -0.36
0.0 0.01 0.01 -0.01 -0.12 -0.17 -0.17 -0.26 -0.24 -0.18 -0.14 -0.14 -0.21
-0.0 -0.02 -0.03 0.02 -0.66 -0.2 -0.2 -0.29 -0.36 -0.5 -0.63 -0.61 -0.43
-0.0 -0.01 -0.02 0.01 0.21 -0.12 -0.12 -0.17 -0.28 -0.53 0.18 0.15 -0.03
-0.0 -0.0 -0.0 0.0 0.03 -0.02 -0.02 -0.03 -0.02 0.01 -0.52 -0.17 -0.09
-0.0 -0.01 -0.01 0.01 0.11 -0.06 -0.06 -0.09 -0.05 0.02 -0.28 -0.59 -0.31
-0.0 -0.0 -0.0 -0.0 -0.0 -0.0 -1.0 -0.0 -0.0 -0.0 -0.0 -0.0 0.0
0.0 0.01 0.02 -0.01 -0.22 0.37 0.37 -0.45 -0.41 -0.32 -0.24 -0.25 -0.36
0.0 0.01 0.02 -0.01 -0.21 0.12 0.12 0.17 -0.72 -0.47 -0.18 -0.15 0.03
0.0 0.01 0.01 -0.01 -0.14 0.08 0.08 0.12 0.07 -0.03 -0.2 -0.24 -0.6
0.0 0.01 0.02 -0.01 -0.21 0.12 0.12 0.17 0.28 -0.47 -0.18 -0.15 0.03
-0.0 -0.0 -0.0 0.0 0.03 -0.02 -0.02 -0.03 -0.02 0.01 0.48 -0.17 -0.09
-0.0 -0.01 -0.01 0.01 0.14 -0.08 -0.08 -0.12 -0.07 0.03 0.2 0.24 -0.4
]
end
@testset "Line Outage Distribution Factors (LODF)" begin
instance = UnitCommitment.read_benchmark("test/case14")
isf_before = UnitCommitment.injection_shift_factors(lines=instance.lines,
buses=instance.buses)
lodf = UnitCommitment.line_outage_factors(lines=instance.lines,
isf_before = UnitCommitment._injection_shift_factors(
lines = instance.lines,
buses = instance.buses,
isf=isf_before)
)
lodf = UnitCommitment._line_outage_factors(
lines = instance.lines,
buses = instance.buses,
isf = isf_before,
)
for contingency in instance.contingencies
for lc in contingency.lines
prev_susceptance = lc.susceptance
lc.susceptance = 0.0
isf_after = UnitCommitment.injection_shift_factors(lines=instance.lines,
buses=instance.buses)
isf_after = UnitCommitment._injection_shift_factors(
lines = instance.lines,
buses = instance.buses,
)
lc.susceptance = prev_susceptance
for lm in instance.lines
expected = isf_after[lm.offset, :]
actual = isf_before[lm.offset, :] +
actual =
isf_before[lm.offset, :] +
lodf[lm.offset, lc.offset] * isf_before[lc.offset, :]
@test norm(expected - actual) < 1e-6
end

View File

@@ -4,18 +4,23 @@
using UnitCommitment, JSON, GZip, DataStructures
parse_case14() = JSON.parse(GZip.gzopen("../instances/test/case14.json.gz"),
dicttype=()->DefaultOrderedDict(nothing))
function parse_case14()
return JSON.parse(
GZip.gzopen("../instances/test/case14.json.gz"),
dicttype = () -> DefaultOrderedDict(nothing),
)
end
@testset "Validation" begin
@testset "fix!" begin
@testset "repair!" begin
@testset "Cost curve should be convex" begin
json = parse_case14()
json["Generators"]["g1"]["Production cost curve (MW)"] = [100, 150, 200]
json["Generators"]["g1"]["Production cost curve (\$)"] = [10, 25, 30]
instance = UnitCommitment.from_json(json, fix=false)
@test UnitCommitment.fix!(instance) == 4
json["Generators"]["g1"]["Production cost curve (MW)"] =
[100, 150, 200]
json["Generators"]["g1"]["Production cost curve (\$)"] =
[10, 25, 30]
instance = UnitCommitment._from_json(json, repair = false)
@test UnitCommitment.repair!(instance) == 4
end
@testset "Startup limit must be greater than Pmin" begin
@@ -23,17 +28,16 @@ parse_case14() = JSON.parse(GZip.gzopen("../instances/test/case14.json.gz"),
json["Generators"]["g1"]["Production cost curve (MW)"] = [100, 150]
json["Generators"]["g1"]["Production cost curve (\$)"] = [100, 150]
json["Generators"]["g1"]["Startup limit (MW)"] = 80
instance = UnitCommitment.from_json(json, fix=false)
@test UnitCommitment.fix!(instance) == 1
instance = UnitCommitment._from_json(json, repair = false)
@test UnitCommitment.repair!(instance) == 1
end
@testset "Startup costs and delays must be increasing" begin
json = parse_case14()
json["Generators"]["g1"]["Startup costs (\$)"] = [300, 200, 100]
json["Generators"]["g1"]["Startup delays (h)"] = [8, 4, 2]
instance = UnitCommitment.from_json(json, fix=false)
@test UnitCommitment.fix!(instance) == 4
end
instance = UnitCommitment._from_json(json, repair = false)
@test UnitCommitment.repair!(instance) == 4
end
end
end