Compare commits


3 Commits

Author SHA1 Message Date
675143967f Fix some merge issues 2020-11-20 12:17:59 -06:00
ba9e086bea Merge branch 'dev' into akazachk/formulations 2020-11-20 10:55:48 -06:00
Aleksandr Kazachkov 3baddf158a Implement multiple formulations 2020-11-20 10:52:23 -06:00
59 changed files with 5529 additions and 2185 deletions


@@ -1,5 +0,0 @@
always_for_in = true
always_use_return = true
margin = 80
remove_extra_newlines = true
short_to_long_function_def = true


@@ -1,25 +0,0 @@
---
name: Bug report
about: Something is broken in the package
title: ''
labels: ''
assignees: ''
---
## Description
A clear and concise description of what the bug is.
## Steps to Reproduce
Please describe how the developers can reproduce the problem on their own computers. Code snippets and sample input files are especially helpful. For example:
1. Install the package
2. Run the code below with the attached input file...
3. The following error appears...
## System Information
- Operating System: [e.g. Ubuntu 20.04]
- Julia version: [e.g. 1.4]
- Package version: [e.g. 0.0.1]


@@ -1,8 +0,0 @@
blank_issues_enabled: false
contact_links:
  - name: Feature Request
    url: https://github.com/ANL-CEEESA/UnitCommitment.jl/discussions/categories/feature-requests
    about: Submit ideas for new features and small enhancements
  - name: Help & FAQ
    url: https://github.com/ANL-CEEESA/UnitCommitment.jl/discussions/categories/help-faq
    about: Ask questions about the package and get help from the community

.github/workflows/benchmark.yml

@@ -0,0 +1,28 @@
name: Benchmark
on: push
jobs:
  benchmark:
    runs-on: [self-hosted, benchmark]
    if: "contains(github.event.head_commit.message, '[benchmark]')"
    timeout-minutes: 10080
    steps:
      - uses: actions/checkout@v1
      - name: Benchmark
        run: |
          julia --project=@. -e 'using Pkg; Pkg.instantiate()'
          make build/sysimage.so
          make -C benchmark clean
          make -C benchmark -kj4
          make -C benchmark tables
          make -C benchmark clean-mps clean-sol
      - name: Upload logs
        uses: actions/upload-artifact@v2
        with:
          name: Logs
          path: benchmark/results/*
      - name: Upload tables & charts
        uses: actions/upload-artifact@v2
        with:
          name: Tables
          path: benchmark/tables/*


@@ -1,28 +0,0 @@
name: lint
on:
  push:
  pull_request:
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: julia-actions/setup-julia@latest
        with:
          version: '1'
      - uses: actions/checkout@v1
      - name: Format check
        shell: julia --color=yes {0}
        run: |
          using Pkg
          Pkg.add(PackageSpec(name="JuliaFormatter", version="0.14.4"))
          using JuliaFormatter
          format("src", verbose=true)
          format("test", verbose=true)
          format("benchmark", verbose=true)
          out = String(read(Cmd(`git diff`)))
          if isempty(out)
              exit(0)
          end
          @error "Some files have not been formatted !!!"
          write(stdout, out)
          exit(1)


@@ -1,15 +1,19 @@
name: Tests
on:
  push:
+    paths:
+      - '**.jl'
+      - '**.toml'
  pull_request:
-  schedule:
-    - cron: '45 10 * * *'
+    paths:
+      - '**.jl'
+      - '**.toml'
jobs:
  test:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
-        julia-version: ['1.3', '1.4', '1.5', '1.6']
+        julia-version: ['1.3', '1.4', '1']
        julia-arch: [x64, x86]
        os: [ubuntu-latest, windows-latest, macOS-latest]
        exclude:

.gitignore

@@ -14,6 +14,3 @@ instances/_source
local
notebooks
TODO.md
-docs/_build
-.vscode
-Manifest.toml


@@ -1,49 +1,11 @@
-# Changelog
-All notable changes to this project will be documented in this file.
-- The format is based on [Keep a Changelog][changelog].
-- This project adheres to [Semantic Versioning][semver].
-- For versions before 1.0, we follow [the Pkg.jl convention][pkjjl] that `0.a.b` is compatible with `0.a.c`.
-[changelog]: https://keepachangelog.com/en/1.0.0/
-[semver]: https://semver.org/spec/v2.0.0.html
-[pkjjl]: https://pkgdocs.julialang.org/v1/compatibility/#compat-pre-1.0
-## [0.2.0] - 2021-05-28
-### Added
-- Add sub-hourly unit commitment.
-- Add `UnitCommitment.write(filename, solution)`.
-- Add mathematical formulation to the documentation.
-### Changed
-- Rename "Time (h)" parameter to "Time horizon (h)".
-- Rename `UnitCommitment.get_solution` to `UnitCommitment.solution`, for better consistency with JuMP style.
-- Add an underscore to the name of all functions that do not appear in the documentation (e.g. `something` becomes `_something`). These functions are not part of the public API and may change without notice, even in PATCH releases.
-- The function `UnitCommitment.build_model` now returns a plain JuMP model. The struct `UnitCommitmentModel` has been completely removed. Accessing model elements can now be accomplished as follows:
-    - `model.vars.x[idx]` becomes `model[:x][idx]`
-    - `model.eqs.y[idx]` becomes `model[:eq_y][idx]`
-    - `model.expr.z[idx]` becomes `model[:expr_z][idx]`
-    - `model.obj` becomes `model[:obj]`
-    - `model.isf` becomes `model[:isf]`
-    - `model.lodf` becomes `model[:lodf]`
-### Fixed
-- Properly validate solutions with price-sensitive loads.
-## [0.1.1] - 2020-11-16
-### Added
-- Add OR-LIB and Tejada19 instances.
-- Improve documentation.
-### Fixed
-- Fixes to MATLAB and PGLIB-UC instances.
-## [0.1.0] - 2020-11-06
-- Initial public release.
+# UnitCommitment.jl
+### Version 0.1.1 (Nov 16, 2020)
+* Fixes to MATLAB and PGLIB-UC instances
+* Add OR-LIB and Tejada19 instances
+* Improve documentation
+### Version 0.1.0 (Nov 6, 2020)
+* Initial public release
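For readers tracking the renames in the 0.2.0 entries above, a hedged sketch of what the accessor change looks like on a built model (`idx` and the symbol `:x` are the generic placeholders used by the changelog, not concrete names taken from this diff):

using JuMP

# Before 0.2.0 (UnitCommitmentModel struct):
#     x = model.vars.x[idx]
#     obj = model.obj
# Since 0.2.0 (build_model returns a plain JuMP model):
x = model[:x][idx]       # variable container entry
obj = model[:obj]        # objective expression
val = JuMP.value(x)      # standard JuMP accessors apply after UnitCommitment.optimize!(model)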


@@ -3,7 +3,8 @@
# Released under the modified BSD license. See COPYING.md for more details.

JULIA := julia --color=yes --project=@.
-VERSION := 0.2
+MKDOCS := ~/.local/bin/mkdocs
+VERSION := 0.1

build/sysimage.so: src/sysimage.jl Project.toml Manifest.toml
	mkdir -p build
@@ -15,18 +16,14 @@ clean:
	rm -rf build/*

docs:
-	cd docs; make clean; make dirhtml
-	rsync -avP --delete-after docs/_build/dirhtml/ ../docs/$(VERSION)/
+	$(MKDOCS) build -d ../docs/$(VERSION)/
+	rm ../docs/$(VERSION)/*.ipynb
+
+install-deps-docs:
+	pip install --user mkdocs mkdocs-cinder python-markdown-math

test: build/sysimage.so
	@echo Running tests...
	$(JULIA) --sysimage build/sysimage.so -e 'using Pkg; Pkg.test("UnitCommitment")' | tee build/test.log

-format:
-	julia -e 'using JuliaFormatter; format("src"); format("test"); format("benchmark")'
-
-install-deps:
-	julia -e 'using Pkg; Pkg.add(PackageSpec(name="JuliaFormatter", version="0.14.4"))'
-
-.PHONY: docs test format install-deps
+.PHONY: docs test

Manifest.toml

@@ -0,0 +1,367 @@
# This file is machine-generated - editing it directly is not advised
[[Artifacts]]
deps = ["Pkg"]
git-tree-sha1 = "c30985d8821e0cd73870b17b0ed0ce6dc44cb744"
uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33"
version = "1.3.0"
[[Base64]]
uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
[[BenchmarkTools]]
deps = ["JSON", "Logging", "Printf", "Statistics", "UUIDs"]
git-tree-sha1 = "9e62e66db34540a0c919d72172cc2f642ac71260"
uuid = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
version = "0.5.0"
[[BinaryProvider]]
deps = ["Libdl", "Logging", "SHA"]
git-tree-sha1 = "ecdec412a9abc8db54c0efc5548c64dfce072058"
uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232"
version = "0.5.10"
[[Bzip2_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "c3598e525718abcc440f69cc6d5f60dda0a1b61e"
uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0"
version = "1.0.6+5"
[[CEnum]]
git-tree-sha1 = "215a9aa4a1f23fbd05b92769fdd62559488d70e9"
uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82"
version = "0.4.1"
[[Calculus]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "f641eb0a4f00c343bbc32346e1217b86f3ce9dad"
uuid = "49dc2e85-a5d0-5ad3-a950-438e2897f1b9"
version = "0.5.1"
[[Cbc]]
deps = ["BinaryProvider", "CEnum", "Cbc_jll", "Libdl", "MathOptInterface", "SparseArrays"]
git-tree-sha1 = "929d0500c50387e7ac7ae9956ca7d7ce5312c90d"
uuid = "9961bab8-2fa3-5c5a-9d89-47fab24efd76"
version = "0.7.1"
[[Cbc_jll]]
deps = ["Cgl_jll", "Clp_jll", "CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Osi_jll", "Pkg"]
git-tree-sha1 = "16b8ffa56b3ded6b201aa2f50623f260448aa205"
uuid = "38041ee0-ae04-5750-a4d2-bb4d0d83d27d"
version = "2.10.3+4"
[[Cgl_jll]]
deps = ["Clp_jll", "CompilerSupportLibraries_jll", "Libdl", "Pkg"]
git-tree-sha1 = "32be20ec1e4c40e5c5d1bbf949ba9918a92a7569"
uuid = "3830e938-1dd0-5f3e-8b8e-b3ee43226782"
version = "0.60.2+5"
[[Clp_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Osi_jll", "Pkg"]
git-tree-sha1 = "79263d9383ca89b35f31c33ab5b880536a8413a4"
uuid = "06985876-5285-5a41-9fcb-8948a742cc53"
version = "1.17.6+6"
[[CodecBzip2]]
deps = ["Bzip2_jll", "Libdl", "TranscodingStreams"]
git-tree-sha1 = "2e62a725210ce3c3c2e1a3080190e7ca491f18d7"
uuid = "523fee87-0ab8-5b00-afb7-3ecf72e48cfd"
version = "0.7.2"
[[CodecZlib]]
deps = ["TranscodingStreams", "Zlib_jll"]
git-tree-sha1 = "ded953804d019afa9a3f98981d99b33e3db7b6da"
uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
version = "0.7.0"
[[CoinUtils_jll]]
deps = ["CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "ee1f06ab89337b7f194c29377ab174e752cdf60d"
uuid = "be027038-0da8-5614-b30d-e42594cb92df"
version = "2.11.3+3"
[[CommonSubexpressions]]
deps = ["MacroTools", "Test"]
git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7"
uuid = "bbf7d656-a473-5ed7-a52c-81e309532950"
version = "0.3.0"
[[Compat]]
deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"]
git-tree-sha1 = "a706ff10f1cd8dab94f59fd09c0e657db8e77ff0"
uuid = "34da2185-b29b-5c13-b0c7-acf172513d20"
version = "3.23.0"
[[CompilerSupportLibraries_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "8e695f735fca77e9708e795eda62afdb869cbb70"
uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
version = "0.3.4+0"
[[DataStructures]]
deps = ["Compat", "InteractiveUtils", "OrderedCollections"]
git-tree-sha1 = "fb0aa371da91c1ff9dc7fbed6122d3e411420b9c"
uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
version = "0.18.8"
[[Dates]]
deps = ["Printf"]
uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
[[DelimitedFiles]]
deps = ["Mmap"]
uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab"
[[DiffResults]]
deps = ["StaticArrays"]
git-tree-sha1 = "da24935df8e0c6cf28de340b958f6aac88eaa0cc"
uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5"
version = "1.0.2"
[[DiffRules]]
deps = ["NaNMath", "Random", "SpecialFunctions"]
git-tree-sha1 = "eb0c34204c8410888844ada5359ac8b96292cfd1"
uuid = "b552c78f-8df3-52c6-915a-8e097449b14b"
version = "1.0.1"
[[Distributed]]
deps = ["Random", "Serialization", "Sockets"]
uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
[[DocStringExtensions]]
deps = ["LibGit2", "Markdown", "Pkg", "Test"]
git-tree-sha1 = "50ddf44c53698f5e784bbebb3f4b21c5807401b1"
uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
version = "0.8.3"
[[Documenter]]
deps = ["Base64", "Dates", "DocStringExtensions", "IOCapture", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "REPL", "Test", "Unicode"]
git-tree-sha1 = "71e35e069daa9969b8af06cef595a1add76e0a11"
uuid = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
version = "0.25.3"
[[ForwardDiff]]
deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "NaNMath", "Random", "SpecialFunctions", "StaticArrays"]
git-tree-sha1 = "1d090099fb82223abc48f7ce176d3f7696ede36d"
uuid = "f6369f11-7733-5829-9624-2563aa707210"
version = "0.10.12"
[[GZip]]
deps = ["Libdl"]
git-tree-sha1 = "039be665faf0b8ae36e089cd694233f5dee3f7d6"
uuid = "92fee26a-97fe-5a0c-ad85-20a5f3185b63"
version = "0.5.1"
[[HTTP]]
deps = ["Base64", "Dates", "IniFile", "MbedTLS", "Sockets"]
git-tree-sha1 = "c7ec02c4c6a039a98a15f955462cd7aea5df4508"
uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3"
version = "0.8.19"
[[IOCapture]]
deps = ["Logging"]
git-tree-sha1 = "377252859f740c217b936cebcd918a44f9b53b59"
uuid = "b5f81e59-6552-4d32-b1f0-c071b021bf89"
version = "0.1.1"
[[IniFile]]
deps = ["Test"]
git-tree-sha1 = "098e4d2c533924c921f9f9847274f2ad89e018b8"
uuid = "83e8ac13-25f8-5344-8a64-a9f2b223428f"
version = "0.5.0"
[[InteractiveUtils]]
deps = ["Markdown"]
uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
[[JLLWrappers]]
git-tree-sha1 = "c70593677bbf2c3ccab4f7500d0f4dacfff7b75c"
uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210"
version = "1.1.3"
[[JSON]]
deps = ["Dates", "Mmap", "Parsers", "Unicode"]
git-tree-sha1 = "81690084b6198a2e1da36fcfda16eeca9f9f24e4"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.21.1"
[[JSONSchema]]
deps = ["HTTP", "JSON", "ZipFile"]
git-tree-sha1 = "a9ecdbc90be216912a2e3e8a8e38dc4c93f0d065"
uuid = "7d188eb4-7ad8-530c-ae41-71a32a6d4692"
version = "0.3.2"
[[JuMP]]
deps = ["Calculus", "DataStructures", "ForwardDiff", "JSON", "LinearAlgebra", "MathOptInterface", "MutableArithmetics", "NaNMath", "Random", "SparseArrays", "Statistics"]
git-tree-sha1 = "57c17a221a55f81890aabf00f478886859e25eaf"
uuid = "4076af6c-e467-56ae-b986-b466b2749572"
version = "0.21.5"
[[LibGit2]]
deps = ["Printf"]
uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
[[Libdl]]
uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
[[LinearAlgebra]]
deps = ["Libdl"]
uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
[[Logging]]
uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
[[MacroTools]]
deps = ["Markdown", "Random"]
git-tree-sha1 = "6a8a2a625ab0dea913aba95c11370589e0239ff0"
uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
version = "0.5.6"
[[Markdown]]
deps = ["Base64"]
uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
[[MathOptInterface]]
deps = ["BenchmarkTools", "CodecBzip2", "CodecZlib", "JSON", "JSONSchema", "LinearAlgebra", "MutableArithmetics", "OrderedCollections", "SparseArrays", "Test", "Unicode"]
git-tree-sha1 = "4fd15565d1811be771e87a877f1e691a005d2b90"
uuid = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
version = "0.9.18"
[[MbedTLS]]
deps = ["Dates", "MbedTLS_jll", "Random", "Sockets"]
git-tree-sha1 = "1c38e51c3d08ef2278062ebceade0e46cefc96fe"
uuid = "739be429-bea8-5141-9913-cc70e7f3736d"
version = "1.0.3"
[[MbedTLS_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "0eef589dd1c26a3ac9d753fe1a8bcad63f956fa6"
uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1"
version = "2.16.8+1"
[[Mmap]]
uuid = "a63ad114-7e13-5084-954f-fe012c677804"
[[MutableArithmetics]]
deps = ["LinearAlgebra", "SparseArrays", "Test"]
git-tree-sha1 = "c14dea152799bd0376024e3c3c1c3a6cb06764c7"
uuid = "d8a4904e-b15c-11e9-3269-09a3773c0cb0"
version = "0.2.11"
[[NaNMath]]
git-tree-sha1 = "c84c576296d0e2fbb3fc134d3e09086b3ea617cd"
uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
version = "0.3.4"
[[OpenBLAS32_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "19c33675cdeb572c1b17f96c492459d4f4958036"
uuid = "656ef2d0-ae68-5445-9ca0-591084a874a2"
version = "0.3.10+0"
[[OpenSpecFun_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "9db77584158d0ab52307f8c04f8e7c08ca76b5b3"
uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e"
version = "0.5.3+4"
[[OrderedCollections]]
git-tree-sha1 = "cf59cfed2e2c12e8a2ff0a4f1e9b2cd8650da6db"
uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
version = "1.3.2"
[[Osi_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "bd436a97280df40938e66ae8d18e57aceb072856"
uuid = "7da25872-d9ce-5375-a4d3-7a845f58efdd"
version = "0.108.5+3"
[[PackageCompiler]]
deps = ["Libdl", "Pkg", "UUIDs"]
git-tree-sha1 = "3eee77c94646163f15bd8626acf494360897f890"
uuid = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
version = "1.2.3"
[[Parsers]]
deps = ["Dates"]
git-tree-sha1 = "6fa4202675c05ba0f8268a6ddf07606350eda3ce"
uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
version = "1.0.11"
[[Pkg]]
deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"]
uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
[[Printf]]
deps = ["Unicode"]
uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
[[REPL]]
deps = ["InteractiveUtils", "Markdown", "Sockets"]
uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
[[Random]]
deps = ["Serialization"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[SHA]]
uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
[[Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
[[SharedArrays]]
deps = ["Distributed", "Mmap", "Random", "Serialization"]
uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383"
[[Sockets]]
uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
[[SparseArrays]]
deps = ["LinearAlgebra", "Random"]
uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
[[SpecialFunctions]]
deps = ["OpenSpecFun_jll"]
git-tree-sha1 = "d8d8b8a9f4119829410ecd706da4cc8594a1e020"
uuid = "276daf66-3868-5448-9aa4-cd146d93841b"
version = "0.10.3"
[[StaticArrays]]
deps = ["LinearAlgebra", "Random", "Statistics"]
git-tree-sha1 = "da4cf579416c81994afd6322365d00916c79b8ae"
uuid = "90137ffa-7385-5640-81b9-e52037218182"
version = "0.12.5"
[[Statistics]]
deps = ["LinearAlgebra", "SparseArrays"]
uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
[[Test]]
deps = ["Distributed", "InteractiveUtils", "Logging", "Random"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[TranscodingStreams]]
deps = ["Random", "Test"]
git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c"
uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
version = "0.9.5"
[[UUIDs]]
deps = ["Random", "SHA"]
uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[[Unicode]]
uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
[[ZipFile]]
deps = ["Libdl", "Printf", "Zlib_jll"]
git-tree-sha1 = "c3a5637e27e914a7a445b8d0ad063d701931e9f7"
uuid = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea"
version = "0.9.3"
[[Zlib_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "320228915c8debb12cb434c59057290f0834dbf6"
uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
version = "1.2.11+18"


@@ -2,7 +2,7 @@ name = "UnitCommitment"
uuid = "64606440-39ea-11e9-0f29-3303a1d3d877" uuid = "64606440-39ea-11e9-0f29-3303a1d3d877"
authors = ["Santos Xavier, Alinson <axavier@anl.gov>"] authors = ["Santos Xavier, Alinson <axavier@anl.gov>"]
repo = "https://github.com/ANL-CEEESA/UnitCommitment.jl" repo = "https://github.com/ANL-CEEESA/UnitCommitment.jl"
version = "0.2.0" version = "0.1.1"
[deps] [deps]
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
@@ -12,6 +12,7 @@ JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Logging = "56ddb016-857b-54e1-b83d-db4d58db5568" Logging = "56ddb016-857b-54e1-b83d-db4d58db5568"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
PackageCompiler = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d" PackageCompiler = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7" Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"


@@ -1,18 +1,9 @@
<h1 align="center">UnitCommitment.jl</h1> <a href="https://github.com/ANL-CEEESA/UnitCommitment.jl/actions?query=workflow%3ATest+branch%3Adev"><img src="https://github.com/iSoron/UnitCommitment.jl/workflows/Tests/badge.svg"></img></a>
<p align="center"> <a href="https://github.com/ANL-CEEESA/UnitCommitment.jl/actions?query=workflow%3ABenchmark+branch%3Adev+is%3Asuccess"><img src="https://github.com/iSoron/UnitCommitment.jl/workflows/Benchmark/badge.svg"></img></a>
<a href="https://github.com/ANL-CEEESA/UnitCommitment.jl/actions?query=workflow%3ATest+branch%3Adev"> <a href="https://doi.org/10.5281/zenodo.4269874"><img src="https://zenodo.org/badge/doi/10.5281/zenodo.4269874.svg" alt="DOI"></a>
<img src="https://github.com/iSoron/UnitCommitment.jl/workflows/Tests/badge.svg"></img>
</a>
<a href="https://doi.org/10.5281/zenodo.4269874"> # UnitCommitment.jl
<img src="https://zenodo.org/badge/doi/10.5281/zenodo.4269874.svg" alt="DOI"></img>
</a>
<a href="https://github.com/ANL-CEEESA/UnitCommitment.jl/releases/">
<img src="https://img.shields.io/github/v/release/ANL-CEEESA/UnitCommitment.jl?include_prereleases&label=pre-release">
</a>
<a href="https://github.com/ANL-CEEESA/UnitCommitment.jl/discussions">
<img src="https://img.shields.io/badge/GitHub-Discussions-%23fc4ebc" />
</a>
</p>
**UnitCommitment.jl** (UC.jl) is an optimization package for the Security-Constrained Unit Commitment Problem (SCUC), a fundamental optimization problem in power systems used, for example, to clear the day-ahead electricity markets. The package provides benchmark instances for the problem and JuMP implementations of state-of-the-art mixed-integer programming formulations. **UnitCommitment.jl** (UC.jl) is an optimization package for the Security-Constrained Unit Commitment Problem (SCUC), a fundamental optimization problem in power systems used, for example, to clear the day-ahead electricity markets. The package provides benchmark instances for the problem and JuMP implementations of state-of-the-art mixed-integer programming formulations.
@@ -37,7 +28,7 @@
* We would like to thank **Aleksandr M. Kazachkov** (University of Florida), **Yonghong Chen** (Midcontinent Independent System Operator), **Feng Pan** (Pacific Northwest National Laboratory) for valuable feedback on early versions of this package. * We would like to thank **Aleksandr M. Kazachkov** (University of Florida), **Yonghong Chen** (Midcontinent Independent System Operator), **Feng Pan** (Pacific Northwest National Laboratory) for valuable feedback on early versions of this package.
* Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy under Contract No. DE-AC02-06CH11357, and the U.S. Department of Energy **Advanced Grid Modeling Program** under Grant DE-OE0000875 * Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy under Contract No. DE-AC02-06CH11357.
### Citing ### Citing
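Given the package description in the README hunk above, a minimal end-to-end sketch of how UC.jl is typically driven (hedged: `read_benchmark` and the choice of Cbc are assumed from the package's public API and dependency list rather than shown in this diff; the instance name is one of the benchmark instances listed further below):

using Cbc
using UnitCommitment

instance = UnitCommitment.read_benchmark("matpower/case1888rte/2017-01-01")  # assumed entry point
model = UnitCommitment.build_model(
    instance = instance,
    optimizer = Cbc.Optimizer,
)
UnitCommitment.optimize!(model, time_limit = 3600, gap_limit = 1e-3)
solution = UnitCommitment.solution(model)       # UnitCommitment.get_solution on the 0.1.x side
UnitCommitment.validate(instance, solution)
UnitCommitment.write("solution.json", solution)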


@@ -6,6 +6,9 @@ SHELL := /bin/bash
JULIA := julia --project=. --sysimage ../build/sysimage.so
TIMESTAMP := $(shell date "+%Y-%m-%d %H:%M")
SRC_FILES := $(wildcard ../src/*.jl)
+DEST := .
+FORMULATION := tight
+results_dir := results_$(FORMULATION)

INSTANCES_PGLIB := \
pglib-uc/ca/2014-09-01_reserves_0 \
@@ -38,6 +41,206 @@ INSTANCES_MATPOWER := \
matpower/case6468rte/2017-08-01 \
matpower/case6515rte/2017-08-01
INSTANCES_INFORMS1 := \
matpower/case1888rte/2017-01-01 \
matpower/case1888rte/2017-01-02 \
matpower/case1888rte/2017-01-03 \
matpower/case1888rte/2017-01-04 \
matpower/case1888rte/2017-01-05 \
matpower/case1888rte/2017-01-06 \
matpower/case1888rte/2017-01-07 \
matpower/case1888rte/2017-01-08 \
matpower/case1888rte/2017-01-09 \
matpower/case1888rte/2017-01-10 \
matpower/case1888rte/2017-01-11 \
matpower/case1888rte/2017-01-12 \
matpower/case1888rte/2017-01-13 \
matpower/case1888rte/2017-01-14 \
matpower/case1888rte/2017-01-15 \
matpower/case1888rte/2017-01-16 \
matpower/case1888rte/2017-01-17 \
matpower/case1888rte/2017-01-18 \
matpower/case1888rte/2017-01-19 \
matpower/case1888rte/2017-01-20 \
matpower/case1888rte/2017-01-21 \
matpower/case1888rte/2017-01-22 \
matpower/case1888rte/2017-01-23 \
matpower/case1888rte/2017-01-24 \
matpower/case1888rte/2017-01-25 \
matpower/case1888rte/2017-01-26 \
matpower/case1888rte/2017-01-27 \
matpower/case1888rte/2017-01-28 \
matpower/case1888rte/2017-01-29 \
matpower/case1888rte/2017-01-30 \
matpower/case1888rte/2017-01-31 \
matpower/case1888rte/2017-02-01 \
matpower/case1888rte/2017-02-02 \
matpower/case1888rte/2017-02-03 \
matpower/case1888rte/2017-02-04 \
matpower/case1888rte/2017-02-05 \
matpower/case1888rte/2017-02-06 \
matpower/case1888rte/2017-02-07 \
matpower/case1888rte/2017-02-08 \
matpower/case1888rte/2017-02-09 \
matpower/case1888rte/2017-02-10 \
matpower/case1888rte/2017-02-11 \
matpower/case1888rte/2017-02-12 \
matpower/case1888rte/2017-02-13 \
matpower/case1888rte/2017-02-14 \
matpower/case1888rte/2017-02-15 \
matpower/case1888rte/2017-02-16 \
matpower/case1888rte/2017-02-17 \
matpower/case1888rte/2017-02-18 \
matpower/case1888rte/2017-02-19 \
matpower/case1888rte/2017-02-20 \
matpower/case1888rte/2017-02-21 \
matpower/case1888rte/2017-02-22 \
matpower/case1888rte/2017-02-23 \
matpower/case1888rte/2017-02-24 \
matpower/case1888rte/2017-02-25 \
matpower/case1888rte/2017-02-26 \
matpower/case1888rte/2017-02-27 \
matpower/case1888rte/2017-02-28 \
matpower/case1888rte/2017-03-01
INSTANCES_INFORMS2 := \
matpower/case3375wp/2017-01-01 \
matpower/case3375wp/2017-01-02 \
matpower/case3375wp/2017-01-03 \
matpower/case3375wp/2017-01-04 \
matpower/case3375wp/2017-01-05 \
matpower/case3375wp/2017-01-06 \
matpower/case3375wp/2017-01-07 \
matpower/case3375wp/2017-01-08 \
matpower/case3375wp/2017-01-09 \
matpower/case3375wp/2017-01-10 \
matpower/case3375wp/2017-01-11 \
matpower/case3375wp/2017-01-12 \
matpower/case3375wp/2017-01-13 \
matpower/case3375wp/2017-01-14 \
matpower/case3375wp/2017-01-15 \
matpower/case3375wp/2017-01-16 \
matpower/case3375wp/2017-01-17 \
matpower/case3375wp/2017-01-18 \
matpower/case3375wp/2017-01-19 \
matpower/case3375wp/2017-01-20 \
matpower/case3375wp/2017-01-21 \
matpower/case3375wp/2017-01-22 \
matpower/case3375wp/2017-01-23 \
matpower/case3375wp/2017-01-24 \
matpower/case3375wp/2017-01-25 \
matpower/case3375wp/2017-01-26 \
matpower/case3375wp/2017-01-27 \
matpower/case3375wp/2017-01-28 \
matpower/case3375wp/2017-01-29 \
matpower/case3375wp/2017-01-30 \
matpower/case3375wp/2017-01-31 \
matpower/case3375wp/2017-02-01 \
matpower/case3375wp/2017-02-02 \
matpower/case3375wp/2017-02-03 \
matpower/case3375wp/2017-02-04 \
matpower/case3375wp/2017-02-05 \
matpower/case3375wp/2017-02-06 \
matpower/case3375wp/2017-02-07 \
matpower/case3375wp/2017-02-08 \
matpower/case3375wp/2017-02-09 \
matpower/case3375wp/2017-02-10 \
matpower/case3375wp/2017-02-11 \
matpower/case3375wp/2017-02-12 \
matpower/case3375wp/2017-02-13 \
matpower/case3375wp/2017-02-14 \
matpower/case3375wp/2017-02-15 \
matpower/case3375wp/2017-02-16 \
matpower/case3375wp/2017-02-17 \
matpower/case3375wp/2017-02-18 \
matpower/case3375wp/2017-02-19 \
matpower/case3375wp/2017-02-20 \
matpower/case3375wp/2017-02-21 \
matpower/case3375wp/2017-02-22 \
matpower/case3375wp/2017-02-23 \
matpower/case3375wp/2017-02-24 \
matpower/case3375wp/2017-02-25 \
matpower/case3375wp/2017-02-26 \
matpower/case3375wp/2017-02-27 \
matpower/case3375wp/2017-02-28 \
matpower/case3375wp/2017-03-01
INSTANCES_INFORMS3 := \
matpower/case6468rte/2017-01-01 \
matpower/case6468rte/2017-01-02 \
matpower/case6468rte/2017-01-03 \
matpower/case6468rte/2017-01-04 \
matpower/case6468rte/2017-01-05 \
matpower/case6468rte/2017-01-06 \
matpower/case6468rte/2017-01-07 \
matpower/case6468rte/2017-01-08 \
matpower/case6468rte/2017-01-09 \
matpower/case6468rte/2017-01-10 \
matpower/case6468rte/2017-01-11 \
matpower/case6468rte/2017-01-12 \
matpower/case6468rte/2017-01-13 \
matpower/case6468rte/2017-01-14 \
matpower/case6468rte/2017-01-15 \
matpower/case6468rte/2017-01-16 \
matpower/case6468rte/2017-01-17 \
matpower/case6468rte/2017-01-18 \
matpower/case6468rte/2017-01-19 \
matpower/case6468rte/2017-01-20 \
matpower/case6468rte/2017-01-21 \
matpower/case6468rte/2017-01-22 \
matpower/case6468rte/2017-01-23 \
matpower/case6468rte/2017-01-24 \
matpower/case6468rte/2017-01-25 \
matpower/case6468rte/2017-01-26 \
matpower/case6468rte/2017-01-27 \
matpower/case6468rte/2017-01-28 \
matpower/case6468rte/2017-01-29 \
matpower/case6468rte/2017-01-30 \
matpower/case6468rte/2017-01-31 \
matpower/case6468rte/2017-02-01 \
matpower/case6468rte/2017-02-02 \
matpower/case6468rte/2017-02-03 \
matpower/case6468rte/2017-02-04 \
matpower/case6468rte/2017-02-05 \
matpower/case6468rte/2017-02-06 \
matpower/case6468rte/2017-02-07 \
matpower/case6468rte/2017-02-08 \
matpower/case6468rte/2017-02-09 \
matpower/case6468rte/2017-02-10 \
matpower/case6468rte/2017-02-11 \
matpower/case6468rte/2017-02-12 \
matpower/case6468rte/2017-02-13 \
matpower/case6468rte/2017-02-14 \
matpower/case6468rte/2017-02-15 \
matpower/case6468rte/2017-02-16 \
matpower/case6468rte/2017-02-17 \
matpower/case6468rte/2017-02-18 \
matpower/case6468rte/2017-02-19 \
matpower/case6468rte/2017-02-20 \
matpower/case6468rte/2017-02-21 \
matpower/case6468rte/2017-02-22 \
matpower/case6468rte/2017-02-23 \
matpower/case6468rte/2017-02-24 \
matpower/case6468rte/2017-02-25 \
matpower/case6468rte/2017-02-26 \
matpower/case6468rte/2017-02-27 \
matpower/case6468rte/2017-02-28 \
matpower/case6468rte/2017-03-01
INSTANCES_TEST := \
test/case14
#SAMPLES := 1 2 3
SAMPLES := 1
SOLUTIONS_MATPOWER := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_MATPOWER))))
SOLUTIONS_PGLIB := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_PGLIB))))
SOLUTIONS_INFORMS1 := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_INFORMS1))))
SOLUTIONS_INFORMS2 := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_INFORMS2))))
SOLUTIONS_INFORMS3 := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_INFORMS3))))
SOLUTIONS_TEST := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_TEST))))
.PHONY: tables save small large clean-mps matpower pglib informs1 informs2 informs3 test pglib orlib
INSTANCES_ORLIB := \
or-lib/20_0_1_w \
or-lib/20_0_5_w \
@@ -62,13 +265,8 @@ INSTANCES_TEJADA19 := \
tejada19/UC_168h_131g \
tejada19/UC_168h_199g

-SAMPLES := 1 2 3 4 5
-SOLUTIONS_MATPOWER := $(foreach s,$(SAMPLES),$(addprefix results/,$(addsuffix .$(s).sol.json,$(INSTANCES_MATPOWER))))
-SOLUTIONS_PGLIB := $(foreach s,$(SAMPLES),$(addprefix results/,$(addsuffix .$(s).sol.json,$(INSTANCES_PGLIB))))
-SOLUTIONS_ORLIB := $(foreach s,$(SAMPLES),$(addprefix results/,$(addsuffix .$(s).sol.json,$(INSTANCES_ORLIB))))
-SOLUTIONS_TEJADA19 := $(foreach s,$(SAMPLES),$(addprefix results/,$(addsuffix .$(s).sol.json,$(INSTANCES_TEJADA19))))
-.PHONY: tables save small large clean-mps matpower pglib orlib
+SOLUTIONS_ORLIB := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_ORLIB))))
+SOLUTIONS_TEJADA19 := $(foreach s,$(SAMPLES),$(addprefix $(results_dir)/,$(addsuffix .$(s).sol.json,$(INSTANCES_TEJADA19))))

all: matpower pglib orlib tejada19
@@ -76,27 +274,51 @@ matpower: $(SOLUTIONS_MATPOWER)
pglib: $(SOLUTIONS_PGLIB)

+informs1: $(SOLUTIONS_INFORMS1)
+
+informs2: $(SOLUTIONS_INFORMS2)
+
+informs3: $(SOLUTIONS_INFORMS3)
+
+test: $(SOLUTIONS_TEST)
+
orlib: $(SOLUTIONS_ORLIB)

tejada19: $(SOLUTIONS_TEJADA19)

clean:
-	@rm -rf tables/benchmark* tables/compare* results
+	@rm -rf tables/benchmark* tables/compare* $(results_dir)

clean-mps:
-	@rm -fv results/*/*.mps.gz results/*/*/*.mps.gz
+	@rm -fv $(results_dir)/*/*.mps.gz results/*/*/*.mps.gz

clean-sol:
-	@rm -rf results/*/*.sol.* results/*/*/*.sol.*
+	@rm -rf $(results_dir)/*/*.sol.* results/*/*/*.sol.*

save:
	mkdir -p "runs/$(TIMESTAMP)"
-	rsync -avP results tables "runs/$(TIMESTAMP)/"
+	rsync -avP $(results_dir) tables "runs/$(TIMESTAMP)/"

results/%.sol.json: run.jl
	@echo "run $*"
-	@mkdir -p $(dir results/$*)
-	@$(JULIA) run.jl $* 2>&1 | cat > results/$*.log
+	@mkdir -p $(dir $(DEST)/$(results_dir)/$*)
+	@$(JULIA) run.jl $* default $(DEST)/$(results_dir) 2>&1 | cat > $(DEST)/$(results_dir)/$*.log
+	@echo "run $* [done]"
+
+results_tight/%.sol.json: run.jl
+	@echo "run $*"
+	@mkdir -p $(dir $(DEST)/$(results_dir)/$*)
+	@$(JULIA) run.jl $* tight $(DEST)/$(results_dir) 2>&1 | cat > $(DEST)/$(results_dir)/$*.log
+	@echo "run $* [done]"
+
+results_default/%.sol.json: run.jl
+	@echo "run $*"
+	@mkdir -p $(dir $(DEST)/$(results_dir)/$*)
+	@$(JULIA) run.jl $* default $(DEST)/$(results_dir) 2>&1 | cat > $(DEST)/$(results_dir)/$*.log
+	@echo "run $* [done]"
+
+results_sparse/%.sol.json: run.jl
+	@echo "run $*"
+	@mkdir -p $(dir $(DEST)/$(results_dir)/$*)
+	@$(JULIA) run.jl $* sparse $(DEST)/$(results_dir) 2>&1 | cat > $(DEST)/$(results_dir)/$*.log
	@echo "run $* [done]"

tables:
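With these additions the benchmark driver is parameterized by the formulation: results are written under results_$(FORMULATION), and each results_<mode>/%.sol.json rule forwards the mode and destination directory to run.jl. A run for one instance family would presumably be launched as, for example, `make FORMULATION=tight informs1` (or FORMULATION=sparse, FORMULATION=default), with DEST overriding the output root if needed.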

benchmark/Manifest.toml

@@ -0,0 +1,389 @@
# This file is machine-generated - editing it directly is not advised
[[Artifacts]]
deps = ["Pkg"]
git-tree-sha1 = "c30985d8821e0cd73870b17b0ed0ce6dc44cb744"
uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33"
version = "1.3.0"
[[Base64]]
uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
[[BenchmarkTools]]
deps = ["JSON", "Logging", "Printf", "Statistics", "UUIDs"]
git-tree-sha1 = "9e62e66db34540a0c919d72172cc2f642ac71260"
uuid = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
version = "0.5.0"
[[BinaryProvider]]
deps = ["Libdl", "Logging", "SHA"]
git-tree-sha1 = "ecdec412a9abc8db54c0efc5548c64dfce072058"
uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232"
version = "0.5.10"
[[Bzip2_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "c3598e525718abcc440f69cc6d5f60dda0a1b61e"
uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0"
version = "1.0.6+5"
[[CEnum]]
git-tree-sha1 = "215a9aa4a1f23fbd05b92769fdd62559488d70e9"
uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82"
version = "0.4.1"
[[Calculus]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "f641eb0a4f00c343bbc32346e1217b86f3ce9dad"
uuid = "49dc2e85-a5d0-5ad3-a950-438e2897f1b9"
version = "0.5.1"
[[Cbc]]
deps = ["BinaryProvider", "CEnum", "Cbc_jll", "Libdl", "MathOptInterface", "SparseArrays"]
git-tree-sha1 = "929d0500c50387e7ac7ae9956ca7d7ce5312c90d"
uuid = "9961bab8-2fa3-5c5a-9d89-47fab24efd76"
version = "0.7.1"
[[Cbc_jll]]
deps = ["Cgl_jll", "Clp_jll", "CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Osi_jll", "Pkg"]
git-tree-sha1 = "16b8ffa56b3ded6b201aa2f50623f260448aa205"
uuid = "38041ee0-ae04-5750-a4d2-bb4d0d83d27d"
version = "2.10.3+4"
[[Cgl_jll]]
deps = ["Clp_jll", "CompilerSupportLibraries_jll", "Libdl", "Pkg"]
git-tree-sha1 = "32be20ec1e4c40e5c5d1bbf949ba9918a92a7569"
uuid = "3830e938-1dd0-5f3e-8b8e-b3ee43226782"
version = "0.60.2+5"
[[Clp_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Osi_jll", "Pkg"]
git-tree-sha1 = "79263d9383ca89b35f31c33ab5b880536a8413a4"
uuid = "06985876-5285-5a41-9fcb-8948a742cc53"
version = "1.17.6+6"
[[CodecBzip2]]
deps = ["Bzip2_jll", "Libdl", "TranscodingStreams"]
git-tree-sha1 = "2e62a725210ce3c3c2e1a3080190e7ca491f18d7"
uuid = "523fee87-0ab8-5b00-afb7-3ecf72e48cfd"
version = "0.7.2"
[[CodecZlib]]
deps = ["TranscodingStreams", "Zlib_jll"]
git-tree-sha1 = "ded953804d019afa9a3f98981d99b33e3db7b6da"
uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
version = "0.7.0"
[[CoinUtils_jll]]
deps = ["CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "ee1f06ab89337b7f194c29377ab174e752cdf60d"
uuid = "be027038-0da8-5614-b30d-e42594cb92df"
version = "2.11.3+3"
[[CommonSubexpressions]]
deps = ["MacroTools", "Test"]
git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7"
uuid = "bbf7d656-a473-5ed7-a52c-81e309532950"
version = "0.3.0"
[[CompilerSupportLibraries_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "8e695f735fca77e9708e795eda62afdb869cbb70"
uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
version = "0.3.4+0"
[[DataStructures]]
deps = ["InteractiveUtils", "OrderedCollections"]
git-tree-sha1 = "88d48e133e6d3dd68183309877eac74393daa7eb"
uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
version = "0.17.20"
[[Dates]]
deps = ["Printf"]
uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
[[DiffResults]]
deps = ["StaticArrays"]
git-tree-sha1 = "da24935df8e0c6cf28de340b958f6aac88eaa0cc"
uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5"
version = "1.0.2"
[[DiffRules]]
deps = ["NaNMath", "Random", "SpecialFunctions"]
git-tree-sha1 = "eb0c34204c8410888844ada5359ac8b96292cfd1"
uuid = "b552c78f-8df3-52c6-915a-8e097449b14b"
version = "1.0.1"
[[Distributed]]
deps = ["Random", "Serialization", "Sockets"]
uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
[[DocStringExtensions]]
deps = ["LibGit2", "Markdown", "Pkg", "Test"]
git-tree-sha1 = "50ddf44c53698f5e784bbebb3f4b21c5807401b1"
uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
version = "0.8.3"
[[Documenter]]
deps = ["Base64", "Dates", "DocStringExtensions", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "REPL", "Test", "Unicode"]
git-tree-sha1 = "fb1ff838470573adc15c71ba79f8d31328f035da"
uuid = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
version = "0.25.2"
[[ForwardDiff]]
deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "NaNMath", "Random", "SpecialFunctions", "StaticArrays"]
git-tree-sha1 = "1d090099fb82223abc48f7ce176d3f7696ede36d"
uuid = "f6369f11-7733-5829-9624-2563aa707210"
version = "0.10.12"
[[GLPK]]
deps = ["BinaryProvider", "CEnum", "GLPK_jll", "Libdl", "MathOptInterface"]
git-tree-sha1 = "0984f1669480cdecd465458b4abf81b238fbfe50"
uuid = "60bf3e95-4087-53dc-ae20-288a0d20c6a6"
version = "0.14.2"
[[GLPK_jll]]
deps = ["GMP_jll", "Libdl", "Pkg"]
git-tree-sha1 = "ccc855de74292e478d4278e3a6fdd8212f75e81e"
uuid = "e8aa6df9-e6ca-548a-97ff-1f85fc5b8b98"
version = "4.64.0+0"
[[GMP_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "15abc5f976569a1c9d651aff02f7222ef305eb2a"
uuid = "781609d7-10c4-51f6-84f2-b8444358ff6d"
version = "6.1.2+6"
[[GZip]]
deps = ["Libdl"]
git-tree-sha1 = "039be665faf0b8ae36e089cd694233f5dee3f7d6"
uuid = "92fee26a-97fe-5a0c-ad85-20a5f3185b63"
version = "0.5.1"
[[Gurobi]]
deps = ["CEnum", "Libdl", "MathOptInterface"]
git-tree-sha1 = "de2015da3bffcf005ef51b78163e81bfb7b2301d"
uuid = "2e9cd046-0924-5485-92f1-d5272153d98b"
version = "0.9.2"
[[HTTP]]
deps = ["Base64", "Dates", "IniFile", "MbedTLS", "Sockets"]
git-tree-sha1 = "c7ec02c4c6a039a98a15f955462cd7aea5df4508"
uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3"
version = "0.8.19"
[[IniFile]]
deps = ["Test"]
git-tree-sha1 = "098e4d2c533924c921f9f9847274f2ad89e018b8"
uuid = "83e8ac13-25f8-5344-8a64-a9f2b223428f"
version = "0.5.0"
[[InteractiveUtils]]
deps = ["Markdown"]
uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
[[JLLWrappers]]
git-tree-sha1 = "c70593677bbf2c3ccab4f7500d0f4dacfff7b75c"
uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210"
version = "1.1.3"
[[JSON]]
deps = ["Dates", "Mmap", "Parsers", "Unicode"]
git-tree-sha1 = "b34d7cef7b337321e97d22242c3c2b91f476748e"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.21.0"
[[JSONSchema]]
deps = ["HTTP", "JSON", "ZipFile"]
git-tree-sha1 = "a9ecdbc90be216912a2e3e8a8e38dc4c93f0d065"
uuid = "7d188eb4-7ad8-530c-ae41-71a32a6d4692"
version = "0.3.2"
[[JuMP]]
deps = ["Calculus", "DataStructures", "ForwardDiff", "LinearAlgebra", "MathOptInterface", "MutableArithmetics", "NaNMath", "Random", "SparseArrays", "Statistics"]
git-tree-sha1 = "cbab42e2e912109d27046aa88f02a283a9abac7c"
uuid = "4076af6c-e467-56ae-b986-b466b2749572"
version = "0.21.3"
[[LibGit2]]
deps = ["Printf"]
uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
[[Libdl]]
uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
[[LinearAlgebra]]
deps = ["Libdl"]
uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
[[Logging]]
uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
[[MacroTools]]
deps = ["Markdown", "Random"]
git-tree-sha1 = "6a8a2a625ab0dea913aba95c11370589e0239ff0"
uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
version = "0.5.6"
[[Markdown]]
deps = ["Base64"]
uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
[[MathOptInterface]]
deps = ["BenchmarkTools", "CodecBzip2", "CodecZlib", "JSON", "JSONSchema", "LinearAlgebra", "MutableArithmetics", "OrderedCollections", "SparseArrays", "Test", "Unicode"]
git-tree-sha1 = "5a1d631e0a9087d425e024d66b9c71e92e78fda8"
uuid = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
version = "0.9.17"
[[MbedTLS]]
deps = ["Dates", "MbedTLS_jll", "Random", "Sockets"]
git-tree-sha1 = "1c38e51c3d08ef2278062ebceade0e46cefc96fe"
uuid = "739be429-bea8-5141-9913-cc70e7f3736d"
version = "1.0.3"
[[MbedTLS_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "c0b1286883cac4e2b617539de41111e0776d02e8"
uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1"
version = "2.16.8+0"
[[Mmap]]
uuid = "a63ad114-7e13-5084-954f-fe012c677804"
[[MutableArithmetics]]
deps = ["LinearAlgebra", "SparseArrays", "Test"]
git-tree-sha1 = "6cf09794783b9de2e662c4e8b60d743021e338d0"
uuid = "d8a4904e-b15c-11e9-3269-09a3773c0cb0"
version = "0.2.10"
[[NaNMath]]
git-tree-sha1 = "c84c576296d0e2fbb3fc134d3e09086b3ea617cd"
uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
version = "0.3.4"
[[OpenBLAS32_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "19c33675cdeb572c1b17f96c492459d4f4958036"
uuid = "656ef2d0-ae68-5445-9ca0-591084a874a2"
version = "0.3.10+0"
[[OpenSpecFun_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "9db77584158d0ab52307f8c04f8e7c08ca76b5b3"
uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e"
version = "0.5.3+4"
[[OrderedCollections]]
git-tree-sha1 = "16c08bf5dba06609fe45e30860092d6fa41fde7b"
uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
version = "1.3.1"
[[Osi_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "bd436a97280df40938e66ae8d18e57aceb072856"
uuid = "7da25872-d9ce-5375-a4d3-7a845f58efdd"
version = "0.108.5+3"
[[PackageCompiler]]
deps = ["Libdl", "Pkg", "UUIDs"]
git-tree-sha1 = "3eee77c94646163f15bd8626acf494360897f890"
uuid = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
version = "1.2.3"
[[Parsers]]
deps = ["Dates"]
git-tree-sha1 = "6fa4202675c05ba0f8268a6ddf07606350eda3ce"
uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
version = "1.0.11"
[[Pkg]]
deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"]
uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
[[Printf]]
deps = ["Unicode"]
uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
[[REPL]]
deps = ["InteractiveUtils", "Markdown", "Sockets"]
uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
[[Random]]
deps = ["Serialization"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[Requires]]
deps = ["UUIDs"]
git-tree-sha1 = "28faf1c963ca1dc3ec87f166d92982e3c4a1f66d"
uuid = "ae029012-a4dd-5104-9daa-d747884805df"
version = "1.1.0"
[[SHA]]
uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
[[Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
[[Sockets]]
uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
[[SparseArrays]]
deps = ["LinearAlgebra", "Random"]
uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
[[SpecialFunctions]]
deps = ["OpenSpecFun_jll"]
git-tree-sha1 = "d8d8b8a9f4119829410ecd706da4cc8594a1e020"
uuid = "276daf66-3868-5448-9aa4-cd146d93841b"
version = "0.10.3"
[[StaticArrays]]
deps = ["LinearAlgebra", "Random", "Statistics"]
git-tree-sha1 = "016d1e1a00fabc556473b07161da3d39726ded35"
uuid = "90137ffa-7385-5640-81b9-e52037218182"
version = "0.12.4"
[[Statistics]]
deps = ["LinearAlgebra", "SparseArrays"]
uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
[[Test]]
deps = ["Distributed", "InteractiveUtils", "Logging", "Random"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[TimerOutputs]]
deps = ["Printf"]
git-tree-sha1 = "f458ca23ff80e46a630922c555d838303e4b9603"
uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
version = "0.5.6"
[[TranscodingStreams]]
deps = ["Random", "Test"]
git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c"
uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
version = "0.9.5"
[[UUIDs]]
deps = ["Random", "SHA"]
uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[[Unicode]]
uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
[[UnitCommitment]]
deps = ["Cbc", "DataStructures", "Documenter", "GLPK", "GZip", "Gurobi", "JSON", "JuMP", "LinearAlgebra", "Logging", "MathOptInterface", "OrderedCollections", "PackageCompiler", "Printf", "Requires", "SparseArrays", "Test", "TimerOutputs"]
path = ".."
uuid = "64606440-39ea-11e9-0f29-3303a1d3d877"
version = "2.1.0"
[[ZipFile]]
deps = ["Libdl", "Printf", "Zlib_jll"]
git-tree-sha1 = "c3a5637e27e914a7a445b8d0ad063d701931e9f7"
uuid = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea"
version = "0.9.3"
[[Zlib_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "320228915c8debb12cb434c59057290f0834dbf6"
uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
version = "1.2.11+18"


@@ -10,16 +10,48 @@ using Logging
using Printf
using LinearAlgebra

-UnitCommitment._setup_logger()
-
function main()
-    basename, suffix = split(ARGS[1], ".")
-    solution_filename = "results/$basename.$suffix.sol.json"
-    model_filename = "results/$basename.$suffix.mps.gz"
+    NUM_THREADS = 4
    time_limit = 60 * 20
-    BLAS.set_num_threads(4)
+    BLAS.set_num_threads(NUM_THREADS)
+
+    if length(ARGS) >= 2
+        mode = string("_", ARGS[2])
+    else
+        mode = "_default"
+    end
+    if length(ARGS) >= 3 && !isempty(strip(ARGS[3]))
+        results_dir = ARGS[3]
+    else
+        results_dir = string("./","results$mode")
+    end
+
+    # Validate mode and set formulation
+    if mode == "_default"
+        formulation = UnitCommitment.DefaultFormulation
+    elseif mode == "_tight"
+        formulation = UnitCommitment.TightFormulation
+    elseif mode == "_sparse"
+        formulation = UnitCommitment.SparseDefaultFormulation
+    else
+        error("Unknown formulation requested: ", ARGS[2])
+    end
+
+    # Filename is instance_name.sample_number.sol.gz
+    # Parse out the instance + sample parts to create output files
+    basename, suffix = split(ARGS[1], ".") # will not work if suffix part is not present
+    model_filename_stub = string(results_dir,"/$basename.$suffix")
+    solution_filename = string("$model_filename_stub.sol.json")
+
+    # Choose logging options
+    logname, logfile = nothing, nothing
+    #logname = string("$model_filename_stub.out")
+    if isa(logname, String) && !isempty(logname)
+        logfile = open(logname, "w")
+        global_logger(TimeLogger(initial_time = time(), file = logfile))
+    else
+        global_logger(TimeLogger(initial_time = time()))
+    end
+
    total_time = @elapsed begin
        @info "Reading: $basename"
@@ -29,38 +61,44 @@ function main()
@info @sprintf("Read problem in %.2f seconds", time_read) @info @sprintf("Read problem in %.2f seconds", time_read)
time_model = @elapsed begin time_model = @elapsed begin
model = build_model( optimizer=optimizer_with_attributes(Gurobi.Optimizer,
instance = instance, "Threads" => NUM_THREADS,
optimizer = optimizer_with_attributes( "Seed" => rand(1:1000))
Gurobi.Optimizer, model = build_model(instance=instance, optimizer=optimizer, formulation=formulation)
"Threads" => 4,
"Seed" => rand(1:1000),
),
variable_names = true,
)
end end
end
@info "Setting names..."
UnitCommitment.set_variable_names!(model)
model_filename = string(model_filename_stub,".init",".mps.gz")
@info string("Exporting initial model without transmission constraints to ", model_filename)
JuMP.write_to_file(model.mip, model_filename)
total_time += @elapsed begin
@info "Optimizing..." @info "Optimizing..."
BLAS.set_num_threads(1) BLAS.set_num_threads(1)
UnitCommitment.optimize!( UnitCommitment.optimize!(model, time_limit=time_limit, gap_limit=1e-3)
model,
time_limit = time_limit,
gap_limit = 1e-3,
)
end end
@info @sprintf("Total time was %.2f seconds", total_time) @info @sprintf("Total time was %.2f seconds", total_time)
@info "Writing: $solution_filename" @info "Writing: $solution_filename"
solution = UnitCommitment.solution(model) solution = UnitCommitment.get_solution(model)
open(solution_filename, "w") do file open(solution_filename, "w") do file
return JSON.print(file, solution, 2) JSON.print(file, solution, 2)
end end
@info "Verifying solution..." @info "Verifying solution..."
UnitCommitment.validate(instance, solution) UnitCommitment.validate(instance, solution)
@info "Exporting model..." model_filename = string(model_filename_stub,".final",".mps.gz")
return JuMP.write_to_file(model, model_filename) @info string("Exporting final model to ", model_filename)
end JuMP.write_to_file(model.mip, model_filename)
if !isnothing(logfile)
close(logfile)
end
end # main
main() main()
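Taken together with the benchmark Makefile changes above, the rewritten script expects arguments of the form `run.jl <instance>.<sample> [formulation] [results_dir]`. A single case could presumably be run by hand as `julia --project=. run.jl matpower/case1888rte/2017-01-01.1 tight results_tight` (the benchmark Makefile additionally passes `--sysimage ../build/sysimage.so`); with no extra arguments the script falls back to the default formulation and writes under ./results_default.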


@@ -8,49 +8,41 @@ import seaborn as sns
import matplotlib.pyplot as plt
import sys

-# easy_cutoff = 120
+#easy_cutoff = 120

b1 = pd.read_csv(sys.argv[1], index_col=0)
b2 = pd.read_csv(sys.argv[2], index_col=0)
-c1 = b1.groupby(["Group", "Instance", "Sample"])[
-    ["Optimization time (s)", "Primal bound"]
-].mean()
-c2 = b2.groupby(["Group", "Instance", "Sample"])[
-    ["Optimization time (s)", "Primal bound"]
-].mean()
+c1 = b1.groupby(["Group", "Instance", "Sample"])[["Optimization time (s)", "Primal bound"]].mean()
+c2 = b2.groupby(["Group", "Instance", "Sample"])[["Optimization time (s)", "Primal bound"]].mean()
c1.columns = ["A Time (s)", "A Value"]
c2.columns = ["B Time (s)", "B Value"]

merged = pd.concat([c1, c2], axis=1)
merged["Speedup"] = merged["A Time (s)"] / merged["B Time (s)"]
merged["Time diff (s)"] = merged["B Time (s)"] - merged["A Time (s)"]
-merged["Value diff (%)"] = np.round(
-    (merged["B Value"] - merged["A Value"]) / merged["A Value"] * 100.0, 5
-)
+merged["Value diff (%)"] = np.round((merged["B Value"] - merged["A Value"]) / merged["A Value"] * 100.0, 5)
merged.loc[merged.loc[:, "B Time (s)"] <= 0, "Speedup"] = float("nan")
merged.loc[merged.loc[:, "B Time (s)"] <= 0, "Time diff (s)"] = float("nan")
-# merged = merged[(merged["A Time (s)"] >= easy_cutoff) | (merged["B Time (s)"] >= easy_cutoff)]
+#merged = merged[(merged["A Time (s)"] >= easy_cutoff) | (merged["B Time (s)"] >= easy_cutoff)]
merged.reset_index(inplace=True)
merged["Name"] = merged["Group"] + "/" + merged["Instance"]
-# merged = merged.sort_values(by="Speedup", ascending=False)
+#merged = merged.sort_values(by="Speedup", ascending=False)

k = len(merged.groupby("Name"))
plt.figure(figsize=(12, 0.50 * k))
-plt.rcParams["xtick.bottom"] = plt.rcParams["xtick.labelbottom"] = True
-plt.rcParams["xtick.top"] = plt.rcParams["xtick.labeltop"] = True
+plt.rcParams['xtick.bottom'] = plt.rcParams['xtick.labelbottom'] = True
+plt.rcParams['xtick.top'] = plt.rcParams['xtick.labeltop'] = True
sns.set_style("whitegrid")
sns.set_palette("Set1")
-sns.barplot(
-    data=merged,
-    x="Speedup",
-    y="Name",
-    color="tab:red",
-    capsize=0.15,
-    errcolor="k",
-    errwidth=1.25,
-)
+sns.barplot(data=merged,
+            x="Speedup",
+            y="Name",
+            color="tab:red",
+            capsize=0.15,
+            errcolor="k",
+            errwidth=1.25)
plt.axvline(1.0, linestyle="--", color="k")
plt.tight_layout()
@@ -58,18 +50,15 @@ print("Writing tables/compare.png")
plt.savefig("tables/compare.png", dpi=150) plt.savefig("tables/compare.png", dpi=150)
print("Writing tables/compare.csv") print("Writing tables/compare.csv")
merged.loc[ merged.loc[:, ["Group",
:, "Instance",
[ "Sample",
"Group", "A Time (s)",
"Instance", "B Time (s)",
"Sample", "Speedup",
"A Time (s)", "Time diff (s)",
"B Time (s)", "A Value",
"Speedup", "B Value",
"Time diff (s)", "Value diff (%)",
"A Value", ]
"B Value", ].to_csv("tables/compare.csv", index_label="Index")
"Value diff (%)",
],
].to_csv("tables/compare.csv", index_label="Index")


@@ -9,8 +9,8 @@ from tabulate import tabulate
def process_all_log_files():
-    pathlist = list(Path(".").glob("results/*/*/*.log"))
-    pathlist += list(Path(".").glob("results/*/*.log"))
+    pathlist = list(Path(".").glob('results/*/*/*.log'))
+    pathlist += list(Path(".").glob('results/*/*.log'))
    rows = []
    for path in pathlist:
        if ".ipy" in str(path):
@@ -22,8 +22,8 @@ def process_all_log_files():
    df.index = range(len(df))
    print("Writing tables/benchmark.csv")
    df.to_csv("tables/benchmark.csv", index_label="Index")


def process(filename):
    parts = filename.replace(".log", "").split("/")
@@ -45,74 +45,56 @@ def process(filename):
    read_time, model_time, isf_time, total_time = None, None, None, None
    cb_calls, cb_time = 0, 0.0
    transmission_count, transmission_time, transmission_calls = 0, 0.0, 0
    # m = re.search("case([0-9]*)", instance_name)
    # n_buses = int(m.group(1))
    n_buses = 0
    with open(filename) as file:
        for line in file.readlines():
-            m = re.search(
-                r"Explored ([0-9.e+]*) nodes \(([0-9.e+]*) simplex iterations\) in ([0-9.e+]*) seconds",
-                line,
-            )
+            m = re.search(r"Explored ([0-9.e+]*) nodes \(([0-9.e+]*) simplex iterations\) in ([0-9.e+]*) seconds", line)
            if m is not None:
                nodes += int(m.group(1))
                simplex_iterations += int(m.group(2))
                optimize_time += float(m.group(3))
-            m = re.search(
-                r"Best objective ([0-9.e+]*), best bound ([0-9.e+]*), gap ([0-9.e+]*)\%",
-                line,
-            )
+            m = re.search(r"Best objective ([0-9.e+]*), best bound ([0-9.e+]*), gap ([0-9.e+]*)\%", line)
            if m is not None:
                primal_bound = float(m.group(1))
                dual_bound = float(m.group(2))
                gap = round(float(m.group(3)), 3)
-            m = re.search(
-                r"Root relaxation: objective ([0-9.e+]*), ([0-9.e+]*) iterations, ([0-9.e+]*) seconds",
-                line,
-            )
+            m = re.search(r"Root relaxation: objective ([0-9.e+]*), ([0-9.e+]*) iterations, ([0-9.e+]*) seconds", line)
            if m is not None:
                root_obj = float(m.group(1))
                root_iterations += int(m.group(2))
                root_time += float(m.group(3))
-            m = re.search(
-                r"Presolved: ([0-9.e+]*) rows, ([0-9.e+]*) columns, ([0-9.e+]*) nonzeros",
-                line,
-            )
+            m = re.search(r"Presolved: ([0-9.e+]*) rows, ([0-9.e+]*) columns, ([0-9.e+]*) nonzeros", line)
            if m is not None:
                n_rows_presolved = int(m.group(1))
                n_cols_presolved = int(m.group(2))
                n_nz_presolved = int(m.group(3))
-            m = re.search(
-                r"Optimize a model with ([0-9.e+]*) rows, ([0-9.e+]*) columns and ([0-9.e+]*) nonzeros",
-                line,
-            )
+            m = re.search(r"Optimize a model with ([0-9.e+]*) rows, ([0-9.e+]*) columns and ([0-9.e+]*) nonzeros", line)
            if m is not None:
                n_rows_orig = int(m.group(1))
                n_cols_orig = int(m.group(2))
                n_nz_orig = int(m.group(3))
-            m = re.search(
-                r"Variable types: ([0-9.e+]*) continuous, ([0-9.e+]*) integer \(([0-9.e+]*) binary\)",
-                line,
-            )
+            m = re.search(r"Variable types: ([0-9.e+]*) continuous, ([0-9.e+]*) integer \(([0-9.e+]*) binary\)", line)
            if m is not None:
                n_cont_vars_presolved = int(m.group(1))
                n_bin_vars_presolved = int(m.group(3))
            m = re.search(r"Read problem in ([0-9.e+]*) seconds", line)
            if m is not None:
                read_time = float(m.group(1))
            m = re.search(r"Computed ISF in ([0-9.e+]*) seconds", line)
            if m is not None:
                isf_time = float(m.group(1))
            m = re.search(r"Built model in ([0-9.e+]*) seconds", line)
            if m is not None:
                model_time = float(m.group(1))
@@ -121,10 +103,7 @@ def process(filename):
if m is not None: if m is not None:
total_time = float(m.group(1)) total_time = float(m.group(1))
m = re.search( m = re.search(r"User-callback calls ([0-9.e+]*), time in user-callback ([0-9.e+]*) sec", line)
r"User-callback calls ([0-9.e+]*), time in user-callback ([0-9.e+]*) sec",
line,
)
if m is not None: if m is not None:
cb_calls = int(m.group(1)) cb_calls = int(m.group(1))
cb_time = float(m.group(2)) cb_time = float(m.group(2))
@@ -137,7 +116,7 @@ def process(filename):
m = re.search(r".*MW overflow", line) m = re.search(r".*MW overflow", line)
if m is not None: if m is not None:
transmission_count += 1 transmission_count += 1
return { return {
"Group": group_name, "Group": group_name,
"Instance": instance_name, "Instance": instance_name,
@@ -171,7 +150,6 @@ def process(filename):
"Transmission screening calls": transmission_calls, "Transmission screening calls": transmission_calls,
} }
def generate_chart(): def generate_chart():
import pandas as pd import pandas as pd
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
@@ -181,9 +159,7 @@ def generate_chart():
files = ["tables/benchmark.csv"] files = ["tables/benchmark.csv"]
for f in files: for f in files:
table = pd.read_csv(f, index_col=0) table = pd.read_csv(f, index_col=0)
table.loc[:, "Instance"] = ( table.loc[:, "Instance"] = table.loc[:,"Group"] + "/" + table.loc[:,"Instance"]
table.loc[:, "Group"] + "/" + table.loc[:, "Instance"]
)
table.loc[:, "Filename"] = f table.loc[:, "Filename"] = f
tables += [table] tables += [table]
benchmark = pd.concat(tables, sort=True) benchmark = pd.concat(tables, sort=True)
@@ -192,18 +168,16 @@ def generate_chart():
plt.figure(figsize=(12, 0.50 * k)) plt.figure(figsize=(12, 0.50 * k))
sns.set_style("whitegrid") sns.set_style("whitegrid")
sns.set_palette("Set1") sns.set_palette("Set1")
sns.barplot( sns.barplot(y="Instance",
y="Instance", x="Total time (s)",
x="Total time (s)", color="tab:red",
color="tab:red", capsize=0.15,
capsize=0.15, errcolor="k",
errcolor="k", errwidth=1.25,
errwidth=1.25, data=benchmark);
data=benchmark,
)
plt.tight_layout() plt.tight_layout()
print("Writing tables/benchmark.png") print("Writing tables/benchmark.png")
plt.savefig("tables/benchmark.png", dpi=150) plt.savefig("tables/benchmark.png", dpi=150);
if __name__ == "__main__": if __name__ == "__main__":
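The benchmark scripts above are written in Python. Purely for illustration, and not part of the repository, the same kind of extraction can be sketched in Julia (the package's own language) using an equivalent regular expression:

```julia
# Illustrative sketch only (not part of the benchmark scripts): extract node
# count, simplex iterations and solve time from a Gurobi-style log line.
line = "Explored 1234 nodes (56789 simplex iterations) in 12.30 seconds"
m = match(r"Explored ([0-9.e+]*) nodes \(([0-9.e+]*) simplex iterations\) in ([0-9.e+]*) seconds", line)
if m !== nothing
    nodes = parse(Int, m.captures[1])
    iterations = parse(Int, m.captures[2])
    seconds = parse(Float64, m.captures[3])
    println((nodes, iterations, seconds))
end
```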

View File

@@ -1,14 +0,0 @@
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

View File

@@ -1,49 +0,0 @@
h1.site-logo {
font-size: 30px !important;
}
h1.site-logo small {
font-size: 20px !important;
}
h1.site-logo {
font-size: 30px !important;
}
h1.site-logo small {
font-size: 20px !important;
}
tbody, thead, pre {
border: 1px solid rgba(0, 0, 0, 0.25);
}
table td, th {
padding: 8px;
}
table p {
margin-bottom: 0;
}
table td code {
white-space: nowrap;
}
table tr,
table th {
border-bottom: 1px solid rgba(0, 0, 0, 0.1);
}
table tr:last-child {
border-bottom: 0;
}
pre {
box-shadow: inherit !important;
background-color: #fff;
}
.text-align\:center {
text-align: center;
}

View File

@@ -1,16 +0,0 @@
project = "UnitCommitment.jl"
copyright = "2020-2021, UChicago Argonne, LLC"
author = ""
release = "0.2"
extensions = ["myst_parser"]
templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
html_theme = "sphinx_book_theme"
html_static_path = ["_static"]
html_css_files = ["custom.css"]
html_theme_options = {
"repository_url": "https://github.com/ANL-CEEESA/UnitCommitment.jl/",
"use_repository_button": True,
"extra_navbar": "",
}
html_title = f"UnitCommitment.jl<br/><small>{release}</small>"

View File

@@ -1,72 +0,0 @@
# UnitCommitment.jl
**UnitCommitment.jl** (UC.jl) is a Julia/JuMP optimization package for the Security-Constrained Unit Commitment Problem (SCUC), a fundamental optimization problem in power systems used, for example, to clear the day-ahead electricity markets. The package provides benchmark instances for the problem and Julia/JuMP implementations of state-of-the-art mixed-integer programming formulations.
### Package Components
* **Data Format:** The package proposes an extensible and fully-documented JSON-based data specification format for SCUC, developed in collaboration with Independent System Operators (ISOs), which describes the most important aspects of the problem. The format supports all the most common generator characteristics (including ramping, piecewise-linear production cost curves and time-dependent startup costs), as well as operating reserves, price-sensitive loads, transmission networks and contingencies.
* **Benchmark Instances:** The package provides a diverse collection of large-scale benchmark instances collected from the literature and extended to make them more challenging and realistic.
* **Model Implementation**: The package provides a Julia/JuMP implementation of state-of-the-art formulations and solution methods for SCUC. Our goal is to keep this implementation up-to-date, as new methods are proposed in the literature.
* **Benchmark Tools:** The package provides automated benchmark scripts to accurately evaluate the performance impact of proposed code changes.
### Authors
* **Alinson Santos Xavier** (Argonne National Laboratory)
* **Feng Qiu** (Argonne National Laboratory)
### Acknowledgments
* We would like to thank **Aleksandr M. Kazachkov** (University of Florida), **Yonghong Chen** (Midcontinent Independent System Operator), **Feng Pan** (Pacific Northwest National Laboratory) for valuable feedback on early versions of this package.
* Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy under Contract No. DE-AC02-06CH11357
* Based upon work supported by the **U.S. Department of Energy Advanced Grid Modeling Program** under Grant DE-OE0000875.
### Citing
If you use UnitCommitment.jl in your research (instances, models or algorithms), we kindly request that you cite the package as follows:
* **Alinson S. Xavier, Feng Qiu**, "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment". Zenodo (2020). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874).
If you use the instances, we additionally request that you cite the original sources, as described in the [instances page](instances.md).
### License
```text
UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment
Copyright © 2020, UChicago Argonne, LLC. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted
provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of
conditions and the following disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
```
## Site contents
```{toctree}
---
maxdepth: 2
---
usage.md
format.md
instances.md
model.md
```

View File

@@ -1,196 +0,0 @@
```{sectnum}
---
start: 4
depth: 2
suffix: .
---
```
JuMP Model
==========
In this page, we describe the JuMP optimization model produced by the function `UnitCommitment.build_model`. A detailed understanding of this model is not necessary if you are just interested in using the package to solve some standard unit commitment cases, but it may be useful, for example, if you need to solve a slightly different problem, with additional variables and constraints. The notation in this page generally follows [KnOsWa20].
Decision variables
------------------
### Generators
Name | Symbol | Description | Unit
-----|:--------:|-------------|:------:
`is_on[g,t]` | $u_{g}(t)$ | True if generator `g` is on at time `t`. | Binary
`switch_on[g,t]` | $v_{g}(t)$ | True if generator `g` switches on at time `t`. | Binary
`switch_off[g,t]` | $w_{g}(t)$ | True if generator `g` switches off at time `t`. | Binary
`prod_above[g,t]` |$p'_{g}(t)$ | Amount of power produced by generator `g` above its minimum power output at time `t`. For example, if the minimum power of generator `g` is 100 MW and `g` is producing 115 MW of power at time `t`, then `prod_above[g,t]` equals `15.0`. | MW
`segprod[g,t,k]` | $p^k_g(t)$ | Amount of power from piecewise linear segment `k` produced by generator `g` at time `t`. For example, if the cost curve for generator `g` is defined by the points `(100, 1400)`, `(110, 1600)`, `(130, 2200)` and `(135, 2400)`, and if the generator is producing 115 MW of power at time `t`, then `segprod[g,t,:]` equals `[10.0, 5.0, 0.0]`. | MW
`reserve[g,t]` | $r_g(t)$ | Amount of reserves provided by generator `g` at time `t`. | MW
`startup[g,t,s]` | $\delta^s_g(t)$ | True if generator `g` switches on at time `t` incurring start-up costs from start-up category `s`. | Binary
### Buses
Name | Symbol | Description | Unit
-----|:------:|-------------|:------:
`net_injection[b,t]` | $n_b(t)$ | Net injection at bus `b` at time `t`. | MW
`curtail[b,t]` | $s^+_b(t)$ | Amount of load curtailed at bus `b` at time `t` | MW
### Price-sensitive loads
Name | Symbol | Description | Unit
-----|:------:|-------------|:------:
`loads[s,t]` | $d_{s}(t)$ | Amount of power served to price-sensitive load `s` at time `t`. | MW
### Transmission lines
Name | Symbol | Description | Unit
-----|:------:|-------------|:------:
`flow[l,t]` | $f_l(t)$ | Power flow on line `l` at time `t`. | MW
`overflow[l,t]` | $f^+_l(t)$ | Amount of flow above the limit for line `l` at time `t`. | MW
```{warning}
Since transmission and N-1 security constraints are enforced in a lazy way, most of the `flow[l,t]` variables are never added to the model. Accessing `model[:flow][l,t]` without first checking that the variable exists will likely generate an error.
```
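As a minimal sketch (not part of the package documentation above), and assuming `model[:flow]` behaves like a dictionary keyed by `(line_name, time)` tuples, the existence of a flow variable can be checked before it is accessed:

```julia
# Minimal sketch, assuming model[:flow] is a dictionary keyed by
# (line_name, time) tuples; adapt to the actual container used by the package.
flow = model[:flow]
key = ("l1", 1)          # hypothetical transmission line name and time step
if haskey(flow, key)
    println("Flow on l1 at time 1: ", value(flow[key]), " MW")
else
    println("No flow variable was generated for l1 at time 1")
end
```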
Objective function
------------------
$$
\begin{align}
\text{minimize} \;\; &
\sum_{t \in \mathcal{T}}
\sum_{g \in \mathcal{G}}
C^\text{min}_g(t) u_g(t) \\
&
+ \sum_{t \in \mathcal{T}}
\sum_{g \in \mathcal{G}}
\sum_{k \in \mathcal{K}_g}
C^k_g(t) p^k_g(t) \\
&
+ \sum_{t \in \mathcal{T}}
\sum_{g \in \mathcal{G}}
\sum_{s \in \mathcal{S}_g}
C^s_{g}(t) \delta^s_g(t) \\
&
+ \sum_{t \in \mathcal{T}}
\sum_{l \in \mathcal{L}}
C^\text{overflow}_{l}(t) f^+_l(t) \\
&
+ \sum_{t \in \mathcal{T}}
\sum_{b \in \mathcal{B}}
C^\text{curtail}(t) s^+_b(t) \\
&
- \sum_{t \in \mathcal{T}}
\sum_{s \in \mathcal{PS}}
R_{s}(t) d_{s}(t) \\
\end{align}
$$
where
- $\mathcal{B}$ is the set of buses
- $\mathcal{G}$ is the set of generators
- $\mathcal{L}$ is the set of transmission lines
- $\mathcal{PS}$ is the set of price-sensitive loads
- $\mathcal{S}_g$ is the set of start-up categories for generator $g$
- $\mathcal{T}$ is the set of time steps
- $C^\text{curtail}(t)$ is the curtailment penalty (in \$/MW)
- $C^\text{min}_g(t)$ is the cost of keeping generator $g$ on and producing at minimum power during time $t$ (in \$)
- $C^\text{overflow}_{l}(t)$ is the flow limit penalty for line $l$ at time $t$ (in \$/MW)
- $C^k_g(t)$ is the cost for generator $g$ to produce 1 MW of power at time $t$ under piecewise linear segment $k$
- $C^s_{g}(t)$ is the cost of starting up generator $g$ at time $t$ under start-up category $s$ (in \$)
- $R_{s}(t)$ is the revenue obtained from serving price-sensitive load $s$ at time $t$ (in \$/MW)
Constraints
-----------
TODO
Inspecting and modifying the model
----------------------------------
### Accessing decision variables
After building a model using `UnitCommitment.build_model`, it is possible to obtain a reference to the decision variables by calling `model[:varname][index]`. For example, `model[:is_on]["g1",1]` returns a direct reference to the JuMP variable indicating whether generator named "g1" is on at time 1. The script below illustrates how to build a model, solve it and display the solution without using the function `UnitCommitment.solution`.
```julia
using Cbc
using Printf
using JuMP
using UnitCommitment
# Load benchmark instance
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")
# Build JuMP model
model = UnitCommitment.build_model(
instance=instance,
optimizer=Cbc.Optimizer,
)
# Solve the model
UnitCommitment.optimize!(model)
# Display commitment status
for g in instance.units
for t in 1:instance.time
@printf(
"%-10s %5d %5.1f %5.1f %5.1f\n",
g.name,
t,
value(model[:is_on][g.name, t]),
value(model[:switch_on][g.name, t]),
value(model[:switch_off][g.name, t]),
)
end
end
```
### Modifying the model
Since we now have a direct reference to the JuMP decision variables, it is possible to fix variables, change the coefficients in the objective function, or even add new constraints to the model before solving it. The script below shows how this can be accomplished. For more information on modifying an existing model, [see the JuMP documentation](https://jump.dev/JuMP.jl/stable/manual/variables/).
```julia
using Cbc
using JuMP
using UnitCommitment
# Load benchmark instance
instance = UnitCommitment.read_benchmark("matpower/case118/2017-02-01")
# Construct JuMP model
model = UnitCommitment.build_model(
instance=instance,
optimizer=Cbc.Optimizer,
)
# Fix a decision variable to 1.0
JuMP.fix(
model[:is_on]["g1",1],
1.0,
force=true,
)
# Change the objective function
JuMP.set_objective_coefficient(
model,
model[:switch_on]["g2",1],
1000.0,
)
# Create a new constraint
@constraint(
model,
model[:is_on]["g3",1] + model[:is_on]["g4",1] <= 1,
)
# Solve the model
UnitCommitment.optimize!(model)
```
References
----------
* [KnOsWa20] **Bernard Knueven, James Ostrowski and Jean-Paul Watson.** "On Mixed-Integer Programming Formulations for the Unit Commitment Problem". INFORMS Journal on Computing (2020). [DOI: 10.1287/ijoc.2019.0944](https://doi.org/10.1287/ijoc.2019.0944)

Binary file not shown.

26
mkdocs.yml Normal file
View File

@@ -0,0 +1,26 @@
site_name: UnitCommitment.jl
theme:
name: cinder
hljs_languages:
- julia
copyright: "Copyright © 2020, UChicago Argonne, LLC. All Rights Reserved."
repo_url: https://github.com/ANL-CEEESA/unitcommitment.jl
edit_uri: edit/dev/src/docs/
nav:
- Home: index.md
- Usage: usage.md
- Format: format.md
- Instances: instances.md
plugins:
- search
markdown_extensions:
- admonition
- mdx_math
- fenced_code
extra_javascript:
- https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0/MathJax.js?config=TeX-AMS-MML_HTMLorMML
- js/mathjax.js
docs_dir: src/docs
site_dir: docs
extra_css:
- "css/custom.css"

182
scripts/instances.txt Normal file
View File

@@ -0,0 +1,182 @@
matpower/case1888rte/2017-01-01
matpower/case1888rte/2017-01-02
matpower/case1888rte/2017-01-03
matpower/case1888rte/2017-01-04
matpower/case1888rte/2017-01-05
matpower/case1888rte/2017-01-06
matpower/case1888rte/2017-01-07
matpower/case1888rte/2017-01-08
matpower/case1888rte/2017-01-09
matpower/case1888rte/2017-01-10
matpower/case1888rte/2017-01-11
matpower/case1888rte/2017-01-12
matpower/case1888rte/2017-01-13
matpower/case1888rte/2017-01-14
matpower/case1888rte/2017-01-15
matpower/case1888rte/2017-01-16
matpower/case1888rte/2017-01-17
matpower/case1888rte/2017-01-18
matpower/case1888rte/2017-01-19
matpower/case1888rte/2017-01-20
matpower/case1888rte/2017-01-21
matpower/case1888rte/2017-01-22
matpower/case1888rte/2017-01-23
matpower/case1888rte/2017-01-24
matpower/case1888rte/2017-01-25
matpower/case1888rte/2017-01-26
matpower/case1888rte/2017-01-27
matpower/case1888rte/2017-01-28
matpower/case1888rte/2017-01-29
matpower/case1888rte/2017-01-30
matpower/case1888rte/2017-01-31
matpower/case1888rte/2017-02-01
matpower/case1888rte/2017-02-02
matpower/case1888rte/2017-02-03
matpower/case1888rte/2017-02-04
matpower/case1888rte/2017-02-05
matpower/case1888rte/2017-02-06
matpower/case1888rte/2017-02-07
matpower/case1888rte/2017-02-08
matpower/case1888rte/2017-02-09
matpower/case1888rte/2017-02-10
matpower/case1888rte/2017-02-11
matpower/case1888rte/2017-02-12
matpower/case1888rte/2017-02-13
matpower/case1888rte/2017-02-14
matpower/case1888rte/2017-02-15
matpower/case1888rte/2017-02-16
matpower/case1888rte/2017-02-17
matpower/case1888rte/2017-02-18
matpower/case1888rte/2017-02-19
matpower/case1888rte/2017-02-20
matpower/case1888rte/2017-02-21
matpower/case1888rte/2017-02-22
matpower/case1888rte/2017-02-23
matpower/case1888rte/2017-02-24
matpower/case1888rte/2017-02-25
matpower/case1888rte/2017-02-26
matpower/case1888rte/2017-02-27
matpower/case1888rte/2017-02-28
matpower/case1888rte/2017-03-01
matpower/case3375wp/2017-01-01
matpower/case3375wp/2017-01-02
matpower/case3375wp/2017-01-03
matpower/case3375wp/2017-01-04
matpower/case3375wp/2017-01-05
matpower/case3375wp/2017-01-06
matpower/case3375wp/2017-01-07
matpower/case3375wp/2017-01-08
matpower/case3375wp/2017-01-09
matpower/case3375wp/2017-01-10
matpower/case3375wp/2017-01-11
matpower/case3375wp/2017-01-12
matpower/case3375wp/2017-01-13
matpower/case3375wp/2017-01-14
matpower/case3375wp/2017-01-15
matpower/case3375wp/2017-01-16
matpower/case3375wp/2017-01-17
matpower/case3375wp/2017-01-18
matpower/case3375wp/2017-01-19
matpower/case3375wp/2017-01-20
matpower/case3375wp/2017-01-21
matpower/case3375wp/2017-01-22
matpower/case3375wp/2017-01-23
matpower/case3375wp/2017-01-24
matpower/case3375wp/2017-01-25
matpower/case3375wp/2017-01-26
matpower/case3375wp/2017-01-27
matpower/case3375wp/2017-01-28
matpower/case3375wp/2017-01-29
matpower/case3375wp/2017-01-30
matpower/case3375wp/2017-01-31
matpower/case3375wp/2017-02-01
matpower/case3375wp/2017-02-02
matpower/case3375wp/2017-02-03
matpower/case3375wp/2017-02-04
matpower/case3375wp/2017-02-05
matpower/case3375wp/2017-02-06
matpower/case3375wp/2017-02-07
matpower/case3375wp/2017-02-08
matpower/case3375wp/2017-02-09
matpower/case3375wp/2017-02-10
matpower/case3375wp/2017-02-11
matpower/case3375wp/2017-02-12
matpower/case3375wp/2017-02-13
matpower/case3375wp/2017-02-14
matpower/case3375wp/2017-02-15
matpower/case3375wp/2017-02-16
matpower/case3375wp/2017-02-17
matpower/case3375wp/2017-02-18
matpower/case3375wp/2017-02-19
matpower/case3375wp/2017-02-20
matpower/case3375wp/2017-02-21
matpower/case3375wp/2017-02-22
matpower/case3375wp/2017-02-23
matpower/case3375wp/2017-02-24
matpower/case3375wp/2017-02-25
matpower/case3375wp/2017-02-26
matpower/case3375wp/2017-02-27
matpower/case3375wp/2017-02-28
matpower/case3375wp/2017-03-01
matpower/case6468rte/2017-01-01
matpower/case6468rte/2017-01-02
matpower/case6468rte/2017-01-03
matpower/case6468rte/2017-01-04
matpower/case6468rte/2017-01-05
matpower/case6468rte/2017-01-06
matpower/case6468rte/2017-01-07
matpower/case6468rte/2017-01-08
matpower/case6468rte/2017-01-09
matpower/case6468rte/2017-01-10
matpower/case6468rte/2017-01-11
matpower/case6468rte/2017-01-12
matpower/case6468rte/2017-01-13
matpower/case6468rte/2017-01-14
matpower/case6468rte/2017-01-15
matpower/case6468rte/2017-01-16
matpower/case6468rte/2017-01-17
matpower/case6468rte/2017-01-18
matpower/case6468rte/2017-01-19
matpower/case6468rte/2017-01-20
matpower/case6468rte/2017-01-21
matpower/case6468rte/2017-01-22
matpower/case6468rte/2017-01-23
matpower/case6468rte/2017-01-24
matpower/case6468rte/2017-01-25
matpower/case6468rte/2017-01-26
matpower/case6468rte/2017-01-27
matpower/case6468rte/2017-01-28
matpower/case6468rte/2017-01-29
matpower/case6468rte/2017-01-30
matpower/case6468rte/2017-01-31
matpower/case6468rte/2017-02-01
matpower/case6468rte/2017-02-02
matpower/case6468rte/2017-02-03
matpower/case6468rte/2017-02-04
matpower/case6468rte/2017-02-05
matpower/case6468rte/2017-02-06
matpower/case6468rte/2017-02-07
matpower/case6468rte/2017-02-08
matpower/case6468rte/2017-02-09
matpower/case6468rte/2017-02-10
matpower/case6468rte/2017-02-11
matpower/case6468rte/2017-02-12
matpower/case6468rte/2017-02-13
matpower/case6468rte/2017-02-14
matpower/case6468rte/2017-02-15
matpower/case6468rte/2017-02-16
matpower/case6468rte/2017-02-17
matpower/case6468rte/2017-02-18
matpower/case6468rte/2017-02-19
matpower/case6468rte/2017-02-20
matpower/case6468rte/2017-02-21
matpower/case6468rte/2017-02-22
matpower/case6468rte/2017-02-23
matpower/case6468rte/2017-02-24
matpower/case6468rte/2017-02-25
matpower/case6468rte/2017-02-26
matpower/case6468rte/2017-02-27
matpower/case6468rte/2017-02-28
matpower/case6468rte/2017-03-01
test/case14

49
scripts/run_batch.sh Normal file
View File

@@ -0,0 +1,49 @@
#!/bin/bash
#SBATCH --array=1-180
#SBATCH --time=02:00:00
#SBATCH --account=def-alodi
#SBATCH --mem-per-cpu=1G
#SBATCH --cpus-per-task=4
#SBATCH --mail-user=aleksandr.kazachkov@polymtl.ca
#SBATCH --mail-type=BEGIN
#SBATCH --mail-type=END
#SBATCH --mail-type=FAIL
#SBATCH --array=182
#SBATCH --time=00:00:30
#SBATCH --mem-per-cpu=500M
#SBATCH --cpus-per-task=1
#SBATCH --time=01:00:00
#SBATCH --mem-per-cpu=1G
#SBATCH --cpus-per-task=4
MODE="tight"
if [ ! -z $1 ]; then
MODE=$1
fi
#CASE_NUM=`printf %03d $SLURM_ARRAY_TASK_ID`
PROJ_DIR="${REPOS_DIR}/UnitCommitment2.jl"
INST=$(sed -n "${SLURM_ARRAY_TASK_ID}p" ${PROJ_DIR}/scripts/instances.txt)
#DEST="${PROJ_DIR}/benchmark"
DEST="${HOME}/scratch/uc"
RESULTS_DIR="${DEST}/results_${MODE}"
NUM_SAMPLES=1
if [ $MODE == "sparse" ] || [ $MODE == "default" ] || [ $MODE == "tight" ]
then
echo "Running task $SLURM_ARRAY_TASK_ID for instance $INST with results sent to ${RESULTS_DIR}"
else
echo "Unrecognized mode: $1. Exiting."
exit
fi
cd ${PROJ_DIR}/benchmark
mkdir -p $(dirname ${RESULTS_DIR}/${INST})
for i in $(seq ${NUM_SAMPLES}); do
FILE=$INST.$i
#echo "Running $FILE at `date` using command julia --project=${PROJ_DIR}/benchmark --sysimage=${PROJ_DIR}/build/sysimage.so ${PROJ_DIR}/benchmark/run.jl ${FILE} ${MODE} ${RESULTS_DIR} 2&>1 | cat > ${RESULTS_DIR}/${FILE}.log"
#julia --project=${PROJ_DIR}/benchmark --sysimage=${PROJ_DIR}/build/sysimage.so ${PROJ_DIR}/benchmark/run.jl ${FILE} ${MODE} ${RESULTS_DIR} 2&>1 | cat > ${RESULTS_DIR}/${FILE}.log
echo "Running $FILE at `date` using command julia --project=${PROJ_DIR}/benchmark --sysimage=${PROJ_DIR}/build/sysimage.so ${PROJ_DIR}/benchmark/run.jl ${FILE} ${MODE} ${RESULTS_DIR} &> ${RESULTS_DIR}/${FILE}.log"
julia --project=${PROJ_DIR}/benchmark --sysimage=${PROJ_DIR}/build/sysimage.so ${PROJ_DIR}/benchmark/run.jl ${FILE} ${MODE} ${RESULTS_DIR} &> ${RESULTS_DIR}/${FILE}.log
#julia --project=${PROJ_DIR}/benchmark --sysimage=${PROJ_DIR}/build/sysimage.so ${PROJ_DIR}/benchmark/run.jl ${FILE} ${MODE} ${RESULTS_DIR}
done

View File

@@ -3,12 +3,18 @@
# Released under the modified BSD license. See COPYING.md for more details.
module UnitCommitment

include("log.jl")
-include("instance.jl")
-include("screening.jl")
-include("model.jl")
-include("sensitivity.jl")
-include("validate.jl")
-include("convert.jl")
-include("initcond.jl")
+include("dotdict.jl")
+include("instance.jl")
+include("screening.jl")
+include("components.jl")
+include("variables.jl")
+include("constraints.jl")
+include("formulation.jl")
+#include("model.jl")
+include("model2.jl")
+include("sensitivity.jl")
+include("validate.jl")
+include("convert.jl")
+include("initcond.jl")
end

46
src/components.jl Normal file
View File

@@ -0,0 +1,46 @@
##################################################
# Component types
abstract type UCComponentType end
abstract type RequiredConstraints <: UCComponentType end
abstract type SystemConstraints <: UCComponentType end
abstract type GenerationLimits <: UCComponentType end
abstract type PiecewiseProduction <: UCComponentType end
abstract type UpDownTime <: UCComponentType end
abstract type ReserveConstraints <: UCComponentType end
abstract type RampLimits <: UCComponentType end
abstract type StartupCosts <: UCComponentType end
abstract type ShutdownCosts <: UCComponentType end
##################################################
# Components
"""
Generic component of the unit commitment problem.
Elements
===
* `name`: name of the component
* `description`: gives a brief summary of what the component adds
* `type`: reference back to the UCComponentType being modeled
* `vars`: required variables
* `constrs`: constraints that are created by this function
* `add_component`: function to add constraints and update the objective to capture this component
* `params`: extra parameters the component might use
"""
mutable struct UCComponent
"Name of the component."
name::String
"Description of what the component adds."
description::String
"Which part of the unit commitment problem is modeled by this component."
type::Type{<:UCComponentType}
"Variables that are needed for the component (subset of `var_list`)."
vars::Union{Array{Symbol},Nothing}
"Equations that are modified for the component (subset of `constr_list`)."
constrs::Union{Array{Symbol},Nothing}
"Function to add constraints and objective coefficients needed for this component to the model. Signature should be (component, mip, model)."
add_component::Union{Function,Nothing}
"Extra parameters for the component."
params::Any
end # struct UCComponent
export UCComponent
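For illustration only (this example is not part of the changes shown in this diff), a component following the struct above could be instantiated as follows; the name, symbols and the no-op `add_component` body are hypothetical placeholders:

```julia
# Hypothetical sketch: a component describing a system-wide reserve requirement.
# Field order follows the UCComponent definition above.
min_reserve_component = UCComponent(
    "Minimum reserve",                                    # name
    "Adds a system-wide spinning reserve requirement",    # description
    ReserveConstraints,                                   # type
    [:reserve],                                           # vars
    [:min_reserve],                                       # constrs
    (component, mip, model) -> nothing,                   # add_component (placeholder)
    nothing,                                              # params
)
```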

31
src/constraints.jl Normal file
View File

@@ -0,0 +1,31 @@
##################################################
# Constraints
"""
List of constraints that the model will potentially have
"""
constr_list =
[
:startup_choose,
:startup_restrict,
:segprod_limit,
:segprod_limita,
:segprod_limitb,
:prod_above_def,
:prod_limit,
:str_prod_limit,
:binary_link,
:switch_on_off,
:ramp_up,
:ramp_down,
:str_ramp_up,
:str_ramp_down,
:startstop_limit,
:startup_limit,
:shutdown_limit,
:min_uptime,
:min_downtime,
:power_balance,
:net_injection_def,
:min_reserve
]

View File

@@ -4,26 +4,26 @@
using DataStructures, JSON, GZip

-function _read_json(path::String)::OrderedDict
+function read_json(path::String)::OrderedDict
if endswith(path, ".gz")
file = GZip.gzopen(path)
else
file = open(path)
end
-return JSON.parse(file, dicttype = () -> DefaultOrderedDict(nothing))
+return JSON.parse(file, dicttype=()->DefaultOrderedDict(nothing))
end

-function _read_egret_solution(path::String)::OrderedDict
-egret = _read_json(path)
+function read_egret_solution(path::String)::OrderedDict
+egret = read_json(path)
T = length(egret["system"]["time_keys"])
solution = OrderedDict()
is_on = solution["Is on"] = OrderedDict()
production = solution["Production (MW)"] = OrderedDict()
reserve = solution["Reserve (MW)"] = OrderedDict()
production_cost = solution["Production cost (\$)"] = OrderedDict()
startup_cost = solution["Startup cost (\$)"] = OrderedDict()
for (gen_name, gen_dict) in egret["elements"]["generator"]
if endswith(gen_name, "_T") || endswith(gen_name, "_R")
gen_name = gen_name[1:end-2]
@@ -39,18 +39,18 @@ function _read_egret_solution(path::String)::OrderedDict
else
reserve[gen_name] = zeros(T)
end
startup_cost[gen_name] = zeros(T)
production_cost[gen_name] = zeros(T)
if "commitment_cost" in keys(gen_dict)
for t in 1:T
x = gen_dict["commitment"]["values"][t]
commitment_cost = gen_dict["commitment_cost"]["values"][t]
prod_above_cost = gen_dict["production_cost"]["values"][t]
prod_base_cost = gen_dict["p_cost"]["values"][1][2] * x
startup_cost[gen_name][t] = commitment_cost - prod_base_cost
production_cost[gen_name][t] = prod_above_cost + prod_base_cost
end
end
end
return solution
end
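A hypothetical usage sketch follows (the file name and generator name are made up; in the base version the helpers carry a leading underscore, i.e. `_read_egret_solution`):

```julia
# Hypothetical sketch: read an EGRET solution file and inspect one generator.
# The file name and generator name below are placeholders.
using UnitCommitment
solution = UnitCommitment.read_egret_solution("egret_solution.json.gz")
println(keys(solution))               # "Is on", "Production (MW)", ...
println(solution["Is on"]["gen1"])    # commitment vector for one generator
```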

28
src/docs/css/custom.css Normal file
View File

@@ -0,0 +1,28 @@
.navbar-default {
border-bottom: 0px;
background-color: #fff;
box-shadow: 0px 0px 15px rgba(0, 0, 0, 0.2);
}
a, .navbar-default a {
color: #06a !important;
font-weight: normal;
}
.disabled > a {
color: #999 !important;
}
.navbar-default a:hover,
.navbar-default .active,
.active > a {
background-color: #f0f0f0 !important;
}
.icon-bar {
background-color: #666 !important;
}
.navbar-collapse {
border-color: #fff !important;
}

View File

@@ -1,18 +1,7 @@
-```{sectnum}
----
-start: 2
-depth: 2
-suffix: .
----
-```
-
Data Format
===========

-Input Data Format
------------------
+## 1. Input Data Format

Instances are specified by JSON files containing the following main sections:
@@ -26,28 +15,27 @@ Instances are specified by JSON files containing the following main sections:
Each section is described in detail below. For a complete example, see [case14](https://github.com/ANL-CEEESA/UnitCommitment.jl/tree/dev/instances/matpower/case14). Each section is described in detail below. For a complete example, see [case14](https://github.com/ANL-CEEESA/UnitCommitment.jl/tree/dev/instances/matpower/case14).
-### Parameters
+### 1.1 Parameters

-This section describes system-wide parameters, such as power balance penalties, optimization parameters, such as the length of the planning horizon and the time.
+This section describes system-wide parameters, such as power balance penalties, and optimization parameters, such as the length of the planning horizon.

| Key | Description | Default | Time series?
| :----------------------------- | :------------------------------------------------ | :------: | :------------:
-| `Time horizon (h)` | Length of the planning horizon (in hours). | Required | N
-| `Time step (min)` | Length of each time step (in minutes). Must be a divisor of 60 (e.g. 60, 30, 20, 15, etc). | `60` | N
-| `Power balance penalty ($/MW)` | Penalty for system-wide shortage or surplus in production (in $/MW). This is charged per time step. For example, if there is a shortage of 1 MW for three time steps, three times this amount will be charged. | `1000.0` | Y
+| `Time (h)` | Length of the planning horizon (in hours) | Required | N
+| `Power balance penalty ($/MW)` | Penalty for system-wide shortage or surplus in production (in $/MW). This is charged per time period. For example, if there is a shortage of 1 MW for three time periods, three times this amount will be charged. | `1000.0` | Y

#### Example

```json
{
"Parameters": {
-"Time horizon (h)": 4,
+"Time (h)": 4,
"Power balance penalty ($/MW)": 1000.0
}
}
```
-### Buses
+### 1.2 Buses

This section describes the characteristics of each bus in the system.
@@ -76,40 +64,40 @@ This section describes the characteristics of each bus in the system.
``` ```
-### Generators
+### 1.3 Generators

This section describes all generators in the system, including thermal units, renewable units and virtual units.
| Key | Description | Default | Time series? | Key | Description | Default | Time series?
| :------------------------ | :------------------------------------------------| ------- | :-----------: | :------------------------ | :------------------------------------------------| ------- | :-----------:
| `Bus` | Identifier of the bus where this generator is located (string). | Required | N | `Bus` | Identifier of the bus where this generator is located (string) | Required | N
| `Production cost curve (MW)` and `Production cost curve ($)` | Parameters describing the piecewise-linear production costs. See below for more details. | Required | Y | `Production cost curve (MW)` and `Production cost curve ($)` | Parameters describing the piecewise-linear production costs. See below for more details. | Required | Y
| `Startup costs ($)` and `Startup delays (h)` | Parameters describing how much it costs to start the generator after it has been shut down for a certain amount of time. If `Startup costs ($)` and `Startup delays (h)` are set to `[300.0, 400.0]` and `[1, 4]`, for example, and the generator is shut down at time `00:00` (h:min), then it costs \$300 to start up the generator at any time between `01:00` and `03:59`, and \$400 to start the generator at time `04:00` or any time after that. The number of startup cost points is unlimited, and may be different for each generator. Startup delays must be strictly increasing and the first entry must equal `Minimum downtime (h)`. | `[0.0]` and `[1]` | N | `Startup costs ($)` and `Startup delays (h)` | Parameters describing how much it costs to start the generator after it has been shut down for a certain amount of time. If `Startup costs ($)` and `Startup delays (h)` are set to `[300.0, 400.0]` and `[1, 4]`, for example, and the generator is shut down at time `t`, then it costs 300 to start up the generator at times `t+1`, `t+2` or `t+3`, and 400 to start the generator at time `t+4` or any time after that. The number of startup cost points is unlimited, and may be different for each generator. Startup delays must be strictly increasing. | `[0.0]` and `[1]` | N
| `Minimum uptime (h)` | Minimum amount of time the generator must stay operational after starting up (in hours). For example, if the generator starts up at time `00:00` (h:min) and `Minimum uptime (h)` is set to 4, then the generator can only shut down at time `04:00`. | `1` | N | `Minimum uptime (h)` | Minimum amount of time the generator must stay operational after starting up (in hours). For example, if the generator starts up at time 1 and `Minimum uptime (h)` is set to 4, then the generator can only shut down at time 5. | `1` | N
| `Minimum downtime (h)` | Minimum amount of time the generator must stay offline after shutting down (in hours). For example, if the generator shuts down at time `00:00` (h:min) and `Minimum downtime (h)` is set to 4, then the generator can only start producing power again at time `04:00`. | `1` | N | `Minimum downtime (h)` | Minimum amount of time the generator must stay offline after shutting down (in hours). For example, if the generator shuts down at time 1 and `Minimum downtime (h)` is set to 4, then the generator can only start producing power again at time 5. | `1` | N
| `Ramp up limit (MW)` | Maximum increase in production from one time step to the next (in MW). For example, if the generator is producing 100 MW at time step 1 and if this parameter is set to 40 MW, then the generator will produce at most 140 MW at time step 2. | `+inf` | N | `Ramp up limit (MW)` | Maximum increase in production from one time period to the next (in MW). For example, if the generator is producing 100 MW at time 1 and if this parameter is set to 40 MW, then the generator will produce at most 140 MW at time 2. | `+inf` | N
| `Ramp down limit (MW)` | Maximum decrease in production from one time step to the next (in MW). For example, if the generator is producing 100 MW at time step 1 and this parameter is set to 40 MW, then the generator will produce at least 60 MW at time step 2. | `+inf` | N | `Ramp down limit (MW)` | Maximum decrease in production from one time period to the next (in MW). For example, if the generator is producing 100 MW at time 1 and this parameter is set to 40 MW, then the generator will produce at least 60 MW at time 2. | `+inf` | N
| `Startup limit (MW)` | Maximum amount of power a generator can produce immediately after starting up (in MW). For example, if `Startup limit (MW)` is set to 100 MW and the unit is off at time step 1, then it may produce at most 100 MW at time step 2.| `+inf` | N | `Startup limit (MW)` | Maximum amount of power a generator can produce immediately after starting up (in MW). | `+inf` | N
| `Shutdown limit (MW)` | Maximum amount of power a generator can produce immediately before shutting down (in MW). Specifically, the generator can only shut down at time step `t+1` if its production at time step `t` is below this limit. | `+inf` | N | `Shutdown limit (MW)` | Maximum amount of power a generator can produce immediately before shutting down (in MW). Specifically, the generator can only shut down at time `t+1` if its production at time `t` is below this limit. | `+inf` | N
| `Initial status (h)` | If set to a positive number, indicates the amount of time (in hours) the generator has been on at the beginning of the simulation, and if set to a negative number, the amount of time the generator has been off. For example, if `Initial status (h)` is `-2`, this means that the generator was off since `-02:00` (h:min). The simulation starts at time `00:00`. If `Initial status (h)` is `3`, this means that the generator was on since `-03:00`. A value of zero is not acceptable. | Required | N | `Initial status (h)` | If set to a positive number, indicates the amount of time the generator has been on at the beginning of the simulation, and if set to a negative number, the amount of time the generator has been off. For example, if `Initial status (h)` is `-2`, this means that the generator was off at simulation time `-2` and `-1`. The simulation starts at time `0`. | Required | N
| `Initial power (MW)` | Amount of power the generator at time step `-1`, immediately before the planning horizon starts. | Required | N | `Initial power (MW)` | Amount of power the generator at time period `-1`, immediately before the planning horizon starts. | Required | N
| `Must run?` | If `true`, the generator should be committed, even if that is not economical (Boolean). | `false` | Y | `Must run?` | If `true`, the generator should be committed, even that is not economical (Boolean). | `false` | Y
| `Provides spinning reserves?` | If `true`, this generator may provide spinning reserves (Boolean). | `true` | Y | `Provides spinning reserves?` | If `true`, this generator may provide spinning reserves (Boolean). | `true` | Y
#### Production costs and limits #### Production costs and limits
Production costs are represented as piecewise-linear curves. Figure 1 shows an example cost curve with three segments, where it costs \$1400, \$1600, \$2200 and \$2400 to generate, respectively, 100, 110, 130 and 135 MW of power. To model this generator, `Production cost curve (MW)` should be set to `[100, 110, 130, 135]`, and `Production cost curve ($)` should be set to `[1400, 1600, 2200, 2400]`. Production costs are represented as piecewise-linear curves. Figure 1 shows an example cost curve with three segments, where it costs 1400, 1600, 2200 and 2400 dollars to generate, respectively, 100, 110, 130 and 135 MW of power. To model this generator, `Production cost curve (MW)` should be set to `[100, 110, 130, 135]`, and `Production cost curve ($)` should be set to `[1400, 1600, 2200, 2400]`.
Note that this curve also specifies the production limits. Specifically, the first point identifies the minimum power output when the unit is operational, while the last point identifies the maximum power output. Note that this curve also specifies the production limits. Specifically, the first point identifies the minimum power output when the unit is operational, while the last point identifies the maximum power output.
<center>
-<img src="../_static/cost_curve.png" style="max-width: 500px"/>
+<img src="../images/cost_curve.png" style="max-width: 500px"/>
<div><b>Figure 1.</b> Piecewise-linear production cost curve.</div>
<br/>
</center>
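For illustration (not part of either version of the file being compared), the per-segment marginal costs implied by the example curve in Figure 1 can be computed as follows:

```julia
# Illustrative only: marginal cost ($/MW) of each segment of the example
# piecewise-linear curve in Figure 1. The increasing values confirm convexity.
mw   = [100.0, 110.0, 130.0, 135.0]
cost = [1400.0, 1600.0, 2200.0, 2400.0]
marginal = diff(cost) ./ diff(mw)    # [20.0, 30.0, 40.0]
```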
#### Additional remarks: #### Additional remarks:
* For time-dependent production limits or time-dependent production costs, the usage of nested arrays is allowed. For example, if `Production cost curve (MW)` is set to `[5.0, [10.0, 12.0, 15.0, 20.0]]`, then the unit may generate at most 10, 12, 15 and 20 MW of power during time steps 1, 2, 3 and 4, respectively. The minimum output for all time periods is fixed to at 5 MW. * For time-dependent production limits or time-dependent production costs, the usage of nested arrays is allowed. For example, if `Production cost curve (MW)` is set to `[5.0, [10.0, 12.0, 15.0, 20.0]]`, then the unit may generate at most 10, 12, 15 and 20 MW of power during time periods 1, 2, 3 and 4, respectively. The minimum output for all time periods is fixed to at 5 MW.
* There is no limit to the number of piecewise-linear segments, and different generators may have a different number of segments. * There is no limit to the number of piecewise-linear segments, and different generators may have a different number of segments.
* If `Production cost curve (MW)` and `Production cost curve ($)` both contain a single element, then the generator must produce exactly that amount of power when operational. To specify that the generator may produce any amount of power up to a certain limit `P`, the parameter `Production cost curve (MW)` should be set to `[0, P]`. * If `Production cost curve (MW)` and `Production cost curve ($)` both contain a single element, then the generator must produce exactly that amount of power when operational. To specify that the generator may produce any amount of power up to a certain limit `P`, the parameter `Production cost curve (MW)` should be set to `[0, P]`.
* Production cost curves must be convex. * Production cost curves must be convex.
@@ -145,7 +133,7 @@ Note that this curve also specifies the production limits. Specifically, the fir
} }
``` ```
-### Price-sensitive loads
+### 1.4 Price-sensitive loads

This section describes components in the system which may increase or reduce their energy consumption according to the energy prices. Fixed loads (as described in the `buses` section) are always served, regardless of the price, unless there is significant congestion in the system or insufficient production capacity. Price-sensitive loads, on the other hand, are only served if it is economical to do so.
@@ -169,7 +157,7 @@ This section describes components in the system which may increase or reduce the
} }
``` ```
-### Transmission Lines
+### 1.5 Transmission Lines

This section describes the characteristics of transmission system, such as its topology and the susceptance of each transmission line.
@@ -179,9 +167,9 @@ This section describes the characteristics of transmission system, such as its t
| `Target bus` | Identifier of the bus where the transmission line reaches. | Required | N | `Target bus` | Identifier of the bus where the transmission line reaches. | Required | N
| `Reactance (ohms)` | Reactance of the transmission line (in ohms). | Required | N | `Reactance (ohms)` | Reactance of the transmission line (in ohms). | Required | N
| `Susceptance (S)` | Susceptance of the transmission line (in siemens). | Required | N | `Susceptance (S)` | Susceptance of the transmission line (in siemens). | Required | N
| `Normal flow limit (MW)` | Maximum amount of power (in MW) allowed to flow through the line when the system is in its regular, fully-operational state. | `+inf` | Y | `Normal flow limit (MW)` | Maximum amount of power (in MW) allowed to flow through the line when the system is in its regular, fully-operational state. May be `null` is there is no limit. | `+inf` | Y
| `Emergency flow limit (MW)` | Maximum amount of power (in MW) allowed to flow through the line when the system is in degraded state (for example, after the failure of another transmission line). | `+inf` | Y | `Emergency flow limit (MW)` | Maximum amount of power (in MW) allowed to flow through the line when the system is in degraded state (for example, after the failure of another transmission line). | `+inf` | Y
| `Flow limit penalty ($/MW)` | Penalty for violating the flow limits of the transmission line (in $/MW). This is charged per time step. For example, if there is a thermal violation of 1 MW for three time steps, then three times this amount will be charged. | `5000.0` | Y | `Flow limit penalty ($/MW)` | Penalty for violating the flow limits of the transmission line (in $/MW). This is charged per time period. For example, if there is a thermal violation of 1 MW for three time periods, three times this amount will be charged. | `5000.0` | Y
#### Example #### Example
@@ -202,7 +190,7 @@ This section describes the characteristics of transmission system, such as its t
``` ```
-### Reserves
+### 1.6 Reserves

This section describes the hourly amount of operating reserves required.
@@ -226,7 +214,7 @@ This section describes the hourly amount of operating reserves required.
} }
``` ```
-### Contingencies
+### 1.7 Contingencies

This section describes credible contingency scenarios in the optimization, such as the loss of a transmission line or generator.
@@ -251,11 +239,11 @@ This section describes credible contingency scenarios in the optimization, such
} }
``` ```
-### Additional remarks
+### 1.8 Additional remarks

#### Time series parameters

-Many numerical properties in the JSON file can be specified either as a single floating point number if they are time-independent, or as an array containing exactly `T` elements, if they are time-dependent, where `T` is the number of time steps in the planning horizon. For example, both formats below are valid when `T=3`:
+Many numerical properties in the JSON file can be specified either as a single floating point number if they are time-independent, or as an array containing exactly `T` elements, where `T` is the length of the planning horizon, if they are time-dependent. For example, both formats below are valid when `T=3`:
```json ```json
{ {
@@ -264,29 +252,13 @@ Many numerical properties in the JSON file can be specified either as a single f
} }
``` ```
-The value `T` depends on both `Time horizon (h)` and `Time step (min)`, as the table below illustrates.
-
-Time horizon (h) | Time step (min) | T
-:---------------:|:---------------:|:----:
-24 | 60 | 24
-24 | 15 | 96
-24 | 5 | 288
-36 | 60 | 36
-36 | 15 | 144
-36 | 5 | 432
-
-Output Data Format
-------------------
-The output data format is also JSON-based, but it is not currently documented since we expect it to change significantly in a future version of the package.
-
-Current limitations
--------------------
-* All reserves are system-wide. Zonal reserves are not currently supported.
+#### Current limitations
+
+* All reserves are system-wide (no zonal reserves)
* Network topology remains the same for all time periods
-* Only N-1 transmission contingencies are supported. Generator contingencies are not currently supported.
+* Only N-1 transmission contingencies are supported. Generator contingencies are not supported.
* Time-varying minimum production amounts are not currently compatible with ramp/startup/shutdown limits.
+
+## 2. Output Data Format
+
+The output data format is also JSON-based, but it is not currently documented since we expect it to change significantly in a future version of the package.
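As a small illustration (not part of either version of the file), the number of time steps `T` implied by the base version's `Time horizon (h)` and `Time step (min)` parameters can be computed as:

```julia
# Illustrative only: number of time steps T implied by the base version's
# "Time horizon (h)" and "Time step (min)" parameters.
time_horizon_h = 36
time_step_min = 15
T = time_horizon_h * 60 ÷ time_step_min    # 144
```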

File diff suppressed because one or more lines are too long

View File

Binary image file; 35 KiB before and after.

42
src/docs/index.md Normal file
View File

@@ -0,0 +1,42 @@
# UnitCommitment.jl
**UnitCommitment.jl** (UC.jl) is a Julia optimization package for the Security-Constrained Unit Commitment Problem (SCUC), a fundamental optimization problem in power systems used, for example, to clear the day-ahead electricity markets. The package provides benchmark instances for the problem and Julia/JuMP implementations of state-of-the-art mixed-integer programming formulations.
### Package Components
* **Data Format:** The package proposes an extensible and fully-documented JSON-based data specification format for SCUC, developed in collaboration with Independent System Operators (ISOs), which describes the most important aspects of the problem. The format supports all the most common generator characteristics (including ramping, piecewise-linear production cost curves and time-dependent startup costs), as well as operating reserves, price-sensitive loads, transmission networks and contingencies.
* **Benchmark Instances:** The package provides a diverse collection of large-scale benchmark instances collected from the literature and extended to make them more challenging and realistic.
* **Model Implementation**: The package provides a Julia/JuMP implementation of state-of-the-art formulations and solution methods for SCUC. Our goal is to keep this implementation up-to-date, as new methods are proposed in the literature.
* **Benchmark Tools:** The package provides automated benchmark scripts to accurately evaluate the performance impact of proposed code changes.
### Documentation
* [Usage](usage.md)
* [Data Format](format.md)
* [Instances](instances.md)
### Source code
* [https://github.com/ANL-CEEESA/unitcommitment.jl](https://github.com/ANL-CEEESA/unitcommitment.jl)
### Authors
* **Alinson Santos Xavier** (Argonne National Laboratory)
* **Feng Qiu** (Argonne National Laboratory)
### Acknowledgments
* We would like to thank **Aleksandr M. Kazachkov** (University of Florida), **Yonghong Chen** (Midcontinent Independent System Operator), **Feng Pan** (Pacific Northwest National Laboratory) for valuable feedback on early versions of this package.
* Based upon work supported by **Laboratory Directed Research and Development** (LDRD) funding from Argonne National Laboratory, provided by the Director, Office of Science, of the U.S. Department of Energy under Contract No. DE-AC02-06CH11357.
### Citing
If you use UnitCommitment.jl in your research, we request that you cite the package as follows:
* Alinson S. Xavier, Feng Qiu, "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment". Zenodo (2020). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874).
If you make use of the provided instance files, we request that you additionally cite the original sources, as described in the [instances page](instances.md).
### License
Released under the modified BSD license. See `LICENSE.md` for more details.

View File

@@ -1,13 +1,4 @@
-```{sectnum}
----
-start: 3
-depth: 2
-suffix: .
----
-```
-
-Instances
-=========
+# Instances

UnitCommitment.jl provides a large collection of benchmark instances collected
from the literature and converted to a [common data format](format.md). In some cases, as indicated below, the original instances have been extended, with realistic parameters, using data-driven methods.
@@ -16,9 +7,7 @@ If you use these instances in your research, we request that you cite UnitCommit
Raw instances files are [available at our GitHub repository](https://github.com/ANL-CEEESA/UnitCommitment.jl/tree/dev/instances). Benchmark instances can also be loaded with
`UnitCommitment.read_benchmark(name)`, as explained in the [usage section](usage.md).

-MATPOWER
---------
+## 1. MATPOWER

[MATPOWER](https://github.com/MATPOWER/matpower) is an open-source package for solving power flow problems in MATLAB and Octave. It contains a number of power flow test cases, which have been widely used in the power systems literature.
@@ -36,7 +25,7 @@ Because most MATPOWER test cases were originally designed for power flow studies
For each MATPOWER test case, UC.jl provides two variations (`2017-02-01` and `2017-08-01`) corresponding respectively to a winter and to a summer test case. For each MATPOWER test case, UC.jl provides two variations (`2017-02-01` and `2017-08-01`) corresponding respectively to a winter and to a summer test case.
-### MATPOWER/UW-PSTCA
+### 1.1 MATPOWER/UW-PSTCA

A variety of smaller IEEE test cases, [compiled by University of Washington](http://labs.ece.uw.edu/pstca/), corresponding mostly to small portions of the American Electric Power System in the 1960s.
@@ -54,7 +43,7 @@ A variety of smaller IEEE test cases, [compiled by University of Washington](htt
| `matpower/case300/2017-08-01` | 300 | 69 | 411 | 320 | [MTPWR, PSTCA] | `matpower/case300/2017-08-01` | 300 | 69 | 411 | 320 | [MTPWR, PSTCA]
-### MATPOWER/Polish
+### 1.2 MATPOWER/Polish

Test cases based on the Polish 400, 220 and 110 kV networks, originally provided by **Roman Korab** (Politechnika Śląska) and corrected by the MATPOWER team.
@@ -77,7 +66,7 @@ Test cases based on the Polish 400, 220 and 110 kV networks, originally provided
| `matpower/case3375wp/2017-02-01` | 3374 | 590 | 4161 | 3245 | [MTPWR] | `matpower/case3375wp/2017-02-01` | 3374 | 590 | 4161 | 3245 | [MTPWR]
| `matpower/case3375wp/2017-08-01` | 3374 | 590 | 4161 | 3245 | [MTPWR] | `matpower/case3375wp/2017-08-01` | 3374 | 590 | 4161 | 3245 | [MTPWR]
### MATPOWER/PEGASE ### 1.3 MATPOWER/PEGASE
Test cases from the [Pan European Grid Advanced Simulation and State Estimation (PEGASE) project](https://cordis.europa.eu/project/id/211407), describing part of the European high voltage transmission network. Test cases from the [Pan European Grid Advanced Simulation and State Estimation (PEGASE) project](https://cordis.europa.eu/project/id/211407), describing part of the European high voltage transmission network.
@@ -94,7 +83,7 @@ Test cases from the [Pan European Grid Advanced Simulation and State Estimation
| `matpower/case13659pegase/2017-02-01` | 13659 | 4092 | 20467 | 13932 | [JoFlMa16, FlPaCa13, MTPWR] | `matpower/case13659pegase/2017-02-01` | 13659 | 4092 | 20467 | 13932 | [JoFlMa16, FlPaCa13, MTPWR]
| `matpower/case13659pegase/2017-08-01` | 13659 | 4092 | 20467 | 13932 | [JoFlMa16, FlPaCa13, MTPWR] | `matpower/case13659pegase/2017-08-01` | 13659 | 4092 | 20467 | 13932 | [JoFlMa16, FlPaCa13, MTPWR]
### MATPOWER/RTE ### 1.4 MATPOWER/RTE
Test cases from the R&D Division at [Reseau de Transport d'Electricite](https://www.rte-france.com) representing the size and complexity of the French very high voltage transmission network. Test cases from the R&D Division at [Reseau de Transport d'Electricite](https://www.rte-france.com) representing the size and complexity of the French very high voltage transmission network.
@@ -118,12 +107,11 @@ Test cases from the R&D Division at [Reseau de Transport d'Electricite](https://
| `matpower/case6515rte/2017-08-01` | 6515 | 1368 | 9037 | 6063 | [MTPWR, JoFlMa16] | `matpower/case6515rte/2017-08-01` | 6515 | 1368 | 9037 | 6063 | [MTPWR, JoFlMa16]
PGLIB-UC Instances ## 2. PGLIB-UC Instances
------------------
[PGLIB-UC](https://github.com/power-grid-lib/pglib-uc) is a benchmark library curated and maintained by the [IEEE PES Task Force on Benchmarks for Validation of Emerging Power System Algorithms](https://power-grid-lib.github.io/). These test cases have been used in [KnOsWa20]. [PGLIB-UC](https://github.com/power-grid-lib/pglib-uc) is a benchmark library curated and maintained by the [IEEE PES Task Force on Benchmarks for Validation of Emerging Power System Algorithms](https://power-grid-lib.github.io/). These test cases have been used in [KnOsWa20].
### PGLIB-UC/California ### 2.1 PGLIB-UC/California
Test cases based on publicly available data from the California ISO. For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc). Test cases based on publicly available data from the California ISO. For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
@@ -151,7 +139,7 @@ Test cases based on publicly available data from the California ISO. For more de
| `pglib-uc/ca/Scenario400_reserves_5` | 1 | 611 | 0 | 0 | [KnOsWa20] | `pglib-uc/ca/Scenario400_reserves_5` | 1 | 611 | 0 | 0 | [KnOsWa20]
### PGLIB-UC/FERC ### 2.2 PGLIB-UC/FERC
Test cases based on a publicly available [unit commitment test case produced by the Federal Energy Regulatory Commission](https://www.ferc.gov/industries-data/electric/power-sales-and-markets/increasing-efficiency-through-improved-software-1). For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc). Test cases based on a publicly available [unit commitment test case produced by the Federal Energy Regulatory Commission](https://www.ferc.gov/industries-data/electric/power-sales-and-markets/increasing-efficiency-through-improved-software-1). For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
@@ -183,7 +171,7 @@ Test cases based on a publicly available [unit commitment test case produced by
| `pglib-uc/ferc/2015-12-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12] | `pglib-uc/ferc/2015-12-01_lw` | 1 | 935 | 0 | 0 | [KnOsWa20, KrHiOn12]
### PGLIB-UC/RTS-GMLC ### 2.3 PGLIB-UC/RTS-GMLC
[RTS-GMLC](https://github.com/GridMod/RTS-GMLC) is an updated version of the RTS-96 test system produced by the United States Department of Energy's [Grid Modernization Laboratory Consortium](https://gmlc.doe.gov/). The PGLIB-UC/RTS-GMLC instances are modified versions of the original RTS-GMLC instances, with modified ramp-rates and without a transmission network. For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc). [RTS-GMLC](https://github.com/GridMod/RTS-GMLC) is an updated version of the RTS-96 test system produced by the United States Department of Energy's [Grid Modernization Laboratory Consortium](https://gmlc.doe.gov/). The PGLIB-UC/RTS-GMLC instances are modified versions of the original RTS-GMLC instances, with modified ramp-rates and without a transmission network. For more details, see [PGLIB-UC case file overview](https://github.com/power-grid-lib/pglib-uc).
@@ -202,9 +190,7 @@ Test cases based on a publicly available [unit commitment test case produced by
| `pglib-uc/rts_gmlc/2020-11-25` | 1 | 154 | 0 | 0 | [BaBlEh19] | `pglib-uc/rts_gmlc/2020-11-25` | 1 | 154 | 0 | 0 | [BaBlEh19]
| `pglib-uc/rts_gmlc/2020-12-23` | 1 | 154 | 0 | 0 | [BaBlEh19] | `pglib-uc/rts_gmlc/2020-12-23` | 1 | 154 | 0 | 0 | [BaBlEh19]
## 3. OR-LIB/UC
OR-LIB/UC
---------
[OR-LIB](http://people.brunel.ac.uk/~mastjjb/jeb/info.html) is a collection of test data sets for a variety of operations research problems, including unit commitment. The UC instances in OR-LIB are synthetic instances generated by a [random problem generator](http://groups.di.unipi.it/optimize/Data/UC.html) developed by the [Operations Research Group at University of Pisa](http://groups.di.unipi.it/optimize/). These test cases have been used in [FrGe06] and many other publications. [OR-LIB](http://people.brunel.ac.uk/~mastjjb/jeb/info.html) is a collection of test data sets for a variety of operations research problems, including unit commitment. The UC instances in OR-LIB are synthetic instances generated by a [random problem generator](http://groups.di.unipi.it/optimize/Data/UC.html) developed by the [Operations Research Group at University of Pisa](http://groups.di.unipi.it/optimize/). These test cases have been used in [FrGe06] and many other publications.
@@ -253,9 +239,7 @@ OR-LIB/UC
| `or-lib/200_0_8_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] | `or-lib/200_0_8_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
| `or-lib/200_0_9_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06] | `or-lib/200_0_9_w` | 24 | 1 | 200 | 0 | 0 | [ORLIB, FrGe06]
## 4. Tejada19
Tejada19
--------
Test cases used in [TeLuSa19]. These instances are similar to OR-LIB/UC, in the sense that they use the same random problem generator, but are much larger. Test cases used in [TeLuSa19]. These instances are similar to OR-LIB/UC, in the sense that they use the same random problem generator, but are much larger.
@@ -311,9 +295,7 @@ Tejada19
| `tejada19/UC_168h_192g` | 168 | 1 | 192 | 0 | 0 | [TeLuSa19] | `tejada19/UC_168h_192g` | 168 | 1 | 192 | 0 | 0 | [TeLuSa19]
| `tejada19/UC_168h_199g` | 168 | 1 | 199 | 0 | 0 | [TeLuSa19] | `tejada19/UC_168h_199g` | 168 | 1 | 199 | 0 | 0 | [TeLuSa19]
## 5. References
References
----------
* [UCJL] **Alinson S. Xavier, Feng Qiu.** "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment". Zenodo (2020). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874) * [UCJL] **Alinson S. Xavier, Feng Qiu.** "UnitCommitment.jl: A Julia/JuMP Optimization Package for Security-Constrained Unit Commitment". Zenodo (2020). [DOI: 10.5281/zenodo.4269874](https://doi.org/10.5281/zenodo.4269874)

8
src/docs/js/mathjax.js Normal file

@@ -0,0 +1,8 @@
MathJax.Hub.Config({
"tex2jax": { inlineMath: [ [ '$', '$' ] ] }
});
MathJax.Hub.Config({
config: ["MMLorHTML.js"],
jax: ["input/TeX", "output/HTML-CSS", "output/NativeMML"],
extensions: ["MathMenu.js", "MathZoom.js"]
});


@@ -1,21 +1,11 @@
# Usage
## 1. Installation
UnitCommitment.jl was tested and developed with [Julia 1.5](https://julialang.org/). To install Julia, please follow the [installation guide on the official Julia website](https://julialang.org/downloads/platform.html). To install UnitCommitment.jl, run the Julia interpreter, type `]` to open the package manager, then type:
```text
pkg> add UnitCommitment
```
To test that the package has been correctly installed, run:
@@ -28,10 +18,9 @@ If all tests pass, the package should now be ready to be used by any Julia scrip
To solve the optimization models, a mixed-integer linear programming (MILP) solver is also required. Please see the [JuMP installation guide](https://jump.dev/JuMP.jl/stable/installation/) for more instructions on installing a solver. Typical open-source choices are [Cbc](https://github.com/JuliaOpt/Cbc.jl) and [GLPK](https://github.com/JuliaOpt/GLPK.jl). In the instructions below, Cbc will be used, but any other MILP solver listed in the JuMP installation guide should also be compatible.
## 2. Typical Usage
### 2.1 Solving user-provided instances
The first step to use UC.jl is to construct a JSON file describing your unit commitment instance. See the [data format page](format.md) for a complete description of the data format UC.jl expects. The next steps, as shown below, are to read the instance from file, construct the optimization model, run the optimization and extract the optimal solution.
@@ -44,22 +33,20 @@ using UnitCommitment
instance = UnitCommitment.read("/path/to/input.json")

# Construct optimization model
model = UnitCommitment.build_model(instance=instance,
                                   optimizer=Cbc.Optimizer)

# Solve model
UnitCommitment.optimize!(model)

# Extract solution and write it to a file
solution = UnitCommitment.get_solution(model)
open("/path/to/output.json", "w") do file
    JSON.print(file, solution, 2)
end
```
### 2.2 Solving benchmark instances
As described in the [Instances page](instances.md), UnitCommitment.jl contains a number of benchmark instances collected from the literature. To solve one of these instances individually, instead of constructing your own, the function `read_benchmark` can be used:
@@ -68,15 +55,15 @@ using UnitCommitment
instance = UnitCommitment.read_benchmark("matpower/case3375wp/2017-02-01")
```
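The remaining steps (building the model, optimizing and extracting the solution) are the same as in the previous section. For instance:

```julia
using Cbc
using UnitCommitment

instance = UnitCommitment.read_benchmark("matpower/case3375wp/2017-02-01")
model = UnitCommitment.build_model(instance=instance,
                                   optimizer=Cbc.Optimizer)
UnitCommitment.optimize!(model)
solution = UnitCommitment.get_solution(model)
```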
## 3. Advanced usage
### 3.1 Modifying the formulation
For the time being, the recommended way of modifying the MILP formulation used by UC.jl is to create a local copy of our git repository and directly modify the source code of the package. In a future version, it will be possible to switch between multiple formulations, or to simply add/remove constraints after the model has been generated.
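A `formulation` keyword is also accepted by `build_model` in `src/model2.jl` (a vector of `UCComponent`s, defaulting to `UnitCommitment.DefaultFormulation`). A minimal sketch, assuming this keyword and the default formulation vector are available in your version of the package:

```julia
using Cbc
using UnitCommitment

instance = UnitCommitment.read_benchmark("matpower/case300/2017-08-01")

# Start from the default list of formulation components; a customized
# formulation would add, remove or replace entries in this vector.
formulation = UnitCommitment.DefaultFormulation

model = UnitCommitment.build_model(instance=instance,
                                   optimizer=Cbc.Optimizer,
                                   formulation=formulation)
```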
### 3.2 Generating initial conditions
When creating random unit commitment instances for benchmark purposes, it is often hard to compute, in advance, sensible initial conditions for all generators. Setting initial conditions naively (for example, making all generators initially off and producing no power) can easily cause the instance to become infeasible due to excessive ramping. Initial conditions can also make it hard to modify existing instances. For example, increasing the system load without carefully modifying the initial conditions may make the problem infeasible or unrealistically challenging to solve.
@@ -97,11 +84,10 @@ model = UnitCommitment.build_model(instance, Cbc.Optimizer)
UnitCommitment.optimize!(model)
```

!!! warning
    The function `generate_initial_conditions!` may return different initial conditions after each call, even if the same instance and the same optimizer are provided. The particular algorithm may also change in a future version of UC.jl. For these reasons, it is recommended that you generate initial conditions exactly once for each instance and store them for later use.
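A self-contained sketch of this workflow, assuming Cbc as the MILP solver:

```julia
using Cbc
using UnitCommitment

instance = UnitCommitment.read_benchmark("matpower/case300/2017-08-01")

# Compute feasible initial conditions by solving a single-period MIP.
# The instance is modified in place.
UnitCommitment.generate_initial_conditions!(instance, Cbc.Optimizer)

model = UnitCommitment.build_model(instance=instance,
                                   optimizer=Cbc.Optimizer)
UnitCommitment.optimize!(model)
```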
### 3.3 Verifying solutions
When developing new formulations, it is very easy to introduce subtle errors in the model that result in incorrect solutions. To help with this, UC.jl includes a utility function that verifies whether a given solution is feasible and, if not, prints all the validation errors it found. The implementation of this function is completely independent from the implementation of the optimization model, and can therefore be used to validate it. The function can also be used to verify solutions produced by other optimization packages, as long as they follow the [UC.jl data format](format.md).
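A short sketch, assuming the verification function is exposed as `UnitCommitment.validate` (adjust the name if the package exports it differently):

```julia
using JSON
using UnitCommitment

instance = UnitCommitment.read("/path/to/input.json")
solution = JSON.parsefile("/path/to/output.json")

# Returns true if the solution satisfies all constraints;
# otherwise, the individual violations are printed.
UnitCommitment.validate(instance, solution)
```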

68
src/dotdict.jl Normal file

@@ -0,0 +1,68 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
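# DotDict is a small wrapper around Dict that allows dot-style access (d.key)
# in addition to indexing (d[:key]). It is used, e.g., to hold model variables,
# constraints and expressions by name.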
struct DotDict
inner::Dict
end
DotDict() = DotDict(Dict())
function Base.setproperty!(d::DotDict, key::Symbol, value)
setindex!(getfield(d, :inner), value, key)
end
function Base.getproperty(d::DotDict, key::Symbol)
(key == :inner ? getfield(d, :inner) : d.inner[key])
end
function Base.getindex(d::DotDict, key::Int64)
d.inner[Symbol(key)]
end
function Base.getindex(d::DotDict, key::Symbol)
d.inner[key]
end
function Base.keys(d::DotDict)
keys(d.inner)
end
function Base.values(d::DotDict)
values(d.inner)
end
function Base.iterate(d::DotDict)
iterate(values(d.inner))
end
function Base.iterate(d::DotDict, v::Int64)
iterate(values(d.inner), v)
end
function Base.length(d::DotDict)
length(values(d.inner))
end
function Base.show(io::IO, d::DotDict)
print(io, "DotDict with $(length(keys(d.inner))) entries:\n")
count = 0
for k in keys(d.inner)
count += 1
if count > 10
print(io, " ...\n")
break
end
print(io, " :$(k) => $(d.inner[k])\n")
end
end
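# Recursively convert a JSON-style Dict{String, Any} (including nested ones)
# into DotDicts; values of any other type are returned unchanged.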
function recursive_to_dot_dict(el)
if typeof(el) == Dict{String, Any}
return DotDict(Dict(Symbol(k) => recursive_to_dot_dict(el[k]) for k in keys(el)))
else
return el
end
end
export recursive_to_dot_dict
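# Illustrative usage (not part of the package API):
#
#   d = recursive_to_dot_dict(Dict{String, Any}("a" => Dict{String, Any}("b" => 1)))
#   d.a.b    # == 1
#   d[:a].b  # same entry, via indexing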

1711
src/formulation.jl Normal file

File diff suppressed because it is too large


@@ -11,39 +11,39 @@ Generates feasible initial conditions for the given instance, by constructing
and solving a single-period mixed-integer optimization problem, using the given
optimizer. The instance is modified in-place.
"""
function generate_initial_conditions!(instance::UnitCommitmentInstance,
                                      optimizer)
    G = instance.units
    B = instance.buses
    t = 1
    mip = JuMP.Model(optimizer)

    # Decision variables
    @variable(mip, x[G], Bin)
    @variable(mip, p[G] >= 0)

    # Constraint: Minimum power
    @constraint(mip,
                min_power[g in G],
                p[g] >= g.min_power[t] * x[g])

    # Constraint: Maximum power
    @constraint(mip,
                max_power[g in G],
                p[g] <= g.max_power[t] * x[g])

    # Constraint: Production equals demand
    @constraint(mip,
                power_balance,
                sum(b.load[t] for b in B) == sum(p[g] for g in G))

    # Constraint: Must run
    for g in G
        if g.must_run[t]
            @constraint(mip, x[g] == 1)
        end
    end

    # Objective function
    function cost_slope(g)
        mw = g.min_power[t]
@@ -58,10 +58,12 @@ function generate_initial_conditions!(
            return c / mw
        end
    end

    @objective(mip,
               Min,
               sum(p[g] * cost_slope(g) for g in G))

    JuMP.optimize!(mip)
    for g in G
        if JuMP.value(x[g]) > 0
            g.initial_power = JuMP.value(p[g])
@@ -71,5 +73,4 @@ function generate_initial_conditions!(
            g.initial_status = -24
        end
    end
end


@@ -5,35 +5,42 @@
using Printf
using JSON
using DataStructures
import Base: getindex, time
import GZip

abstract type UCElement end
abstract type Time <: UCElement end

mutable struct Bus <: UCElement
    name::String
    offset::Int
    load::Array{Float64}
    units::Array
    price_sensitive_loads::Array
end

mutable struct CostSegment <: UCElement
    mw::Array{Float64}
    cost::Array{Float64}
end

mutable struct StartupCategory <: UCElement
    delay::Int
    cost::Float64
end

mutable struct Unit <: UCElement
    name::String
    bus::Bus
    max_power::Array{Float64}
    min_power::Array{Float64}
    must_run::Array{Bool}
    min_power_cost::Array{Float64}
    cost_segments::Array{CostSegment}
    min_uptime::Int
    min_downtime::Int
    ramp_up_limit::Float64
@@ -42,148 +49,135 @@ mutable struct Unit
    shutdown_limit::Float64
    initial_status::Union{Int,Nothing}
    initial_power::Union{Float64,Nothing}
    provides_spinning_reserves::Array{Bool}
    startup_categories::Array{StartupCategory}
end # Unit

mutable struct TransmissionLine <: UCElement
    name::String
    offset::Int
    source::Bus
    target::Bus
    reactance::Float64
    susceptance::Float64
    normal_flow_limit::Array{Float64}
    emergency_flow_limit::Array{Float64}
    flow_limit_penalty::Array{Float64}
end

mutable struct Reserves <: UCElement
    spinning::Array{Float64}
end

mutable struct Contingency <: UCElement
    name::String
    lines::Array{TransmissionLine}
    units::Array{Unit}
end

mutable struct PriceSensitiveLoad <: UCElement
    name::String
    bus::Bus
    demand::Array{Float64}
    revenue::Array{Float64}
end

mutable struct UnitCommitmentInstance
    time::Int
    power_balance_penalty::Array{Float64}
    "Penalty for failing to meet reserve requirement."
    shortfall_penalty::Array{Float64}
    units::Array{Unit}
    buses::Array{Bus}
    lines::Array{TransmissionLine}
    reserves::Reserves
    contingencies::Array{Contingency}
    price_sensitive_loads::Array{PriceSensitiveLoad}
end
function Base.show(io::IO, instance::UnitCommitmentInstance) function Base.show(io::IO, instance::UnitCommitmentInstance)
print(io, "UnitCommitmentInstance(") print(io, "UnitCommitmentInstance with ")
print(io, "$(length(instance.units)) units, ") print(io, "$(length(instance.units)) units, ")
print(io, "$(length(instance.buses)) buses, ") print(io, "$(length(instance.buses)) buses, ")
print(io, "$(length(instance.lines)) lines, ") print(io, "$(length(instance.lines)) lines, ")
print(io, "$(length(instance.contingencies)) contingencies, ") print(io, "$(length(instance.contingencies)) contingencies, ")
print( print(io, "$(length(instance.price_sensitive_loads)) price sensitive loads")
io,
"$(length(instance.price_sensitive_loads)) price sensitive loads, ",
)
print(io, "$(instance.time) time steps")
print(io, ")")
return
end end
function read_benchmark(name::AbstractString)::UnitCommitmentInstance
function read_benchmark(name::AbstractString) :: UnitCommitmentInstance
basedir = dirname(@__FILE__) basedir = dirname(@__FILE__)
return UnitCommitment.read("$basedir/../instances/$name.json.gz") return UnitCommitment.read("$basedir/../instances/$name.json.gz")
end end
function read(path::AbstractString)::UnitCommitmentInstance function read(path::AbstractString)::UnitCommitmentInstance
if endswith(path, ".gz") if endswith(path, ".gz")
return _read(gzopen(path)) return read(GZip.gzopen(path))
else else
return _read(open(path)) return read(open(path))
end end
end end
function _read(file::IO)::UnitCommitmentInstance
return _from_json(
JSON.parse(file, dicttype = () -> DefaultOrderedDict(nothing)),
)
end
function _from_json(json; repair = true) function read(file::IO)::UnitCommitmentInstance
return from_json(JSON.parse(file, dicttype=()->DefaultOrderedDict(nothing)))
end
function from_json(json; fix=true)
units = Unit[] units = Unit[]
buses = Bus[] buses = Bus[]
contingencies = Contingency[] contingencies = Contingency[]
lines = TransmissionLine[] lines = TransmissionLine[]
loads = PriceSensitiveLoad[] loads = PriceSensitiveLoad[]
T = json["Parameters"]["Time (h)"]
function scalar(x; default = nothing)
x !== nothing || return default name_to_bus = Dict{String, Bus}()
return x name_to_line = Dict{String, TransmissionLine}()
end name_to_unit = Dict{String, Unit}()
time_horizon = json["Parameters"]["Time (h)"] function timeseries(x; default=nothing)
if time_horizon === nothing
time_horizon = json["Parameters"]["Time horizon (h)"]
end
time_horizon !== nothing || error("Missing parameter: Time horizon (h)")
time_step = scalar(json["Parameters"]["Time step (min)"], default = 60)
(60 % time_step == 0) ||
error("Time step $time_step is not a divisor of 60")
time_multiplier = 60 ÷ time_step
T = time_horizon * time_multiplier
name_to_bus = Dict{String,Bus}()
name_to_line = Dict{String,TransmissionLine}()
name_to_unit = Dict{String,Unit}()
function timeseries(x; default = nothing)
x !== nothing || return default x !== nothing || return default
x isa Array || return [x for t in 1:T] x isa Array || return [x for t in 1:T]
return x return x
end end
function scalar(x; default=nothing)
x !== nothing || return default
x
end
# Read parameters # Read parameters
power_balance_penalty = timeseries( power_balance_penalty = timeseries(json["Parameters"]["Power balance penalty (\$/MW)"],
json["Parameters"]["Power balance penalty (\$/MW)"], default=[1000.0 for t in 1:T])
default = [1000.0 for t in 1:T], shortfall_penalty = timeseries(json["Parameters"]["Reserve shortfall penalty (\$/MW)"],
) default=[0. for t in 1:T])
# Read buses # Read buses
for (bus_name, dict) in json["Buses"] for (bus_name, dict) in json["Buses"]
bus = Bus( bus = Bus(bus_name,
bus_name, length(buses),
length(buses), timeseries(dict["Load (MW)"]),
timeseries(dict["Load (MW)"]), Unit[],
Unit[], PriceSensitiveLoad[])
PriceSensitiveLoad[],
)
name_to_bus[bus_name] = bus name_to_bus[bus_name] = bus
push!(buses, bus) push!(buses, bus)
end end
# Read units # Read units
for (unit_name, dict) in json["Generators"] for (unit_name, dict) in json["Generators"]
bus = name_to_bus[dict["Bus"]] bus = name_to_bus[dict["Bus"]]
# Read production cost curve # Read production cost curve
K = length(dict["Production cost curve (MW)"]) K = length(dict["Production cost curve (MW)"])
curve_mw = hcat( curve_mw = hcat([timeseries(dict["Production cost curve (MW)"][k]) for k in 1:K]...)
[timeseries(dict["Production cost curve (MW)"][k]) for k in 1:K]..., curve_cost = hcat([timeseries(dict["Production cost curve (\$)"][k]) for k in 1:K]...)
)
curve_cost = hcat(
[timeseries(dict["Production cost curve (\$)"][k]) for k in 1:K]...,
)
min_power = curve_mw[:, 1] min_power = curve_mw[:, 1]
max_power = curve_mw[:, K] max_power = curve_mw[:, K]
min_power_cost = curve_cost[:, 1] min_power_cost = curve_cost[:, 1]
@@ -191,152 +185,128 @@ function _from_json(json; repair = true)
for k in 2:K for k in 2:K
amount = curve_mw[:, k] - curve_mw[:, k-1] amount = curve_mw[:, k] - curve_mw[:, k-1]
cost = (curve_cost[:, k] - curve_cost[:, k-1]) ./ amount cost = (curve_cost[:, k] - curve_cost[:, k-1]) ./ amount
replace!(cost, NaN => 0.0) replace!(cost, NaN=>0.0)
push!(segments, CostSegment(amount, cost)) push!(segments, CostSegment(amount, cost))
end end
# Read startup costs # Read startup costs
startup_delays = scalar(dict["Startup delays (h)"], default = [1]) startup_delays = scalar(dict["Startup delays (h)"], default=[1])
startup_costs = scalar(dict["Startup costs (\$)"], default = [0.0]) startup_costs = scalar(dict["Startup costs (\$)"], default=[0.])
startup_categories = StartupCategory[] startup_categories = StartupCategory[]
for k in 1:length(startup_delays) for k in 1:length(startup_delays)
push!( push!(startup_categories, StartupCategory(startup_delays[k],
startup_categories, startup_costs[k]))
StartupCategory(
startup_delays[k] .* time_multiplier,
startup_costs[k],
),
)
end end
# Read and validate initial conditions # Read and validate initial conditions
initial_power = scalar(dict["Initial power (MW)"], default = nothing) initial_power = scalar(dict["Initial power (MW)"], default=nothing)
initial_status = scalar(dict["Initial status (h)"], default = nothing) initial_status = scalar(dict["Initial status (h)"], default=nothing)
if initial_power === nothing if initial_power === nothing
initial_status === nothing || initial_status === nothing || error("unit $unit_name has initial status but no initial power")
error("unit $unit_name has initial status but no initial power")
else else
initial_status !== nothing || initial_status !== nothing || error("unit $unit_name has initial power but no initial status")
error("unit $unit_name has initial power but no initial status") initial_status != 0 || error("unit $unit_name has invalid initial status")
initial_status != 0 ||
error("unit $unit_name has invalid initial status")
if initial_status < 0 && initial_power > 1e-3 if initial_status < 0 && initial_power > 1e-3
error("unit $unit_name has invalid initial power") error("unit $unit_name has invalid initial power")
end end
initial_status *= time_multiplier
end end
unit = Unit( unit = Unit(unit_name,
unit_name, bus,
bus, max_power,
max_power, min_power,
min_power, timeseries(dict["Must run?"], default=[false for t in 1:T]),
timeseries(dict["Must run?"], default = [false for t in 1:T]), min_power_cost,
min_power_cost, segments,
segments, scalar(dict["Minimum uptime (h)"], default=1),
scalar(dict["Minimum uptime (h)"], default = 1) * time_multiplier, scalar(dict["Minimum downtime (h)"], default=1),
scalar(dict["Minimum downtime (h)"], default = 1) * time_multiplier, scalar(dict["Ramp up limit (MW)"], default=1e6),
scalar(dict["Ramp up limit (MW)"], default = 1e6), scalar(dict["Ramp down limit (MW)"], default=1e6),
scalar(dict["Ramp down limit (MW)"], default = 1e6), scalar(dict["Startup limit (MW)"], default=1e6),
scalar(dict["Startup limit (MW)"], default = 1e6), scalar(dict["Shutdown limit (MW)"], default=1e6),
scalar(dict["Shutdown limit (MW)"], default = 1e6), initial_status,
initial_status, initial_power,
initial_power, timeseries(dict["Provides spinning reserves?"],
timeseries( default=[true for t in 1:T]),
dict["Provides spinning reserves?"], startup_categories)
default = [true for t in 1:T],
),
startup_categories,
)
push!(bus.units, unit) push!(bus.units, unit)
name_to_unit[unit_name] = unit name_to_unit[unit_name] = unit
push!(units, unit) push!(units, unit)
end end
# Read reserves # Read reserves
reserves = Reserves(zeros(T)) reserves = Reserves(zeros(T))
if "Reserves" in keys(json) if "Reserves" in keys(json)
reserves.spinning = reserves.spinning = timeseries(json["Reserves"]["Spinning (MW)"],
timeseries(json["Reserves"]["Spinning (MW)"], default = zeros(T)) default=zeros(T))
end end
# Read transmission lines # Read transmission lines
if "Transmission lines" in keys(json) if "Transmission lines" in keys(json)
for (line_name, dict) in json["Transmission lines"] for (line_name, dict) in json["Transmission lines"]
line = TransmissionLine( line = TransmissionLine(line_name,
line_name, length(lines) + 1,
length(lines) + 1, name_to_bus[dict["Source bus"]],
name_to_bus[dict["Source bus"]], name_to_bus[dict["Target bus"]],
name_to_bus[dict["Target bus"]], scalar(dict["Reactance (ohms)"]),
scalar(dict["Reactance (ohms)"]), scalar(dict["Susceptance (S)"]),
scalar(dict["Susceptance (S)"]), timeseries(dict["Normal flow limit (MW)"],
timeseries( default=[1e8 for t in 1:T]),
dict["Normal flow limit (MW)"], timeseries(dict["Emergency flow limit (MW)"],
default = [1e8 for t in 1:T], default=[1e8 for t in 1:T]),
), timeseries(dict["Flow limit penalty (\$/MW)"],
timeseries( default=[5000.0 for t in 1:T]))
dict["Emergency flow limit (MW)"],
default = [1e8 for t in 1:T],
),
timeseries(
dict["Flow limit penalty (\$/MW)"],
default = [5000.0 for t in 1:T],
),
)
name_to_line[line_name] = line name_to_line[line_name] = line
push!(lines, line) push!(lines, line)
end end
end end
# Read contingencies # Read contingencies
if "Contingencies" in keys(json) if "Contingencies" in keys(json)
for (cont_name, dict) in json["Contingencies"] for (cont_name, dict) in json["Contingencies"]
affected_units = Unit[] affected_units = Unit[]
affected_lines = TransmissionLine[] affected_lines = TransmissionLine[]
if "Affected lines" in keys(dict) if "Affected lines" in keys(dict)
affected_lines = affected_lines = [name_to_line[l] for l in dict["Affected lines"]]
[name_to_line[l] for l in dict["Affected lines"]]
end end
if "Affected units" in keys(dict) if "Affected units" in keys(dict)
affected_units = affected_units = [name_to_unit[u] for u in dict["Affected units"]]
[name_to_unit[u] for u in dict["Affected units"]]
end end
cont = Contingency(cont_name, affected_lines, affected_units) cont = Contingency(cont_name, affected_lines, affected_units)
push!(contingencies, cont) push!(contingencies, cont)
end end
end end
# Read price-sensitive loads # Read price-sensitive loads
if "Price-sensitive loads" in keys(json) if "Price-sensitive loads" in keys(json)
for (load_name, dict) in json["Price-sensitive loads"] for (load_name, dict) in json["Price-sensitive loads"]
bus = name_to_bus[dict["Bus"]] bus = name_to_bus[dict["Bus"]]
load = PriceSensitiveLoad( load = PriceSensitiveLoad(load_name,
load_name, bus,
bus, timeseries(dict["Demand (MW)"]),
timeseries(dict["Demand (MW)"]), timeseries(dict["Revenue (\$/MW)"]),
timeseries(dict["Revenue (\$/MW)"]), )
)
push!(bus.price_sensitive_loads, load) push!(bus.price_sensitive_loads, load)
push!(loads, load) push!(loads, load)
end end
end end
instance = UnitCommitmentInstance( instance = UnitCommitmentInstance(T,
T, power_balance_penalty,
power_balance_penalty, shortfall_penalty,
units, units,
buses, buses,
lines, lines,
reserves, reserves,
contingencies, contingencies,
loads, loads)
) if fix
if repair UnitCommitment.fix!(instance)
UnitCommitment.repair!(instance)
end end
return instance return instance
end end
""" """
    slice(instance, range)
@@ -352,10 +322,7 @@ Example
modified = UnitCommitment.slice(instance, 1:2)
"""
function slice(instance::UnitCommitmentInstance, range::UnitRange{Int})::UnitCommitmentInstance
    modified = deepcopy(instance)
    modified.time = length(range)
    modified.power_balance_penalty = modified.power_balance_penalty[range]
@@ -386,4 +353,5 @@ function slice(
    return modified
end

export UnitCommitmentInstance


@@ -7,47 +7,36 @@ using Base.CoreLogging, Logging, Printf
struct TimeLogger <: AbstractLogger
    initial_time::Float64
    file::Union{Nothing, IOStream}
    screen_log_level
    io_log_level
end

function TimeLogger(;
    initial_time::Float64,
    file::Union{Nothing, IOStream} = nothing,
    screen_log_level = CoreLogging.Info,
    io_log_level = CoreLogging.Info,
) :: TimeLogger
    return TimeLogger(initial_time, file, screen_log_level, io_log_level)
end

min_enabled_level(logger::TimeLogger) = logger.io_log_level
shouldlog(logger::TimeLogger, level, _module, group, id) = true

function handle_message(logger::TimeLogger,
                        level,
                        message,
                        _module,
                        group,
                        id,
                        filepath,
                        line;
                        kwargs...)
    elapsed_time = time() - logger.initial_time
    time_string = @sprintf("[%12.3f] ", elapsed_time)
    if level >= logger.screen_log_level
        print(time_string)
        println(message)
    end
    if logger.file !== nothing && level >= logger.io_log_level
@@ -58,7 +47,4 @@ function handle_message(
    end
end

export TimeLogger
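# Usage sketch (illustrative): install the logger globally so that subsequent
# @info / @warn calls are prefixed with the elapsed time, in seconds, since
# `initial_time`.
#
#   using Logging
#   global_logger(TimeLogger(initial_time = time()))
#   @info "Building model..."   # prints e.g. [       0.002] Building model...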

File diff suppressed because it is too large

475
src/model2.jl Normal file

@@ -0,0 +1,475 @@
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
# Written by Alinson S. Xavier <axavier@anl.gov>
using JuMP, MathOptInterface, DataStructures
import JuMP: value, fix, set_name
# Extend some JuMP functions so that decision variables can be safely replaced by
# (constant) floating point numbers.
function value(x::Float64)
x
end
function fix(x::Float64, v::Float64; force)
abs(x - v) < 1e-6 || error("Value mismatch: $x != $v")
end
function set_name(x::Float64, n::String)
# nop
end
"""
Create a JuMP model using the variables and constraints defined by
the collection of `UCComponent`s in `formulation`.
Parameters
===
* `isf`: injection shift factors
* `lodf`: line outage distribution factors
"""
function build_model(;
filename::Union{String, Nothing}=nothing,
instance::Union{UnitCommitmentInstance, Nothing}=nothing,
isf::Union{Array{Float64,2}, Nothing}=nothing,
lodf::Union{Array{Float64,2}, Nothing}=nothing,
isf_cutoff::Float64=0.005,
lodf_cutoff::Float64=0.001,
optimizer=nothing,
model=nothing,
variable_names::Bool=false,
formulation::Vector{UCComponent} = UnitCommitment.DefaultFormulation,
) :: UnitCommitmentModel2
if (filename == nothing) && (instance == nothing)
error("Either filename or instance must be specified")
end
if filename != nothing
@info "Reading: $(filename)"
time_read = @elapsed begin
instance = UnitCommitment.read(filename)
end
@info @sprintf("Read problem in %.2f seconds", time_read)
end
if length(instance.buses) == 1
isf = zeros(0, 0)
lodf = zeros(0, 0)
else
if isf == nothing
@info "Computing injection shift factors..."
time_isf = @elapsed begin
isf = UnitCommitment.injection_shift_factors(lines=instance.lines,
buses=instance.buses)
end
@info @sprintf("Computed ISF in %.2f seconds", time_isf)
@info "Computing line outage factors..."
time_lodf = @elapsed begin
lodf = UnitCommitment.line_outage_factors(lines=instance.lines,
buses=instance.buses,
isf=isf)
end
@info @sprintf("Computed LODF in %.2f seconds", time_lodf)
@info @sprintf("Applying PTDF and LODF cutoffs (%.5f, %.5f)", isf_cutoff, lodf_cutoff)
isf[abs.(isf) .< isf_cutoff] .= 0
lodf[abs.(lodf) .< lodf_cutoff] .= 0
end
end
@info "Building model..."
time_model = @elapsed begin
if model == nothing
if optimizer == nothing
mip = Model()
else
mip = Model(optimizer)
end
else
mip = model
end
@info "About to build model"
model = UnitCommitmentModel2(mip, # JuMP.Model
DotDict(), # vars
DotDict(), # eqs
DotDict(), # exprs
instance, # UnitCommitmentInstance
isf, # injection shift factors
lodf, # line outage distribution factors
AffExpr(), # obj
formulation, # formulation
)
# Prepare variables
for var in get_required_variables(formulation)
add_variable(mip, model, instance, UnitCommitment.var_list[var])
end # prepare variables
# Prepare constraints
for constr in get_required_constraints(formulation)
add_constraint(mip, model, instance, constr)
end # prepare constraints
# Prepare expressions (in this case, affine expressions that are later used as part of constraints or objective)
# * :startup_cost => contribution to objective of startup costs
for field in [:startup_cost] #[:net_injection]
setproperty!(model.exprs, field, OrderedDict())
end
# Add components to mip
for c in formulation
c.add_component(c, mip, model)
end
# Add objective function
build_obj_function!(model)
end # end timing of building model
@info @sprintf("Built model in %.2f seconds", time_model)
if variable_names
set_variable_names!(model)
end
return model
end # build_model
"""
Add a particular variable to `model.vars`.
"""
function add_variable(mip::JuMP.Model,
model::UnitCommitmentModel2,
instance::UnitCommitmentInstance,
var::UCVariable)
setproperty!(model.vars, var.name, OrderedDict())
x = getproperty(model.vars, var.name)
if !isnothing(var.add_variable)
var.add_variable(var, x, mip, instance)
return
end
# The following is a bit complex-looking, but the idea is ultimately straightforward
# We want to loop over the possible index values for var,
# for every dimension of var (e.g., looping over units and time)
# The OrderedDict `ind_to_field` maps a UCElement to the corresponding field name within a UnitCommitmentInstance
# NB: this can be an array of field names, such as [:x, :y], which means we want to access instance.x.y
# Furthermore, `var` has an array `indices` of UCElement values, describing which index loops over
# So all we want is to extract the _length_ of the corresponding field of `instance`
# We create a Tuple so we can feed it to CartesianIndices
fields = UnitCommitment.ind_to_field(var.indices)
num_indices = UnitCommitment.num_indices(fields)
# There is some really complicated logic below that one day needs to be improved
# (we need to handle nested indices, and this is one way that hopefully works, but it is definitely not intuitive)
loop_primitive = UnitCommitment.loop_over_indices(UnitCommitment.get_indices_tuple(instance, fields))
indices = UnitCommitment.get_indices(loop_primitive) # returns an array of tuples? or a unit range maybe.
for ind in indices
# For each of the indices, check if the field corresponding to that index has a name
# Then we will index the variable by that name instead of the integer
curr_tuple = Tuple(ind)
new_tuple = ()
for i in 1:num_indices
curr_field = UnitCommitment.get_nested_field(instance, fields, i, curr_tuple)
if :name in propertynames(curr_field)
new_tuple = (new_tuple..., curr_field.name)
else
new_tuple = (new_tuple..., curr_tuple[i])
end
end
name = string(var.name, "[")
for (i,val) in enumerate(new_tuple)
name = string(name, val, i < num_indices ? "," : "")
end
name = string(name, "]")
if num_indices == 1
new_tuple = new_tuple[1]
end
x[new_tuple] = @variable(mip,
lower_bound=var.lb,
upper_bound=var.ub,
integer=var.integer,
base_name=name)
end
### DEBUG
#if var.name == :reserve_shortfall
# @show var.name, num_indices, loop_primitive, indices, x
# #@show JuMP.all_variables(mip)
#end
### DEBUG
end # add_variable
"""
Add constraint to `model.eqs` (set of affine expressions represent left-hand side of constraints).
"""
function add_constraint(mip::JuMP.Model,
model::UnitCommitmentModel2,
instance::UnitCommitmentInstance,
constr::Symbol)
setproperty!(model.eqs, constr, OrderedDict())
end # add_constraint
"""
Components of the objective include, summed over time:
* production cost above minimum
* minimum production cost if generator is on
* startup cost
* shutdown cost
* cost of reserve shortfall
* penalty for not meeting or exceeding load (using the curtail variable)
"""
function build_obj_function!(model::UnitCommitmentModel2)
@objective(model.mip, Min, model.obj)
end # build_obj_function
function enforce_transmission(;
model::UnitCommitmentModel2,
violation::Violation,
isf::Array{Float64,2},
lodf::Array{Float64,2})::Nothing
instance, mip, vars = model.instance, model.mip, model.vars
limit::Float64 = 0.0
if violation.outage_line == nothing
limit = violation.monitored_line.normal_flow_limit[violation.time]
@info @sprintf(" %8.3f MW overflow in %-5s time %3d (pre-contingency)",
violation.amount,
violation.monitored_line.name,
violation.time)
else
limit = violation.monitored_line.emergency_flow_limit[violation.time]
@info @sprintf(" %8.3f MW overflow in %-5s time %3d (outage: line %s)",
violation.amount,
violation.monitored_line.name,
violation.time,
violation.outage_line.name)
end
fm = violation.monitored_line.name
t = violation.time
flow = @variable(mip, base_name="flow[$fm,$t]")
# |flow| <= limit + overflow
overflow = vars.overflow[violation.monitored_line.name, violation.time]
@constraint(mip, flow <= limit + overflow)
@constraint(mip, -flow <= limit + overflow)
if violation.outage_line == nothing
@constraint(mip, flow == sum(vars.net_injection[b.name, violation.time] *
isf[violation.monitored_line.offset, b.offset]
for b in instance.buses
if b.offset > 0))
else
@constraint(mip, flow == sum(vars.net_injection[b.name, violation.time] * (
isf[violation.monitored_line.offset, b.offset] + (
lodf[violation.monitored_line.offset, violation.outage_line.offset] *
isf[violation.outage_line.offset, b.offset]
)
)
for b in instance.buses
if b.offset > 0))
end
nothing
end # enforce_transmission
function set_variable_names!(model::UnitCommitmentModel2)
@info "Setting variable and constraint names..."
time_varnames = @elapsed begin
#set_jump_names!(model.vars) # amk: already set
set_jump_names!(model.eqs)
end
@info @sprintf("Set names in %.2f seconds", time_varnames)
end # set_variable_names
function set_jump_names!(dict)
for name in keys(dict)
for idx in keys(dict[name])
idx_str = isa(idx, Tuple) ? join(map(string, idx), ",") : idx
set_name(dict[name][idx], "$name[$idx_str]")
end
end
end # set_jump_names
function get_solution(model::UnitCommitmentModel2)
instance, T = model.instance, model.instance.time
function timeseries(vars, collection)
return OrderedDict(b.name => [round(value(vars[b.name, t]), digits=5) for t in 1:T]
for b in collection)
end
function production_cost(g)
return [value(model.vars.is_on[g.name, t]) * g.min_power_cost[t] +
sum(Float64[value(model.vars.segprod[g.name, k, t]) * g.cost_segments[k].cost[t]
for k in 1:length(g.cost_segments)])
for t in 1:T]
end
function production(g)
return [value(model.vars.is_on[g.name, t]) * g.min_power[t] +
sum(Float64[value(model.vars.segprod[g.name, k, t])
for k in 1:length(g.cost_segments)])
for t in 1:T]
end
function startup_cost(g)
#S = length(g.startup_categories)
#return [sum(g.startup_categories[s].cost * value(model.vars.startup[g.name, s, t])
# for s in 1:S)
# for t in 1:T]
return [ value.(model.exprs.startup_cost[g.name, t]) for t in 1:T ]
end
sol = OrderedDict()
sol["Production (MW)"] = OrderedDict(g.name => production(g) for g in instance.units)
sol["Production cost (\$)"] = OrderedDict(g.name => production_cost(g) for g in instance.units)
sol["Startup cost (\$)"] = OrderedDict(g.name => startup_cost(g) for g in instance.units)
sol["Is on"] = timeseries(model.vars.is_on, instance.units)
sol["Switch on"] = timeseries(model.vars.switch_on, instance.units)
sol["Switch off"] = timeseries(model.vars.switch_off, instance.units)
sol["Reserve (MW)"] = timeseries(model.vars.reserve, instance.units)
sol["Net injection (MW)"] = timeseries(model.vars.net_injection, instance.buses)
sol["Load curtail (MW)"] = timeseries(model.vars.curtail, instance.buses)
if !isempty(instance.lines)
sol["Line overflow (MW)"] = timeseries(model.vars.overflow, instance.lines)
end
if !isempty(instance.price_sensitive_loads)
sol["Price-sensitive loads (MW)"] = timeseries(model.vars.loads, instance.price_sensitive_loads)
end
return sol
end # get_solution
function fix!(model::UnitCommitmentModel2, solution)::Nothing
vars, instance, T = model.vars, model.instance, model.instance.time
for g in instance.units
for t in 1:T
is_on = round(solution["Is on"][g.name][t])
production = round(solution["Production (MW)"][g.name][t], digits=5)
reserve = round(solution["Reserve (MW)"][g.name][t], digits=5)
JuMP.fix(vars.is_on[g.name, t], is_on, force=true)
JuMP.fix(vars.prod_above[g.name, t], production - is_on * g.min_power[t], force=true)
JuMP.fix(vars.reserve[g.name, t], reserve, force=true)
end
end
end # fix!
function set_warm_start!(model::UnitCommitmentModel2, solution)::Nothing
vars, instance, T = model.vars, model.instance, model.instance.time
for g in instance.units
for t in 1:T
JuMP.set_start_value(vars.is_on[g.name, t], solution["Is on"][g.name][t])
JuMP.set_start_value(vars.switch_on[g.name, t], solution["Switch on"][g.name][t])
JuMP.set_start_value(vars.switch_off[g.name, t], solution["Switch off"][g.name][t])
end
end
end # set_warm_start
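# Solve the model, lazily enforcing transmission constraints:
# when the instance has a transmission network and `two_phase_gap` is true,
# the MILP is first solved with a coarse 1e-2 gap; violated pre- and
# post-contingency flow limits are then added and the gap is tightened to
# `gap_limit` once no further violations are found. The loop also stops when
# `time_limit` (in seconds) is exceeded.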
function optimize!(model::UnitCommitmentModel2;
time_limit=3600,
gap_limit=1e-4,
two_phase_gap=true,
)::Nothing
function set_gap(gap)
try
JuMP.set_optimizer_attribute(model.mip, "MIPGap", gap)
@info @sprintf("MIP gap tolerance set to %f", gap)
catch
@warn "Could not change MIP gap tolerance"
end
end
instance = model.instance
initial_time = time()
large_gap = false
has_transmission = (length(model.isf) > 0)
if has_transmission && two_phase_gap
set_gap(1e-2)
large_gap = true
else
set_gap(gap_limit)
end
while true
time_elapsed = time() - initial_time
time_remaining = time_limit - time_elapsed
if time_remaining < 0
@info "Time limit exceeded"
break
end
@info @sprintf("Setting MILP time limit to %.2f seconds", time_remaining)
JuMP.set_time_limit_sec(model.mip, time_remaining)
@info "Solving MILP..."
JuMP.optimize!(model.mip)
has_transmission || break
violations = find_violations(model)
if isempty(violations)
@info "No violations found"
if large_gap
large_gap = false
set_gap(gap_limit)
else
break
end
else
enforce_transmission(model, violations)
end
end
nothing
end # optimize!
"""
Identify which transmission lines are violated.
See find_violations description from screening.jl.
"""
function find_violations(model::UnitCommitmentModel2)
instance, vars = model.instance, model.vars
length(instance.buses) > 1 || return []
violations = []
@info "Verifying transmission limits..."
time_screening = @elapsed begin
non_slack_buses = [b for b in instance.buses if b.offset > 0]
net_injections = [value(vars.net_injection[b.name, t])
for b in non_slack_buses, t in 1:instance.time]
overflow = [value(vars.overflow[lm.name, t])
for lm in instance.lines, t in 1:instance.time]
violations = UnitCommitment.find_violations(instance=instance,
net_injections=net_injections,
overflow=overflow,
isf=model.isf,
lodf=model.lodf)
end
@info @sprintf("Verified transmission limits in %.2f seconds", time_screening)
return violations
end # find_violations
function enforce_transmission(model::UnitCommitmentModel2, violations::Array{Violation, 1})
for v in violations
enforce_transmission(model=model,
violation=v,
isf=model.isf,
lodf=model.lodf)
end
end # enforce_transmission
export UnitCommitmentModel2, build_model, get_solution, optimize!


@@ -4,44 +4,49 @@
# Copyright (C) 2019 Argonne National Laboratory # Copyright (C) 2019 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov> # Written by Alinson Santos Xavier <axavier@anl.gov>
using DataStructures using DataStructures
using Base.Threads using Base.Threads
struct Violation struct Violation
time::Int time::Int
monitored_line::TransmissionLine monitored_line::TransmissionLine
outage_line::Union{TransmissionLine,Nothing} outage_line::Union{TransmissionLine, Nothing}
amount::Float64 # Violation amount (in MW) amount::Float64 # Violation amount (in MW)
end end
function Violation(; function Violation(;
time::Int, time::Int,
monitored_line::TransmissionLine, monitored_line::TransmissionLine,
outage_line::Union{TransmissionLine,Nothing}, outage_line::Union{TransmissionLine, Nothing},
amount::Float64, amount::Float64,
)::Violation ) :: Violation
return Violation(time, monitored_line, outage_line, amount) return Violation(time, monitored_line, outage_line, amount)
end end
mutable struct ViolationFilter mutable struct ViolationFilter
max_per_line::Int max_per_line::Int
max_total::Int max_total::Int
queues::Dict{Int,PriorityQueue{Violation,Float64}} queues::Dict{Int, PriorityQueue{Violation, Float64}}
end end
function ViolationFilter(; function ViolationFilter(;
max_per_line::Int = 1, max_per_line::Int=1,
max_total::Int = 5, max_total::Int=5,
)::ViolationFilter )::ViolationFilter
return ViolationFilter(max_per_line, max_total, Dict()) return ViolationFilter(max_per_line, max_total, Dict())
end end
function _offer(filter::ViolationFilter, v::Violation)::Nothing
function offer(filter::ViolationFilter, v::Violation)::Nothing
if v.monitored_line.offset keys(filter.queues) if v.monitored_line.offset keys(filter.queues)
filter.queues[v.monitored_line.offset] = filter.queues[v.monitored_line.offset] = PriorityQueue{Violation, Float64}()
PriorityQueue{Violation,Float64}()
end end
q::PriorityQueue{Violation,Float64} = filter.queues[v.monitored_line.offset] q::PriorityQueue{Violation, Float64} = filter.queues[v.monitored_line.offset]
if length(q) < filter.max_per_line if length(q) < filter.max_per_line
enqueue!(q, v => v.amount) enqueue!(q, v => v.amount)
else else
@@ -50,12 +55,13 @@ function _offer(filter::ViolationFilter, v::Violation)::Nothing
enqueue!(q, v => v.amount) enqueue!(q, v => v.amount)
end end
end end
return nothing nothing
end end
function _query(filter::ViolationFilter)::Array{Violation,1}
function query(filter::ViolationFilter)::Array{Violation, 1}
violations = Array{Violation,1}() violations = Array{Violation,1}()
time_queue = PriorityQueue{Violation,Float64}() time_queue = PriorityQueue{Violation, Float64}()
for l in keys(filter.queues) for l in keys(filter.queues)
line_queue = filter.queues[l] line_queue = filter.queues[l]
while length(line_queue) > 0 while length(line_queue) > 0
@@ -76,67 +82,59 @@ function _query(filter::ViolationFilter)::Array{Violation,1}
return violations return violations
end end
""" """
function _find_violations( function find_violations(instance::UnitCommitmentInstance,
instance::UnitCommitmentInstance, net_injections::Array{Float64, 2};
net_injections::Array{Float64, 2}; isf::Array{Float64,2},
isf::Array{Float64,2}, lodf::Array{Float64,2},
lodf::Array{Float64,2}, max_per_line::Int = 1,
max_per_line::Int = 1, max_per_period::Int = 5,
max_per_period::Int = 5, ) :: Array{Violation, 1}
)::Array{Violation, 1}
Find transmission constraint violations (both pre-contingency, as well as Find transmission constraint violations (both pre-contingency, as well as post-contingency).
post-contingency).
The argument `net_injection` should be a (B-1) x T matrix, where B is the The argument `net_injection` should be a (B-1) x T matrix, where B is the number of buses
number of buses and T is the number of time periods. The arguments `isf` and and T is the number of time periods. The arguments `isf` and `lodf` can be computed using
`lodf` can be computed using UnitCommitment.injection_shift_factors and UnitCommitment.injection_shift_factors and UnitCommitment.line_outage_factors.
UnitCommitment.line_outage_factors. The argument `overflow` specifies how much The argument `overflow` specifies how much flow above the transmission limits (in MW) is allowed.
flow above the transmission limits (in MW) is allowed. It should be an L x T It should be an L x T matrix, where L is the number of transmission lines.
matrix, where L is the number of transmission lines.
""" """
function _find_violations(; function find_violations(;
instance::UnitCommitmentInstance, instance::UnitCommitmentInstance,
net_injections::Array{Float64,2}, net_injections::Array{Float64, 2},
overflow::Array{Float64,2}, overflow::Array{Float64, 2},
isf::Array{Float64,2}, isf::Array{Float64,2},
lodf::Array{Float64,2}, lodf::Array{Float64,2},
max_per_line::Int = 1, max_per_line::Int = 1,
max_per_period::Int = 5, max_per_period::Int = 5,
)::Array{Violation,1} )::Array{Violation, 1}
B = length(instance.buses) - 1 B = length(instance.buses) - 1
L = length(instance.lines) L = length(instance.lines)
T = instance.time T = instance.time
K = nthreads() K = nthreads()
size(net_injections) == (B, T) || error("net_injections has incorrect size") size(net_injections) == (B, T) || error("net_injections has incorrect size")
size(isf) == (L, B) || error("isf has incorrect size") size(isf) == (L, B) || error("isf has incorrect size")
size(lodf) == (L, L) || error("lodf has incorrect size") size(lodf) == (L, L) || error("lodf has incorrect size")
filters = Dict( filters = Dict(t => ViolationFilter(max_total=max_per_period,
t => ViolationFilter( max_per_line=max_per_line)
max_total = max_per_period, for t in 1:T)
max_per_line = max_per_line,
) for t in 1:T
)
pre_flow::Array{Float64} = zeros(L, K) # pre_flow[lm, thread] pre_flow::Array{Float64} = zeros(L, K) # pre_flow[lm, thread]
post_flow::Array{Float64} = zeros(L, L, K) # post_flow[lm, lc, thread] post_flow::Array{Float64} = zeros(L, L, K) # post_flow[lm, lc, thread]
pre_v::Array{Float64} = zeros(L, K) # pre_v[lm, thread] pre_v::Array{Float64} = zeros(L, K) # pre_v[lm, thread]
post_v::Array{Float64} = zeros(L, L, K) # post_v[lm, lc, thread] post_v::Array{Float64} = zeros(L, L, K) # post_v[lm, lc, thread]
normal_limits::Array{Float64,2} = [ normal_limits::Array{Float64,2} = [l.normal_flow_limit[t] + overflow[l.offset, t]
l.normal_flow_limit[t] + overflow[l.offset, t] for for l in instance.lines, t in 1:T]
l in instance.lines, t in 1:T
] emergency_limits::Array{Float64,2} = [l.emergency_flow_limit[t] + overflow[l.offset, t]
for l in instance.lines, t in 1:T]
emergency_limits::Array{Float64,2} = [
l.emergency_flow_limit[t] + overflow[l.offset, t] for
l in instance.lines, t in 1:T
]
is_vulnerable::Array{Bool} = zeros(Bool, L) is_vulnerable::Array{Bool} = zeros(Bool, L)
for c in instance.contingencies for c in instance.contingencies
is_vulnerable[c.lines[1].offset] = true is_vulnerable[c.lines[1].offset] = true
@@ -144,69 +142,57 @@ function _find_violations(;
@threads for t in 1:T @threads for t in 1:T
k = threadid() k = threadid()
# Pre-contingency flows # Pre-contingency flows
pre_flow[:, k] = isf * net_injections[:, t] pre_flow[:, k] = isf * net_injections[:, t]
# Post-contingency flows # Post-contingency flows
for lc in 1:L, lm in 1:L for lc in 1:L, lm in 1:L
post_flow[lm, lc, k] = post_flow[lm, lc, k] = pre_flow[lm, k] + pre_flow[lc, k] * lodf[lm, lc]
pre_flow[lm, k] + pre_flow[lc, k] * lodf[lm, lc]
end end
# Pre-contingency violations # Pre-contingency violations
for lm in 1:L for lm in 1:L
pre_v[lm, k] = max( pre_v[lm, k] = max(0.0,
0.0, pre_flow[lm, k] - normal_limits[lm, t],
pre_flow[lm, k] - normal_limits[lm, t], - pre_flow[lm, k] - normal_limits[lm, t])
-pre_flow[lm, k] - normal_limits[lm, t],
)
end end
# Post-contingency violations # Post-contingency violations
for lc in 1:L, lm in 1:L for lc in 1:L, lm in 1:L
post_v[lm, lc, k] = max( post_v[lm, lc, k] = max(0.0,
0.0, post_flow[lm, lc, k] - emergency_limits[lm, t],
post_flow[lm, lc, k] - emergency_limits[lm, t], - post_flow[lm, lc, k] - emergency_limits[lm, t])
-post_flow[lm, lc, k] - emergency_limits[lm, t],
)
end end
# Offer pre-contingency violations # Offer pre-contingency violations
for lm in 1:L for lm in 1:L
if pre_v[lm, k] > 1e-5 if pre_v[lm, k] > 1e-5
_offer( offer(filters[t], Violation(time=t,
filters[t], monitored_line=instance.lines[lm],
Violation( outage_line=nothing,
time = t, amount=pre_v[lm, k]))
monitored_line = instance.lines[lm],
outage_line = nothing,
amount = pre_v[lm, k],
),
)
end end
end end
# Offer post-contingency violations # Offer post-contingency violations
for lm in 1:L, lc in 1:L for lm in 1:L, lc in 1:L
if post_v[lm, lc, k] > 1e-5 && is_vulnerable[lc] if post_v[lm, lc, k] > 1e-5 && is_vulnerable[lc]
_offer( offer(filters[t], Violation(time=t,
filters[t], monitored_line=instance.lines[lm],
Violation( outage_line=instance.lines[lc],
time = t, amount=post_v[lm, lc, k]))
monitored_line = instance.lines[lm],
outage_line = instance.lines[lc],
amount = post_v[lm, lc, k],
),
)
end end
end end
end end
violations = Violation[] violations = Violation[]
for t in 1:instance.time for t in 1:instance.time
append!(violations, _query(filters[t])) append!(violations, query(filters[t]))
end end
return violations return violations
end end
export Violation, ViolationFilter, offer, query, find_violations
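# Hypothetical usage sketch: how the screening routines above fit together,
# mirroring the screening test later in this compare view. Branch-side names
# are used (find_violations etc.); the other side of the diff prefixes them
# with an underscore (_find_violations).
using UnitCommitment
instance = UnitCommitment.read_benchmark("test/case14")
isf = UnitCommitment.injection_shift_factors(lines = instance.lines, buses = instance.buses)
lodf = UnitCommitment.line_outage_factors(lines = instance.lines, buses = instance.buses, isf = isf)
inj = [1000.0 for b in 1:13, t in 1:instance.time]               # (B-1) x T net injections
overflow = [0.0 for l in instance.lines, t in 1:instance.time]   # L x T allowed overflow
violations = UnitCommitment.find_violations(
    instance = instance,
    net_injections = inj,
    overflow = overflow,
    isf = isf,
    lodf = lodf,
)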
View File
@@ -5,38 +5,31 @@
using SparseArrays, Base.Threads, LinearAlgebra, JuMP using SparseArrays, Base.Threads, LinearAlgebra, JuMP
""" """
_injection_shift_factors(; buses, lines) injection_shift_factors(; buses, lines)
Returns an Lx(B-1) matrix M, where B is the number of buses and L is the number Returns an Lx(B-1) matrix M, where B is the number of buses and L is the number of transmission
of transmission lines. For a given bus b and transmission line l, the entry lines. For a given bus b and transmission line l, the entry M[l.offset, b.offset] indicates
M[l.offset, b.offset] indicates the amount of power (in MW) that flows through the amount of power (in MW) that flows through transmission line l when 1 MW of power is
transmission line l when 1 MW of power is injected at the slack bus (the bus injected at the slack bus (the bus that has offset zero) and withdrawn from b.
that has offset zero) and withdrawn from b.
""" """
function _injection_shift_factors(; function injection_shift_factors(; buses, lines)
buses::Array{Bus}, susceptance = susceptance_matrix(lines)
lines::Array{TransmissionLine}, incidence = reduced_incidence_matrix(lines = lines, buses = buses)
)
susceptance = _susceptance_matrix(lines)
incidence = _reduced_incidence_matrix(lines = lines, buses = buses)
laplacian = transpose(incidence) * susceptance * incidence laplacian = transpose(incidence) * susceptance * incidence
isf = susceptance * incidence * inv(Array(laplacian)) isf = susceptance * incidence * inv(Array(laplacian))
return isf return isf
end end
"""
_reduced_incidence_matrix(; buses::Array{Bus}, lines::Array{TransmissionLine})
Returns the incidence matrix for the network, with the column corresponding to
the slack bus removed. More precisely, returns an L x (B-1) matrix, where B
is the number of buses and L is the number of lines. For each row, there is a 1
element and a -1 element, indicating the source and target buses, respectively,
for that line.
""" """
function _reduced_incidence_matrix(; reduced_incidence_matrix(; buses::Array{Bus}, lines::Array{TransmissionLine})
buses::Array{Bus},
lines::Array{TransmissionLine}, Returns the incidence matrix for the network, with the column corresponding to the slack
) bus removed. More precisely, returns an L x (B-1) matrix, where B is the number of buses
and L is the number of lines. For each row, there is a 1 element and a -1 element, indicating
the source and target buses, respectively, for that line.
"""
function reduced_incidence_matrix(; buses::Array{Bus}, lines::Array{TransmissionLine})
matrix = spzeros(Float64, length(lines), length(buses) - 1) matrix = spzeros(Float64, length(lines), length(buses) - 1)
for line in lines for line in lines
if line.source.offset > 0 if line.source.offset > 0
@@ -46,34 +39,37 @@ function _reduced_incidence_matrix(;
matrix[line.offset, line.target.offset] = -1 matrix[line.offset, line.target.offset] = -1
end end
end end
return matrix matrix
end end
""" """
_susceptance_matrix(lines::Array{TransmissionLine}) susceptance_matrix(lines::Array{TransmissionLine})
Returns a LxL diagonal matrix, where each diagonal entry is the susceptance of Returns a LxL diagonal matrix, where each diagonal entry is the susceptance of the
the corresponding transmission line. corresponding transmission line.
""" """
function _susceptance_matrix(lines::Array{TransmissionLine}) function susceptance_matrix(lines::Array{TransmissionLine})
return Diagonal([l.susceptance for l in lines]) return Diagonal([l.susceptance for l in lines])
end end
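# Hypothetical shape check for the factor matrices above, assuming `instance`
# is the case14 test system referenced later in this compare view (14 buses,
# 20 lines); underscore-prefixed names apply on the other side of the diff:
#   susceptance_matrix(instance.lines)                 -> 20 x 20 diagonal
#   reduced_incidence_matrix(lines = ..., buses = ...) -> 20 x 13
#   injection_shift_factors(lines = ..., buses = ...)  -> 20 x 13
#       (one row per line, one column per non-slack bus)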
""" """
_line_outage_factors(; buses, lines, isf) line_outage_factors(; buses, lines, isf)
Returns a LxL matrix containing the Line Outage Distribution Factors (LODFs) Returns a LxL matrix containing the Line Outage Distribution Factors (LODFs) for the
for the given network. This matrix describes how the pre-contingency flow given network. This matrix describes how the pre-contingency flow changes when each
changes when each individual transmission line is removed. individual transmission line is removed.
""" """
function _line_outage_factors(; function line_outage_factors(;
buses::Array{Bus,1}, buses::Array{Bus, 1},
lines::Array{TransmissionLine,1}, lines::Array{TransmissionLine, 1},
isf::Array{Float64,2}, isf::Array{Float64,2},
)::Array{Float64,2} ) :: Array{Float64,2}
n_lines, n_buses = size(isf) n_lines, n_buses = size(isf)
incidence = Array(_reduced_incidence_matrix(lines = lines, buses = buses)) incidence = Array(reduced_incidence_matrix(lines=lines,
buses=buses))
lodf::Array{Float64,2} = isf * transpose(incidence) lodf::Array{Float64,2} = isf * transpose(incidence)
m, n = size(lodf) m, n = size(lodf)
for i in 1:n for i in 1:n
View File
@@ -10,11 +10,14 @@ using JuMP
using MathOptInterface using MathOptInterface
using SparseArrays using SparseArrays
pkg = [:DataStructures, :JSON, :JuMP, :MathOptInterface, :SparseArrays] pkg = [:DataStructures,
:JSON,
:JuMP,
:MathOptInterface,
:SparseArrays,
]
@info "Building system image..." @info "Building system image..."
create_sysimage( create_sysimage(pkg,
pkg, precompile_statements_file="build/precompile.jl",
precompile_statements_file = "build/precompile.jl", sysimage_path="build/sysimage.so")
sysimage_path = "build/sysimage.so",
)
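# Hypothetical follow-up: once PackageCompiler has written the image above, it
# can be loaded at Julia startup, e.g.
#   julia --sysimage build/sysimage.so --project=@.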
View File
@@ -7,20 +7,19 @@ using Printf
bin(x) = [xi > 0.5 for xi in x] bin(x) = [xi > 0.5 for xi in x]
""" """
repair!(instance) fix!(instance)
Verifies that the given unit commitment instance is valid and automatically Verifies that the given unit commitment instance is valid and automatically fixes
fixes some validation errors if possible, issuing a warning for each error some validation errors if possible, issuing a warning for each error found.
found. If a validation error cannot be automatically fixed, issues an If a validation error cannot be automatically fixed, issues an exception.
exception.
Returns the number of validation errors found. Returns the number of validation errors found.
""" """
function repair!(instance::UnitCommitmentInstance)::Int function fix!(instance::UnitCommitmentInstance)::Int
n_errors = 0 n_errors = 0
for g in instance.units for g in instance.units
# Startup costs and delays must be increasing # Startup costs and delays must be increasing
for s in 2:length(g.startup_categories) for s in 2:length(g.startup_categories)
if g.startup_categories[s].delay <= g.startup_categories[s-1].delay if g.startup_categories[s].delay <= g.startup_categories[s-1].delay
@@ -31,7 +30,7 @@ function repair!(instance::UnitCommitmentInstance)::Int
g.startup_categories[s].delay = new_value g.startup_categories[s].delay = new_value
n_errors += 1 n_errors += 1
end end
if g.startup_categories[s].cost < g.startup_categories[s-1].cost if g.startup_categories[s].cost < g.startup_categories[s-1].cost
prev_value = g.startup_categories[s].cost prev_value = g.startup_categories[s].cost
new_value = g.startup_categories[s-1].cost new_value = g.startup_categories[s-1].cost
@@ -40,8 +39,9 @@ function repair!(instance::UnitCommitmentInstance)::Int
g.startup_categories[s].cost = new_value g.startup_categories[s].cost = new_value
n_errors += 1 n_errors += 1
end end
end
end
for t in 1:instance.time for t in 1:instance.time
# Production cost curve should be convex # Production cost curve should be convex
for k in 2:length(g.cost_segments) for k in 2:length(g.cost_segments)
@@ -66,16 +66,19 @@ function repair!(instance::UnitCommitmentInstance)::Int
end end
end end
end end
return n_errors return n_errors
end end
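# Hypothetical usage sketch for the routine above (named repair! on one side of
# this diff and fix! on the other); the instance path is a placeholder.
using UnitCommitment
instance = UnitCommitment.read("instance.json")
n_errors = UnitCommitment.repair!(instance)
n_errors == 0 || @warn "Instance required $n_errors automatic repairs"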
function validate(instance_filename::String, solution_filename::String) function validate(instance_filename::String, solution_filename::String)
instance = UnitCommitment.read(instance_filename) instance = UnitCommitment.read(instance_filename)
solution = JSON.parse(open(solution_filename)) solution = JSON.parse(open(solution_filename))
return validate(instance, solution) return validate(instance, solution)
end end
""" """
validate(instance, solution)::Bool validate(instance, solution)::Bool
@@ -83,52 +86,52 @@ Verifies that the given solution is feasible for the problem. If feasible,
silently returns true. If infeasible, returns false and prints the validation silently returns true. If infeasible, returns false and prints the validation
errors to the screen. errors to the screen.
This function is implemented independently from the optimization model in This function is implemented independently from the optimization model in `model.jl`, and
`model.jl`, and therefore can be used to verify that the model is indeed therefore can be used to verify that the model is indeed producing valid solutions. It
producing valid solutions. It can also be used to verify the solutions produced can also be used to verify the solutions produced by other optimization packages.
by other optimization packages.
""" """
function validate( function validate(instance::UnitCommitmentInstance,
instance::UnitCommitmentInstance, solution::Union{Dict,OrderedDict};
solution::Union{Dict,OrderedDict}, )::Bool
)::Bool
err_count = 0 err_count = 0
err_count += _validate_units(instance, solution) err_count += validate_units(instance, solution)
err_count += _validate_reserve_and_demand(instance, solution) err_count += validate_reserve_and_demand(instance, solution)
if err_count > 0 if err_count > 0
@error "Found $err_count validation errors" @error "Found $err_count validation errors"
return false return false
end end
return true return true
end end
function _validate_units(instance, solution; tol = 0.01)
err_count = 0
function validate_units(instance, solution; tol=0.01)
err_count = 0
for unit in instance.units for unit in instance.units
production = solution["Production (MW)"][unit.name] production = solution["Production (MW)"][unit.name]
reserve = solution["Reserve (MW)"][unit.name] reserve = solution["Reserve (MW)"][unit.name]
actual_production_cost = solution["Production cost (\$)"][unit.name] actual_production_cost = solution["Production cost (\$)"][unit.name]
actual_startup_cost = solution["Startup cost (\$)"][unit.name] actual_startup_cost = solution["Startup cost (\$)"][unit.name]
is_on = bin(solution["Is on"][unit.name]) is_on = bin(solution["Is on"][unit.name])
switch_off = bin(solution["Switch off"][unit.name]) # some formulations may not use this
for t in 1:instance.time for t in 1:instance.time
# Auxiliary variables # Auxiliary variables
if t == 1 if t == 1
is_starting_up = (unit.initial_status < 0) && is_on[t] is_starting_up = (unit.initial_status < 0) && is_on[t]
is_shutting_down = (unit.initial_status > 0) && !is_on[t] is_shutting_down = (unit.initial_status > 0) && !is_on[t]
ramp_up = ramp_up = max(0, production[t] + reserve[t] - unit.initial_power)
max(0, production[t] + reserve[t] - unit.initial_power)
ramp_down = max(0, unit.initial_power - production[t]) ramp_down = max(0, unit.initial_power - production[t])
else else
is_starting_up = !is_on[t-1] && is_on[t] is_starting_up = !is_on[t-1] && is_on[t]
is_shutting_down = is_on[t-1] && !is_on[t] is_shutting_down = is_on[t-1] && !is_on[t]
ramp_up = max(0, production[t] + reserve[t] - production[t-1]) ramp_up = max(0, production[t] + reserve[t] - production[t-1])
ramp_down = max(0, production[t-1] - production[t]) #ramp_down = max(0, production[t-1] - production[t])
ramp_down = max(0, production[t-1] + reserve[t-1] - production[t])
end end
# Compute production costs # Compute production costs
production_cost, startup_cost = 0, 0 production_cost, startup_cost = 0, 0
if is_on[t] if is_on[t]
@@ -140,171 +143,130 @@ function _validate_units(instance, solution; tol = 0.01)
residual = max(0, residual - s.mw[t]) residual = max(0, residual - s.mw[t])
end end
end end
# Production should be non-negative # Production should be non-negative
if production[t] < -tol if production[t] < -tol
@error @sprintf( @error @sprintf("Unit %s produces negative amount of power at time %d (%.2f)",
"Unit %s produces negative amount of power at time %d (%.2f)", unit.name, t, production[t])
unit.name,
t,
production[t]
)
err_count += 1 err_count += 1
end end
# Verify must-run # Verify must-run
if !is_on[t] && unit.must_run[t] if !is_on[t] && unit.must_run[t]
@error @sprintf( @error @sprintf("Must-run unit %s is offline at time %d",
"Must-run unit %s is offline at time %d", unit.name, t)
unit.name,
t
)
err_count += 1 err_count += 1
end end
# Verify reserve eligibility # Verify reserve eligibility
if !unit.provides_spinning_reserves[t] && reserve[t] > tol if !unit.provides_spinning_reserves[t] && reserve[t] > tol
@error @sprintf( @error @sprintf("Unit %s is not eligible to provide spinning reserves at time %d",
"Unit %s is not eligible to provide spinning reserves at time %d", unit.name, t)
unit.name,
t
)
err_count += 1 err_count += 1
end end
# If unit is on, must produce at least its minimum power # If unit is on, must produce at least its minimum power
if is_on[t] && (production[t] < unit.min_power[t] - tol) if is_on[t] && (production[t] < unit.min_power[t] - tol)
@error @sprintf( @error @sprintf("Unit %s produces below its minimum limit at time %d (%.2f < %.2f)",
"Unit %s produces below its minimum limit at time %d (%.2f < %.2f)", unit.name, t, production[t], unit.min_power[t])
unit.name,
t,
production[t],
unit.min_power[t]
)
err_count += 1 err_count += 1
end end
# If unit is on, must produce at most its maximum power # If unit is on, must produce at most its maximum power
if is_on[t] && if is_on[t] && (production[t] + reserve[t] > unit.max_power[t] + tol)
(production[t] + reserve[t] > unit.max_power[t] + tol) @error @sprintf("Unit %s produces above its maximum limit at time %d (%.2f + %.2f> %.2f)",
@error @sprintf( unit.name, t, production[t], reserve[t], unit.max_power[t])
"Unit %s produces above its maximum limit at time %d (%.2f + %.2f> %.2f)",
unit.name,
t,
production[t],
reserve[t],
unit.max_power[t]
)
err_count += 1 err_count += 1
end end
# If unit is off, must produce zero # If unit is off, must produce zero
if !is_on[t] && production[t] + reserve[t] > tol if !is_on[t] && production[t] + reserve[t] > tol
@error @sprintf( @error @sprintf("Unit %s produces power at time %d while off",
"Unit %s produces power at time %d while off", unit.name, t)
unit.name,
t
)
err_count += 1 err_count += 1
end end
# Startup limit # Startup limit
if is_starting_up && (ramp_up > unit.startup_limit + tol) if is_starting_up && (ramp_up > unit.startup_limit + tol)
@error @sprintf( @error @sprintf("Unit %s exceeds startup limit at time %d (%.2f > %.2f)",
"Unit %s exceeds startup limit at time %d (%.2f > %.2f)", unit.name, t, ramp_up, unit.startup_limit)
unit.name,
t,
ramp_up,
unit.startup_limit
)
err_count += 1 err_count += 1
end end
# Shutdown limit # Shutdown limit
if is_shutting_down && (ramp_down > unit.shutdown_limit + tol) if is_shutting_down && (ramp_down > unit.shutdown_limit + tol)
@error @sprintf( @error @sprintf("Unit %s exceeds shutdown limit at time %d (%.2f > %.2f)\n\tproduction[t-1] = %.2f\n\treserve[t-1] = %.2f\n\tproduction[t] = %.2f\n\treserve[t] = %.2f\n\tis_on[t-1] = %d\n\tis_on[t] = %d",
"Unit %s exceeds shutdown limit at time %d (%.2f > %.2f)", unit.name, t, ramp_down, unit.shutdown_limit,
unit.name, (t == 1 ? unit.initial_power : production[t-1]), production[t],
t, (t == 1 ? 0. : reserve[t-1]), reserve[t],
ramp_down, (t == 1 ? unit.initial_status != nothing && unit.initial_status > 0 : is_on[t-1]), is_on[t]
unit.shutdown_limit )
)
err_count += 1 err_count += 1
end end
# Ramp-up limit # Ramp-up limit
if !is_starting_up && if !is_starting_up && !is_shutting_down && (ramp_up > unit.ramp_up_limit + tol)
!is_shutting_down && @error @sprintf("Unit %s exceeds ramp up limit at time %d (%.2f > %.2f)",
(ramp_up > unit.ramp_up_limit + tol) unit.name, t, ramp_up, unit.ramp_up_limit)
@error @sprintf(
"Unit %s exceeds ramp up limit at time %d (%.2f > %.2f)",
unit.name,
t,
ramp_up,
unit.ramp_up_limit
)
err_count += 1 err_count += 1
end end
# Ramp-down limit # Ramp-down limit
if !is_starting_up && if !is_starting_up && !is_shutting_down && (ramp_down > unit.ramp_down_limit + tol)
!is_shutting_down && @error @sprintf("Unit %s exceeds ramp down limit at time %d (%.2f > %.2f)\n\tproduction[t-1] = %.2f\n\treserve[t-1] = %.2f\n\tproduction[t] = %.2f\n\treserve[t] = %.2f\n\tis_on[t-1] = %d\n\tis_on[t] = %d",
(ramp_down > unit.ramp_down_limit + tol) unit.name, t, ramp_down, unit.ramp_down_limit,
@error @sprintf( (t == 1 ? unit.initial_power : production[t-1]), production[t],
"Unit %s exceeds ramp down limit at time %d (%.2f > %.2f)", (t == 1 ? 0. : reserve[t-1]), reserve[t],
unit.name, (t == 1 ? unit.initial_status != nothing && unit.initial_status > 0 : is_on[t-1]), is_on[t]
t, )
ramp_down,
unit.ramp_down_limit
)
err_count += 1 err_count += 1
end end
# Verify startup costs & minimum downtime # Verify startup costs & minimum downtime
if is_starting_up if is_starting_up
# Calculate how much time the unit has been offline # Calculate how much time the unit has been offline
time_down = 0 time_down = 0
for k in 1:(t-1) for k in 1:(t-1)
if !is_on[t-k] if !is_on[t - k]
time_down += 1 time_down += 1
else else
break break
end end
end end
if t == time_down + 1 if t == time_down + 1 && !switch_off[1]
# If unit has always been off, then the correct startup cost depends on how long was it off before t = 1
# Absent known initial conditions, we assume it was off for the minimum downtime
# TODO: verify the formulations are making the same assumption...
initial_down = unit.min_downtime initial_down = unit.min_downtime
if unit.initial_status < 0 if unit.initial_status < 0
initial_down = -unit.initial_status initial_down = -unit.initial_status
end end
time_down += initial_down time_down += initial_down
end end
# Calculate startup costs # Calculate startup costs
for c in unit.startup_categories for c in unit.startup_categories
if time_down >= c.delay if time_down >= c.delay
startup_cost = c.cost startup_cost = c.cost
end end
end end
# Check minimum downtime # Check minimum downtime
if time_down < unit.min_downtime if time_down < unit.min_downtime
@error @sprintf( @error @sprintf("Unit %s violates minimum downtime at time %d",
"Unit %s violates minimum downtime at time %d", unit.name, t)
unit.name,
t
)
err_count += 1 err_count += 1
end end
end end
# Verify minimum uptime # Verify minimum uptime
if is_shutting_down if is_shutting_down
# Calculate how much time the unit has been online # Calculate how much time the unit has been online
time_up = 0 time_up = 0
for k in 1:(t-1) for k in 1:(t-1)
if is_on[t-k] if is_on[t - k]
time_up += 1 time_up += 1
else else
break break
@@ -317,99 +279,69 @@ function _validate_units(instance, solution; tol = 0.01)
end end
time_up += initial_up time_up += initial_up
end end
if (t == time_up + 1) && (unit.initial_status > 0) if (t == time_up + 1) && (unit.initial_status > 0)
time_up += unit.initial_status time_up += unit.initial_status
end end
# Check minimum uptime # Check minimum uptime
if time_up < unit.min_uptime if time_up < unit.min_uptime
@error @sprintf( @error @sprintf("Unit %s violates minimum uptime at time %d",
"Unit %s violates minimum uptime at time %d", unit.name, t)
unit.name,
t
)
err_count += 1 err_count += 1
end end
end end
# Verify production costs # Verify production costs
if abs(actual_production_cost[t] - production_cost) > 1.00 if abs(actual_production_cost[t] - production_cost) > 1.00
@error @sprintf( @error @sprintf("Unit %s has unexpected production cost at time %d (%.2f should be %.2f)",
"Unit %s has unexpected production cost at time %d (%.2f should be %.2f)", unit.name, t, actual_production_cost[t], production_cost)
unit.name,
t,
actual_production_cost[t],
production_cost
)
err_count += 1 err_count += 1
end end
# Verify startup costs # Verify startup costs
if abs(actual_startup_cost[t] - startup_cost) > 1.00 if abs(actual_startup_cost[t] - startup_cost) > 1.00
@error @sprintf( @error @sprintf("Unit %s has unexpected startup cost at time %d (%.2f should be %.2f)",
"Unit %s has unexpected startup cost at time %d (%.2f should be %.2f)", unit.name, t, actual_startup_cost[t], startup_cost)
unit.name,
t,
actual_startup_cost[t],
startup_cost
)
err_count += 1 err_count += 1
end end
end end
end end
return err_count return err_count
end end
function _validate_reserve_and_demand(instance, solution, tol = 0.01)
function validate_reserve_and_demand(instance, solution, tol=0.01)
err_count = 0 err_count = 0
for t in 1:instance.time for t in 1:instance.time
load_curtail = 0 load_curtail = 0
fixed_load = sum(b.load[t] for b in instance.buses) fixed_load = sum(b.load[t] for b in instance.buses)
ps_load = 0 production = sum(solution["Production (MW)"][g.name][t]
if length(instance.price_sensitive_loads) > 0 for g in instance.units)
ps_load = sum(
solution["Price-sensitive loads (MW)"][ps.name][t] for
ps in instance.price_sensitive_loads
)
end
production =
sum(solution["Production (MW)"][g.name][t] for g in instance.units)
if "Load curtail (MW)" in keys(solution) if "Load curtail (MW)" in keys(solution)
load_curtail = sum( load_curtail = sum(solution["Load curtail (MW)"][b.name][t]
solution["Load curtail (MW)"][b.name][t] for for b in instance.buses)
b in instance.buses
)
end end
balance = fixed_load - load_curtail - production + ps_load balance = fixed_load - load_curtail - production
# Verify that production equals demand # Verify that production equals demand
if abs(balance) > tol if abs(balance) > tol
@error @sprintf( @error @sprintf("Non-zero power balance at time %d (%.2f - %.2f - %.2f != 0)",
"Non-zero power balance at time %d (%.2f + %.2f - %.2f - %.2f != 0)", t, fixed_load, load_curtail, production)
t,
fixed_load,
ps_load,
load_curtail,
production,
)
err_count += 1 err_count += 1
end end
# Verify spinning reserves # Verify spinning reserves
reserve = reserve = sum(solution["Reserve (MW)"][g.name][t] for g in instance.units)
sum(solution["Reserve (MW)"][g.name][t] for g in instance.units)
if reserve < instance.reserves.spinning[t] - tol if reserve < instance.reserves.spinning[t] - tol
@error @sprintf( @error @sprintf("Insufficient spinning reserves at time %d (%.2f should be %.2f)",
"Insufficient spinning reserves at time %d (%.2f should be %.2f)", t, reserve, instance.reserves.spinning[t])
t,
reserve,
instance.reserves.spinning[t],
)
err_count += 1 err_count += 1
end end
end end
return err_count return err_count
end end
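# Hypothetical usage of the two validate entry points defined above; the file
# names are placeholders.
using UnitCommitment, JSON
UnitCommitment.validate("instance.json", "solution.json")
# ...or, with the objects already in memory:
instance = UnitCommitment.read("instance.json")
solution = JSON.parse(open("solution.json"))
UnitCommitment.validate(instance, solution)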
471
src/variables.jl Normal file
View File
@@ -0,0 +1,471 @@
using DataStructures # for OrderedDict
using JuMP
##################################################
# Variables
#mutable struct UCVariable
# "Name of the variable."
# name::Symbol
# "What does the variable represent?"
# description::String
# "Global lower bound for the variable (may be adjusted later)."
# lb::Float64
# "Global upper bound for the variable (may be adjusted later)."
# ub::Float64
# "Is the variable integer-restricted?"
# integer::Bool
# "What are we indexing over?"*
# " Recursive structure, e.g., [X,Y] means Y is a field in X,"*
# " and [X,[Y1,Z],Y2] means Y1 and Y2 are fields in X and Z is a field in Y1.\n"*
# " [ X, [Y,A,B], [Y,A,A], [Z,[D,E],F], T ]\n"*
# " => [x, y1, y1.a, y1.b, y2, y2.a1, y2.a2, z, z.d, z.d.e, z.f, t]."
# indices::Vector
# "Function to add the variable; if this is missing, we will attempt to add the variable automatically using the `indices`. Signature should be (variable, model.vars.familyname, mip, instance)."
# add_variable::Union{Function,Nothing}
#end # UCVariable
# TODO Above did not work for some reason
mutable struct UCVariable
name::Symbol
description::String
lb::Float64
ub::Float64
integer::Bool
indices::Vector
add_variable::Union{Function,Nothing}
end
"""
It holds that x(t,t') = 0 if t' does not belong to 𝒢 = [t+DT, t+TC-1].
This is because DT is the minimum downtime, so x(t,t') = 1 is impossible for t' < t+DT,
and TC is the "time until cold": if the generator starts up at t' >= t+TC, it always incurs the maximum startup cost.
"""
function add_downtime_arcs(var::UCVariable,
x::OrderedDict,
mip::JuMP.Model,
instance::UnitCommitmentInstance)
T = instance.time
for g in instance.units
S = length(g.startup_categories)
if S == 0
continue
end
DT = g.min_downtime # minimum time offline
TC = g.startup_categories[S].delay # time offline until totally cold
for t1 = 1:T-1
for t2 = t1+1:T
# It holds that x(t,t') = 0 if t' does not belong to 𝒢 = [t+DT, t+TC-1]
# This is because DT is the minimum downtime, so there is no way x(t,t')=1 for t'<t+DT
# and TC is the "time until cold" => if the generator starts afterwards, always has max cost
if (t2 < t1 + DT) || (t2 >= t1 + TC)
continue
end
name = string(var.name, "[", g.name, ",", t1, ",", t2, "]")
x[g.name, t1, t2] = @variable(mip,
lower_bound=var.lb,
upper_bound=var.ub,
integer=var.integer,
base_name=name)
end # loop over time 2
end # loop over time 1
end # loop over units
end # add_downtime_arcs
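# Illustrative (hypothetical) numbers for the arc window above: with
# min_downtime DT = 4, coldest startup delay TC = 8 and T >= 10, a shutdown at
# t1 = 3 creates only the arcs x[g, 3, 7] through x[g, 3, 10], i.e. t2 ranges
# over [t1 + DT, t1 + TC - 1] = [7, 10].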
"""
If there is a penalty specified for not meeting the reserve, then we add a reserve shortfall variable.
"""
function add_reserve_shortfall(var::UCVariable,
x::OrderedDict,
mip::JuMP.Model,
instance::UnitCommitmentInstance)
T = instance.time
for t = 1:T
if instance.shortfall_penalty[t] > 1e-7
name = string(var.name, "[", t, "]")
x[t] = @variable(mip,
lower_bound=var.lb,
upper_bound=var.ub,
integer=var.integer,
base_name=name)
end
end # loop over time
end # add_reserve_shortfall
"""
Variables that the model may (or may not) use.
Note the relationship
r_g(t) = bar{p}_g(t) - p_g(t)
= bar{p}'_g(t) - p'_g(t)
"""
var_list = OrderedDict{Symbol,UCVariable}(
:prod
=> UCVariable(:prod,
"[gen, t]; power from generator gen at time t; p_g(t) = p'_g(t) + g.min_power[t] * u_g(t)",
0., Inf, false,
[Unit, Time], nothing),
:prod_above
=> UCVariable(:prod_above,
"[gen, t]; production above minimum required level; p'_g(t)",
0., Inf, false,
[Unit, Time], nothing ),
:max_power_avail
=> UCVariable(:max_power_avail,
"[gen, t]; maximum power available from generator gen at time t; bar{p}_g(t) = p_g(t) + r_g(t)",
0., Inf, false,
[Unit, Time], nothing),
:max_power_avail_above
=> UCVariable(:max_power_avail_above,
"[gen, t]; maximum power available above minimum from generator gen at time t; bar{p}'_g(t)",
0., Inf, false,
[Unit, Time], nothing),
:segprod
=> UCVariable(:segprod,
"[gen, seg, t]; how much generator gen produces on segment seg in time t; p_g^l(t)",
0., Inf, false,
[ [Unit, CostSegment], Time], nothing),
:reserve
=> UCVariable(:reserve,
"[gen, t]; reserves provided by gen at t; r_g(t)",
0., Inf, false,
[Unit, Time], nothing),
:reserve_shortfall
=> UCVariable(:reserve_shortfall,
"[t]; reserve shortfall at gen at t; s_R(t)",
0., Inf, false,
[Time], add_reserve_shortfall),
:is_on
=> UCVariable(:is_on,
"[gen, t]; is gen on at t; u_g(t)",
0., 1., true,
[Unit, Time], nothing),
:switch_on
=> UCVariable(:switch_on,
"[gen, t]; indicator that gen will be turned on at t; v_g(t)",
0., 1., true,
[Unit, Time], nothing),
:switch_off
=> UCVariable(:switch_off,
"[gen, t]; indicator that gen will be turned off at t; w_g(t)",
0., 1., true,
[Unit, Time], nothing),
:net_injection
=> UCVariable(:net_injection,
"[bus.name, t]",
-1e100, Inf, false,
[Bus, Time], nothing),
:curtail
=> UCVariable(:curtail,
"[bus.name, t]; upper bound is max load at the bus at time t",
0., Inf, false,
[Bus, Time], nothing),
:flow
=> UCVariable(:flow,
"[violation.monitored_line.name, t]",
-1e100, Inf, false,
[Violation, Time], nothing),
:overflow
=> UCVariable(:overflow,
"[transmission_line.name, t]; how much flow above the transmission limits (in MW) is allowed",
0., Inf, false,
[TransmissionLine, Time], nothing),
:loads
=> UCVariable(:loads,
"[price_sensitive_load.name, t]; production to meet demand at a set price, if it is economically sensible, independent of the rest of the demand; upper bound is demand at this price at time t",
0., Inf, false,
[PriceSensitiveLoad, Time], nothing),
:startup
=> UCVariable(:startup,
"[gen, startup_category, t]; indicator that generator g starts up in startup_category at time t; 𝛿_g^s(t)",
0., 1., true,
[ [Unit, StartupCategory], Time], nothing),
:downtime_arc
=> UCVariable(:downtime_arc,
"[gen, t, t']; indicator for shutdown at t and starting at t'",
0., 1., true,
[Unit, Time, Time], add_downtime_arcs),
) # var_list
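# The dictionary above is keyed by variable symbol, so metadata can be looked
# up directly; a few illustrative queries:
#   var_list[:is_on].description                                # "[gen, t]; is gen on at t; u_g(t)"
#   var_list[:is_on].integer                                     # true
#   var_list[:downtime_arc].add_variable === add_downtime_arcs   # custom constructor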
#var_symbol_list =
# [
# :prod_above, # [gen, t], ≥ 0
# :segprod, # [gen, t, segment], ≥ 0
# :reserve, # [gen, t], ≥ 0
# :is_on, # [gen, t], binary
# :switch_on, # [gen, t], binary
# :switch_off, # [gen, t], binary
# :net_injection, # [bus.name, t], urs?
# :curtail, # [bus.name, t], domain [0, b.load[t]]
# :overflow, # [transmission_line.name, t], ≥ 0
# :loads, # [price_sensitive_load.name, t], domain [0, ps.demand[t]]
# :startup # [gen, t, startup_category], binary
# ]
"""
For a particular UCElement, which is the field in UnitCommitmentInstance that this corresponds to?
This is used to determine indexing and ranges, e.g., `is_on` is indexed over Unit and Time,
so the variable `is_on` will range in the first index from 1 to length(instance.units)
and on the second index from 1 to instance.time.
"""
ind_to_field_dict = OrderedDict{Type{<:UCElement},Symbol}(
Time => :time,
Bus => :buses,
Unit => :units,
TransmissionLine => :lines,
PriceSensitiveLoad => :price_sensitive_loads,
CostSegment => :cost_segments,
StartupCategory => :startup_categories,
) # ind_to_field_dict
"""
Take indices and convert them to fields of UnitCommitmentInstance.
"""
function ind_to_field(index::Union{Vector,Type{<:UCElement}}) :: Union{Vector,Symbol}
if isa(index, Type{<:UCElement})
return ind_to_field_dict[index]
else
return [ ind_to_field(t) for t in index ]
end
end # ind_to_field
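# For example, following ind_to_field_dict:
#   ind_to_field(Unit)                        == :units
#   ind_to_field([Unit, Time])                == [:units, :time]
#   ind_to_field([[Unit, CostSegment], Time]) == [[:units, :cost_segments], :time]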
function num_indices(v) :: Int64
if !isa(v, Array)
return 1
else
return sum(num_indices(v[i]) for i in 1:length(v))
end
end # num_indices
"""
Can return
* UnitRange -> iterate over this range
* Array{UnitRange} -> cross product of the ranges in the array
* Tuple(UnitRange, Array{UnitRange}) -> the array length should be the same as the range of the UnitRange
"""
function get_indices_tuple(obj::Any, fields::Union{Symbol,Vector,Nothing} = nothing)
if isa(fields, Symbol)
return get_indices_tuple(getfield(obj,fields))
end
if fields == nothing || (isa(fields,Array) && length(fields) == 0)
if isa(obj, Array)
return UnitRange(1,length(obj))
elseif isa(obj, Int)
return UnitRange(1,obj)
else
return UnitRange{Int64}(0:-1)
#return UnitRange(1,1)
end
end
if isa(obj,Array)
indices = (
UnitRange(1,length(obj)),
([
isa(f,Array) ? get_indices_tuple(getfield(x, f[1]), f[2:end]) : get_indices_tuple(getfield(x, f))
for x in obj
] for f in fields)...
)
# more_indices = ([
# isa(f,Array) ? get_indices_tuple(getfield(x, f[1]), f[2:end]) : get_indices_tuple(getfield(x, f))
# for x in obj
# ] for f in fields
# )
# indices = (UnitRange(1,length(obj)),more_indices...)
else
indices = ()
for f in fields
if isa(f,Array)
indices = (indices..., get_indices_tuple(getfield(obj, f[1]), f[2:end]))
else
indices = (indices..., get_indices_tuple(obj,f))
end
end
# indices = (
# isa(f,Array) ? get_indices_tuple(getfield(obj, f[1]), f[2:end]) : get_indices_tuple(getfield(obj, f))
# for f in fields
# )
# (
# isa(f,Array) ? get_indices_tuple(getfield(obj, f[1]), f[2:end]) : get_indices_tuple(getfield(obj, f))
# for f in fields
# )
# indices = (indices...,)
end # check if obj is Array or not
return indices
end # get_indices_tuple
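# A few simple cases, assuming `instance` is a UnitCommitmentInstance:
#   get_indices_tuple(instance.units)   -> 1:length(instance.units)
#   get_indices_tuple(instance, :time)  -> 1:instance.time
#   get_indices_tuple(instance.units, [:cost_segments])
#       -> (1:length(instance.units), [1:length(g.cost_segments) for g in instance.units])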
function loop_over_indices(indices::Any)
loop = nothing
should_print = false
if isa(indices, UnitRange)
loop = indices
elseif isa(indices, Array{UnitRange{Int64}}) || isa(indices, Tuple{Int, UnitRange})
loop = Base.product(Tuple(indices)...)
elseif isa(indices, Tuple{UnitRange, Array})
loop = ()
for t in zip(indices...)
loop = (loop..., loop_over_indices(t)...)
end
elseif isa(indices,Tuple)
loop = ()
for i in indices
loop = (loop..., loop_over_indices(i))
end
loop = Base.product(loop...)
else
error("Why are we here?")
#loop = Base.product(loop_over_indices(indices)...)
end
if should_print
for i in loop
@show i
end
end
return loop
end # loop_over_indices
function expand_tuple(x::Tuple)
y = ()
for i in x
if isa(i, Tuple)
y = (y..., expand_tuple(i)...)
else
y = (y..., i)
end
end
return y
end # expand_tuple
function expand_tuple(X::Array{<:Tuple})
return [ expand_tuple(x) for x in X ]
end # expand_tuple
function get_indices(x::Array)
return expand_tuple(x)
end
function get_indices(x::Base.Iterators.ProductIterator)
return get_indices(collect(x))
end
"""
Access `t.f`, special terminal case of `get_nested_field`.
"""
function get_nested_field(t::Any, f::Symbol)
return getfield(t,f)
end # get_nested_field
"""
Access `t.f`, where `f` could be a subfield.
"""
function get_nested_field(t::Any, f::Vector{Symbol})
if length(f) > 1
return get_nested_field(getfield(t,f[1]), f[2:end])
else
return getfield(t,f[1])
end
end # get_nested_field
"""
Given a set of indices of UCVariable, e.g., [[X,Y],T],
and a UnitCommitmentInstance instance,
if we want to access the field corresponding to Y,
then we call get_nested_field(instance, [[X,Y],T], 2, (4,3)),
which will return instance.X[4].Y[3] if Y is a vector,
and just instance.X[4].Y otherwise.
===
Termination Conditions
If `i` <= 0, then we only care about instance, and not the field.
If `field` is a Symbol and `i` >= 1, then we want to explore instance.field (index t[i] or t).
Note that if `i` >= 1, then `field` must be a symbol.
If `i` == 1, then `t` can be an Int.
===
Parameters
* instance::Any --> all fields will be from instance, or nested fields of fields of instance.
* field::Union{Vector,Symbol,Nothing} --> either the field we want to access, or a vector of fields, and we will want field[i].
* i::Int --> which field to access.
* t::Tuple --> how to go through the fields of instance to get the right field, length needs to be at least `i`.
"""
function get_nested_field(instance::Any, field::Union{Vector,Symbol,Nothing}, i::Int, t::Union{Tuple, Int})
# Check data
if isa(field, Vector)
if i >= 2 && (!isa(t,Tuple) || length(t) < i)
error("Tuple of indices to get nested field needs to be at least the length of the index we want to get.")
end
end
if isa(field, Symbol) || i <= 0
# i = 0 can happen in the recursive call
# What it means is that we do not want a field of the instance, but the instance itself
# TODO handle other iterable types and empty arrays
f = (isa(field, Symbol) && i >= 1) ? getfield(instance, field) : instance
if isa(f,Vector)
if length(f) == 0
error("Trying to iterate over empty field!")
else
return isa(t,Int) ? f[t] : f[t[i]]
end
else
return f
end
end # check termination conditions (f is field or i <= 0)
# Loop over the fields until we find where index i is located
# It may be nested inside an array, so that is why we recurse
start_ind = 0
for f in field
curr_len = isa(f, Vector) ? length(f) : 1
if start_ind + curr_len >= i
if isa(f, Vector)
new_field_is_iterable = isa(getfield(instance, f[1]), Vector)
if new_field_is_iterable
return get_nested_field(getfield(instance, f[1])[t[start_ind+1]], f[2:end], i - start_ind - 1, isa(t,Tuple) ? t[start_ind+2:end] : t)
else
return get_nested_field(getfield(instance, f[1]), f[2:end], i - start_ind - 1, isa(t,Tuple) ? t[start_ind+2:end] : t)
end
else
# f is hopefully a symbol...
return get_nested_field(instance, f, 1, isa(t,Tuple) ? t[start_ind+1] : t)
end
end
start_ind += curr_len
end
return nothing
end # get_nested_field
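# Concrete (hypothetical) instance of the docstring example above, using the
# field names from ind_to_field_dict:
#   fields = [[:units, :cost_segments], :time]
#   get_nested_field(instance, fields, 2, (4, 3))  # -> instance.units[4].cost_segments[3]
#   get_nested_field(instance, fields, 1, (4, 3))  # -> instance.units[4]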
#"""
#Get ranges for the indices of a UCVariable along dimension `i`,
#making sure that the right fields ranges are calculated via `get_nested_field` and `ind_to_field`.
#"""
#function get_range(arr::UCVariable, instance::UnitCommitmentInstance, i::Int) :: UnitRange
# arr = ind_to_field[var.indices[i]]
# f = get_nested_field(instance, arr)
# if isa(f, Array)
# return 1:length(f)
# elseif isa(f, Int)
# return 1:f
# else
# error("Unknown type to generate UnitRange from: ", typeof(f))
# end
#end # get_range
export UCVariable
View File
@@ -6,17 +6,14 @@ using UnitCommitment
@testset "convert" begin @testset "convert" begin
@testset "EGRET solution" begin @testset "EGRET solution" begin
solution = solution = UnitCommitment.read_egret_solution("fixtures/egret_output.json.gz")
UnitCommitment._read_egret_solution("fixtures/egret_output.json.gz")
for attr in ["Is on", "Production (MW)", "Production cost (\$)"] for attr in ["Is on", "Production (MW)", "Production cost (\$)"]
@test attr in keys(solution) @test attr in keys(solution)
@test "115_STEAM_1" in keys(solution[attr]) @test "115_STEAM_1" in keys(solution[attr])
@test length(solution[attr]["115_STEAM_1"]) == 48 @test length(solution[attr]["115_STEAM_1"]) == 48
end end
@test solution["Production cost (\$)"]["315_CT_6"][15:20] == @test solution["Production cost (\$)"]["315_CT_6"][15:20] == [0., 0., 884.44, 1470.71, 1470.71, 884.44]
[0.0, 0.0, 884.44, 1470.71, 1470.71, 884.44] @test solution["Startup cost (\$)"]["315_CT_6"][15:20] == [0., 0., 5665.23, 0., 0., 0.]
@test solution["Startup cost (\$)"]["315_CT_6"][15:20] ==
[0.0, 0.0, 5665.23, 0.0, 0.0, 0.0]
@test length(keys(solution["Is on"])) == 154 @test length(keys(solution["Is on"])) == 154
end end
end end
View File
@@ -8,21 +8,21 @@ using UnitCommitment, Cbc, JuMP
# Load instance # Load instance
instance = UnitCommitment.read("$(pwd())/fixtures/case118-initcond.json.gz") instance = UnitCommitment.read("$(pwd())/fixtures/case118-initcond.json.gz")
optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0) optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
# All units should have unknown initial conditions # All units should have unknown initial conditions
for g in instance.units for g in instance.units
@test g.initial_power === nothing @test g.initial_power === nothing
@test g.initial_status === nothing @test g.initial_status === nothing
end end
# Generate initial conditions # Generate initial conditions
UnitCommitment.generate_initial_conditions!(instance, optimizer) UnitCommitment.generate_initial_conditions!(instance, optimizer)
# All units should now have known initial conditions # All units should now have known initial conditions
for g in instance.units for g in instance.units
@test g.initial_power !== nothing @test g.initial_power !== nothing
@test g.initial_status !== nothing @test g.initial_status !== nothing
end end
# TODO: Check that initial conditions are feasible # TODO: Check that initial conditions are feasible
end end
View File
@@ -15,46 +15,46 @@ using UnitCommitment, LinearAlgebra, Cbc, JuMP, JSON, GZip
@test length(instance.price_sensitive_loads) == 1 @test length(instance.price_sensitive_loads) == 1
@test instance.time == 4 @test instance.time == 4
@test instance.lines[5].name == "l5" @test instance.lines[5].name == "l5"
@test instance.lines[5].source.name == "b2" @test instance.lines[5].source.name == "b2"
@test instance.lines[5].target.name == "b5" @test instance.lines[5].target.name == "b5"
@test instance.lines[5].reactance 0.17388 @test instance.lines[5].reactance 0.17388
@test instance.lines[5].susceptance 10.037550333 @test instance.lines[5].susceptance 10.037550333
@test instance.lines[5].normal_flow_limit == [1e8 for t in 1:4] @test instance.lines[5].normal_flow_limit == [1e8 for t in 1:4]
@test instance.lines[5].emergency_flow_limit == [1e8 for t in 1:4] @test instance.lines[5].emergency_flow_limit == [1e8 for t in 1:4]
@test instance.lines[5].flow_limit_penalty == [5e3 for t in 1:4] @test instance.lines[5].flow_limit_penalty == [5e3 for t in 1:4]
@test instance.lines[1].name == "l1" @test instance.lines[1].name == "l1"
@test instance.lines[1].source.name == "b1" @test instance.lines[1].source.name == "b1"
@test instance.lines[1].target.name == "b2" @test instance.lines[1].target.name == "b2"
@test instance.lines[1].reactance 0.059170 @test instance.lines[1].reactance 0.059170
@test instance.lines[1].susceptance 29.496860773945 @test instance.lines[1].susceptance 29.496860773945
@test instance.lines[1].normal_flow_limit == [300.0 for t in 1:4] @test instance.lines[1].normal_flow_limit == [300.0 for t in 1:4]
@test instance.lines[1].emergency_flow_limit == [400.0 for t in 1:4] @test instance.lines[1].emergency_flow_limit == [400.0 for t in 1:4]
@test instance.lines[1].flow_limit_penalty == [1e3 for t in 1:4] @test instance.lines[1].flow_limit_penalty == [1e3 for t in 1:4]
@test instance.buses[9].name == "b9" @test instance.buses[9].name == "b9"
@test instance.buses[9].load == [35.36638, 33.25495, 31.67138, 31.14353] @test instance.buses[9].load == [35.36638, 33.25495, 31.67138, 31.14353]
unit = instance.units[1] unit = instance.units[1]
@test unit.name == "g1" @test unit.name == "g1"
@test unit.bus.name == "b1" @test unit.bus.name == "b1"
@test unit.ramp_up_limit == 1e6 @test unit.ramp_up_limit == 1e6
@test unit.ramp_down_limit == 1e6 @test unit.ramp_down_limit == 1e6
@test unit.startup_limit == 1e6 @test unit.startup_limit == 1e6
@test unit.shutdown_limit == 1e6 @test unit.shutdown_limit == 1e6
@test unit.must_run == [false for t in 1:4] @test unit.must_run == [false for t in 1:4]
@test unit.min_power_cost == [1400.0 for t in 1:4] @test unit.min_power_cost == [1400. for t in 1:4]
@test unit.min_uptime == 1 @test unit.min_uptime == 1
@test unit.min_downtime == 1 @test unit.min_downtime == 1
@test unit.provides_spinning_reserves == [true for t in 1:4] @test unit.provides_spinning_reserves == [true for t in 1:4]
for t in 1:1 for t in 1:1
@test unit.cost_segments[1].mw[t] == 10.0 @test unit.cost_segments[1].mw[t] == 10.0
@test unit.cost_segments[2].mw[t] == 20.0 @test unit.cost_segments[2].mw[t] == 20.0
@test unit.cost_segments[3].mw[t] == 5.0 @test unit.cost_segments[3].mw[t] == 5.0
@test unit.cost_segments[1].cost[t] 20.0 @test unit.cost_segments[1].cost[t] 20.0
@test unit.cost_segments[2].cost[t] 30.0 @test unit.cost_segments[2].cost[t] 30.0
@test unit.cost_segments[3].cost[t] 40.0 @test unit.cost_segments[3].cost[t] 40.0
end end
@test length(unit.startup_categories) == 3 @test length(unit.startup_categories) == 3
@test unit.startup_categories[1].delay == 1 @test unit.startup_categories[1].delay == 1
@@ -63,62 +63,48 @@ using UnitCommitment, LinearAlgebra, Cbc, JuMP, JSON, GZip
@test unit.startup_categories[1].cost == 1000.0 @test unit.startup_categories[1].cost == 1000.0
@test unit.startup_categories[2].cost == 1500.0 @test unit.startup_categories[2].cost == 1500.0
@test unit.startup_categories[3].cost == 2000.0 @test unit.startup_categories[3].cost == 2000.0
unit = instance.units[2] unit = instance.units[2]
@test unit.name == "g2" @test unit.name == "g2"
@test unit.must_run == [false for t in 1:4] @test unit.must_run == [false for t in 1:4]
unit = instance.units[3] unit = instance.units[3]
@test unit.name == "g3" @test unit.name == "g3"
@test unit.bus.name == "b3" @test unit.bus.name == "b3"
@test unit.ramp_up_limit == 70.0 @test unit.ramp_up_limit == 70.0
@test unit.ramp_down_limit == 70.0 @test unit.ramp_down_limit == 70.0
@test unit.startup_limit == 70.0 @test unit.startup_limit == 70.0
@test unit.shutdown_limit == 70.0 @test unit.shutdown_limit == 70.0
@test unit.must_run == [true for t in 1:4] @test unit.must_run == [true for t in 1:4]
@test unit.min_power_cost == [0.0 for t in 1:4] @test unit.min_power_cost == [0. for t in 1:4]
@test unit.min_uptime == 1 @test unit.min_uptime == 1
@test unit.min_downtime == 1 @test unit.min_downtime == 1
@test unit.provides_spinning_reserves == [true for t in 1:4] @test unit.provides_spinning_reserves == [true for t in 1:4]
for t in 1:4 for t in 1:4
@test unit.cost_segments[1].mw[t] 33 @test unit.cost_segments[1].mw[t] 33
@test unit.cost_segments[2].mw[t] 33 @test unit.cost_segments[2].mw[t] 33
@test unit.cost_segments[3].mw[t] 34 @test unit.cost_segments[3].mw[t] 34
@test unit.cost_segments[1].cost[t] 33.75 @test unit.cost_segments[1].cost[t] 33.75
@test unit.cost_segments[2].cost[t] 38.04 @test unit.cost_segments[2].cost[t] 38.04
@test unit.cost_segments[3].cost[t] 44.77853 @test unit.cost_segments[3].cost[t] 44.77853
end end
@test instance.reserves.spinning == zeros(4) @test instance.reserves.spinning == zeros(4)
@test instance.contingencies[1].lines == [instance.lines[1]] @test instance.contingencies[1].lines == [instance.lines[1]]
@test instance.contingencies[1].units == [] @test instance.contingencies[1].units == []
load = instance.price_sensitive_loads[1] load = instance.price_sensitive_loads[1]
@test load.name == "ps1" @test load.name == "ps1"
@test load.bus.name == "b3" @test load.bus.name == "b3"
@test load.revenue == [100.0 for t in 1:4] @test load.revenue == [100. for t in 1:4]
@test load.demand == [50.0 for t in 1:4] @test load.demand == [50. for t in 1:4]
end end
@testset "read sub-hourly" begin
instance = UnitCommitment.read_benchmark("test/case14-sub-hourly")
@test instance.time == 4
unit = instance.units[1]
@test unit.name == "g1"
@test unit.min_uptime == 2
@test unit.min_downtime == 2
@test length(unit.startup_categories) == 3
@test unit.startup_categories[1].delay == 2
@test unit.startup_categories[2].delay == 4
@test unit.startup_categories[3].delay == 6
@test unit.initial_status == -200
end
@testset "slice" begin @testset "slice" begin
instance = UnitCommitment.read_benchmark("test/case14") instance = UnitCommitment.read_benchmark("test/case14")
modified = UnitCommitment.slice(instance, 1:2) modified = UnitCommitment.slice(instance, 1:2)
# Should update all time-dependent fields # Should update all time-dependent fields
@test modified.time == 2 @test modified.time == 2
@test length(modified.power_balance_penalty) == 2 @test length(modified.power_balance_penalty) == 2
@@ -146,13 +132,11 @@ using UnitCommitment, LinearAlgebra, Cbc, JuMP, JSON, GZip
@test length(ps.demand) == 2 @test length(ps.demand) == 2
@test length(ps.revenue) == 2 @test length(ps.revenue) == 2
end end
# Should be able to build model without errors # Should be able to build model without errors
optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0) optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
model = build_model( model = build_model(instance=modified,
instance = modified, optimizer=optimizer,
optimizer = optimizer, variable_names=true)
variable_names = true,
)
end end
end end
View File
@@ -2,38 +2,55 @@
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved. # Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details. # Released under the modified BSD license. See COPYING.md for more details.
using UnitCommitment, LinearAlgebra, Cbc, JuMP using UnitCommitment, LinearAlgebra, JuMP
USE_GUROBI = (Base.find_package("Gurobi") != nothing)
USE_CBC = !USE_GUROBI
if USE_GUROBI
using Gurobi
else
using Cbc
end
NUM_THREADS = 4
LOG_LEVEL = 1
@testset "Model" begin @testset "Model" begin
@testset "Run" begin @testset "Run" begin
instance = UnitCommitment.read_benchmark("test/case14") instance = UnitCommitment.read_benchmark("test/case14")
#instance = UnitCommitment.read_benchmark("matpower/case3375wp/2017-02-01")
#instance = UnitCommitment.read_benchmark("matpower/case1888rte/2017-02-01")
for line in instance.lines, t in 1:4 for line in instance.lines, t in 1:4
line.normal_flow_limit[t] = 10.0 line.normal_flow_limit[t] = 10.0
end end
optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0) #for formulation in [UnitCommitment.DefaultFormulation, UnitCommitment.TightFormulation]
model = build_model( for formulation in [UnitCommitment.TightFormulation]
instance = instance, @info string("Running test of ", formulation)
optimizer = optimizer, if USE_CBC
variable_names = true, optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => LOG_LEVEL)
) end
@test name(model[:is_on]["g1", 1]) == "is_on[g1,1]" if USE_GUROBI
optimizer = optimizer_with_attributes(Gurobi.Optimizer, "Threads" => NUM_THREADS)
end
model = build_model(instance=instance,
optimizer=optimizer,
variable_names=true,
formulation=formulation)
# Optimize and retrieve solution JuMP.write_to_file(model.mip, "test.mps")
UnitCommitment.optimize!(model)
solution = UnitCommitment.solution(model)
# Write solution to a file # Optimize and retrieve solution
filename = tempname() UnitCommitment.optimize!(model)
UnitCommitment.write(filename, solution) solution = get_solution(model)
loaded = JSON.parsefile(filename)
@test length(loaded["Is on"]) == 6 # Verify solution
@test UnitCommitment.validate(instance, solution)
# Verify solution # Reoptimize with fixed solution
@test UnitCommitment.validate(instance, solution) UnitCommitment.fix!(model, solution)
UnitCommitment.optimize!(model)
@test UnitCommitment.validate(instance, solution)
# Reoptimize with fixed solution #@show solution
UnitCommitment.fix!(model, solution) end # loop over components
UnitCommitment.optimize!(model) end # end testset Run
@test UnitCommitment.validate(instance, solution) end # end test
end
end
View File
@@ -3,9 +3,6 @@
# Released under the modified BSD license. See COPYING.md for more details. # Released under the modified BSD license. See COPYING.md for more details.
using Test using Test
using UnitCommitment
UnitCommitment._setup_logger()
@testset "UnitCommitment" begin @testset "UnitCommitment" begin
include("instance_test.jl") include("instance_test.jl")
View File
@@ -3,110 +3,73 @@
# Released under the modified BSD license. See COPYING.md for more details.
using UnitCommitment, Test, LinearAlgebra
-import UnitCommitment: Violation, _offer, _query
@testset "Screening" begin
    @testset "Violation filter" begin
        instance = UnitCommitment.read_benchmark("test/case14")
-        filter = UnitCommitment.ViolationFilter(max_per_line = 1, max_total = 2)
-        _offer(
-            filter,
-            Violation(
-                time = 1,
-                monitored_line = instance.lines[1],
-                outage_line = nothing,
-                amount = 100.0,
-            ),
-        )
-        _offer(
-            filter,
-            Violation(
-                time = 1,
-                monitored_line = instance.lines[1],
-                outage_line = instance.lines[1],
-                amount = 300.0,
-            ),
-        )
-        _offer(
-            filter,
-            Violation(
-                time = 1,
-                monitored_line = instance.lines[1],
-                outage_line = instance.lines[5],
-                amount = 500.0,
-            ),
-        )
-        _offer(
-            filter,
-            Violation(
-                time = 1,
-                monitored_line = instance.lines[1],
-                outage_line = instance.lines[4],
-                amount = 400.0,
-            ),
-        )
-        _offer(
-            filter,
-            Violation(
-                time = 1,
-                monitored_line = instance.lines[2],
-                outage_line = instance.lines[1],
-                amount = 200.0,
-            ),
-        )
-        _offer(
-            filter,
-            Violation(
-                time = 1,
-                monitored_line = instance.lines[2],
-                outage_line = instance.lines[8],
-                amount = 100.0,
-            ),
-        )
-        actual = _query(filter)
-        expected = [
-            Violation(
-                time = 1,
-                monitored_line = instance.lines[2],
-                outage_line = instance.lines[1],
-                amount = 200.0,
-            ),
-            Violation(
-                time = 1,
-                monitored_line = instance.lines[1],
-                outage_line = instance.lines[5],
-                amount = 500.0,
-            ),
-        ]
+        filter = ViolationFilter(max_per_line=1, max_total=2)
+        offer(filter, Violation(time=1,
+                                monitored_line=instance.lines[1],
+                                outage_line=nothing,
+                                amount=100.))
+        offer(filter, Violation(time=1,
+                                monitored_line=instance.lines[1],
+                                outage_line=instance.lines[1],
+                                amount=300.))
+        offer(filter, Violation(time=1,
+                                monitored_line=instance.lines[1],
+                                outage_line=instance.lines[5],
+                                amount=500.))
+        offer(filter, Violation(time=1,
+                                monitored_line=instance.lines[1],
+                                outage_line=instance.lines[4],
+                                amount=400.))
+        offer(filter, Violation(time=1,
+                                monitored_line=instance.lines[2],
+                                outage_line=instance.lines[1],
+                                amount=200.))
+        offer(filter, Violation(time=1,
+                                monitored_line=instance.lines[2],
+                                outage_line=instance.lines[8],
+                                amount=100.))
+        actual = query(filter)
+        expected = [Violation(time=1,
+                              monitored_line=instance.lines[2],
+                              outage_line=instance.lines[1],
+                              amount=200.),
+                    Violation(time=1,
+                              monitored_line=instance.lines[1],
+                              outage_line=instance.lines[5],
+                              amount=500.)]
        @test actual == expected
    end
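From the expected result above, the filter keeps at most `max_per_line` violations per monitored line and at most `max_total` overall, preferring larger amounts: here the 500-amount violation on line 1 and the 200-amount violation on line 2 survive. The sketch below is a rough, self-contained illustration of that behavior, not the package's implementation, and the ordering of the returned list may differ from the test's `expected` vector.

```julia
# Illustrative top-k violation filter. Type and field names mirror the test,
# but this is NOT the code under test.
struct MiniViolation
    time::Int
    monitored_line::Any
    outage_line::Any
    amount::Float64
end

struct MiniFilter
    max_per_line::Int
    max_total::Int
    queues::Dict{Any,Vector{MiniViolation}}
end

MiniFilter(; max_per_line, max_total) =
    MiniFilter(max_per_line, max_total, Dict{Any,Vector{MiniViolation}}())

function offer!(f::MiniFilter, v::MiniViolation)
    q = get!(f.queues, v.monitored_line, MiniViolation[])
    push!(q, v)
    sort!(q, by = x -> x.amount, rev = true)       # keep the largest violations first
    resize!(q, min(length(q), f.max_per_line))     # at most max_per_line per monitored line
    return nothing
end

function query(f::MiniFilter)
    all_viol = reduce(vcat, values(f.queues), init = MiniViolation[])
    sort!(all_viol, by = x -> x.amount, rev = true)
    return all_viol[1:min(length(all_viol), f.max_total)]   # at most max_total overall
end
```

With `max_per_line = 1` and `max_total = 2`, offering the six violations from the test and calling `query` would return the 500-amount violation on line 1 and the 200-amount violation on line 2, matching the contents (if not necessarily the order) of `expected`.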
@testset "find_violations" begin @testset "find_violations" begin
instance = UnitCommitment.read_benchmark("test/case14") instance = UnitCommitment.read_benchmark("test/case14")
for line in instance.lines, t in 1:instance.time for line in instance.lines, t in 1:instance.time
line.normal_flow_limit[t] = 1.0 line.normal_flow_limit[t] = 1.0
line.emergency_flow_limit[t] = 1.0 line.emergency_flow_limit[t] = 1.0
end end
isf = UnitCommitment._injection_shift_factors( isf = UnitCommitment.injection_shift_factors(lines=instance.lines,
lines = instance.lines, buses=instance.buses)
buses = instance.buses, lodf = UnitCommitment.line_outage_factors(lines=instance.lines,
) buses=instance.buses,
lodf = UnitCommitment._line_outage_factors( isf=isf)
lines = instance.lines,
buses = instance.buses,
isf = isf,
)
inj = [1000.0 for b in 1:13, t in 1:instance.time] inj = [1000.0 for b in 1:13, t in 1:instance.time]
overflow = [0.0 for l in instance.lines, t in 1:instance.time] overflow = [0.0 for l in instance.lines, t in 1:instance.time]
violations = UnitCommitment._find_violations( violations = UnitCommitment.find_violations(instance=instance,
instance = instance, net_injections=inj,
net_injections = inj, overflow=overflow,
overflow = overflow, isf=isf,
isf = isf, lodf=lodf)
lodf = lodf,
)
@test length(violations) == 20 @test length(violations) == 20
end end
end end

View File

@@ -7,139 +7,109 @@ using UnitCommitment, Test, LinearAlgebra
@testset "Sensitivity" begin @testset "Sensitivity" begin
@testset "Susceptance matrix" begin @testset "Susceptance matrix" begin
instance = UnitCommitment.read_benchmark("test/case14") instance = UnitCommitment.read_benchmark("test/case14")
actual = UnitCommitment._susceptance_matrix(instance.lines) actual = UnitCommitment.susceptance_matrix(instance.lines)
@test size(actual) == (20, 20) @test size(actual) == (20, 20)
expected = Diagonal([ expected = Diagonal([29.5, 7.83, 8.82, 9.9, 10.04,
29.5, 10.2, 41.45, 8.35, 3.14, 6.93,
7.83, 8.77, 6.82, 13.4, 9.91, 15.87,
8.82, 20.65, 6.46, 9.09, 8.73, 5.02])
9.9, @test round.(actual, digits=2) == expected
10.04,
10.2,
41.45,
8.35,
3.14,
6.93,
8.77,
6.82,
13.4,
9.91,
15.87,
20.65,
6.46,
9.09,
8.73,
5.02,
])
@test round.(actual, digits = 2) == expected
end end
@testset "Reduced incidence matrix" begin @testset "Reduced incidence matrix" begin
instance = UnitCommitment.read_benchmark("test/case14") instance = UnitCommitment.read_benchmark("test/case14")
actual = UnitCommitment._reduced_incidence_matrix( actual = UnitCommitment.reduced_incidence_matrix(lines=instance.lines,
lines = instance.lines, buses=instance.buses)
buses = instance.buses,
)
@test size(actual) == (20, 13) @test size(actual) == (20, 13)
@test actual[1, 1] == -1.0 @test actual[1, 1] == -1.0
@test actual[3, 1] == 1.0 @test actual[3, 1] == 1.0
@test actual[4, 1] == 1.0 @test actual[4, 1] == 1.0
@test actual[5, 1] == 1.0 @test actual[5, 1] == 1.0
@test actual[3, 2] == -1.0 @test actual[3, 2] == -1.0
@test actual[6, 2] == 1.0 @test actual[6, 2] == 1.0
@test actual[4, 3] == -1.0 @test actual[4, 3] == -1.0
@test actual[6, 3] == -1.0 @test actual[6, 3] == -1.0
@test actual[7, 3] == 1.0 @test actual[7, 3] == 1.0
@test actual[8, 3] == 1.0 @test actual[8, 3] == 1.0
@test actual[9, 3] == 1.0 @test actual[9, 3] == 1.0
@test actual[2, 4] == -1.0 @test actual[2, 4] == -1.0
@test actual[5, 4] == -1.0 @test actual[5, 4] == -1.0
@test actual[7, 4] == -1.0 @test actual[7, 4] == -1.0
@test actual[10, 4] == 1.0 @test actual[10, 4] == 1.0
@test actual[10, 5] == -1.0 @test actual[10, 5] == -1.0
@test actual[11, 5] == 1.0 @test actual[11, 5] == 1.0
@test actual[12, 5] == 1.0 @test actual[12, 5] == 1.0
@test actual[13, 5] == 1.0 @test actual[13, 5] == 1.0
@test actual[8, 6] == -1.0 @test actual[8, 6] == -1.0
@test actual[14, 6] == 1.0 @test actual[14, 6] == 1.0
@test actual[15, 6] == 1.0 @test actual[15, 6] == 1.0
@test actual[14, 7] == -1.0 @test actual[14, 7] == -1.0
@test actual[9, 8] == -1.0 @test actual[9, 8] == -1.0
@test actual[15, 8] == -1.0 @test actual[15, 8] == -1.0
@test actual[16, 8] == 1.0 @test actual[16, 8] == 1.0
@test actual[17, 8] == 1.0 @test actual[17, 8] == 1.0
@test actual[16, 9] == -1.0 @test actual[16, 9] == -1.0
@test actual[18, 9] == 1.0 @test actual[18, 9] == 1.0
@test actual[11, 10] == -1.0 @test actual[11, 10] == -1.0
@test actual[18, 10] == -1.0 @test actual[18, 10] == -1.0
@test actual[12, 11] == -1.0 @test actual[12, 11] == -1.0
@test actual[19, 11] == 1.0 @test actual[19, 11] == 1.0
@test actual[13, 12] == -1.0 @test actual[13, 12] == -1.0
@test actual[19, 12] == -1.0 @test actual[19, 12] == -1.0
@test actual[20, 12] == 1.0 @test actual[20, 12] == 1.0
@test actual[17, 13] == -1.0 @test actual[17, 13] == -1.0
@test actual[20, 13] == -1.0 @test actual[20, 13] == -1.0
end end
@testset "Injection Shift Factors (ISF)" begin @testset "Injection Shift Factors (ISF)" begin
instance = UnitCommitment.read_benchmark("test/case14") instance = UnitCommitment.read_benchmark("test/case14")
actual = UnitCommitment._injection_shift_factors( actual = UnitCommitment.injection_shift_factors(lines=instance.lines,
lines = instance.lines, buses=instance.buses)
buses = instance.buses,
)
@test size(actual) == (20, 13) @test size(actual) == (20, 13)
@test round.(actual, digits = 2) == [ @test round.(actual, digits=2) == [
-0.84 -0.75 -0.67 -0.61 -0.63 -0.66 -0.66 -0.65 -0.65 -0.64 -0.63 -0.63 -0.64 -0.84 -0.75 -0.67 -0.61 -0.63 -0.66 -0.66 -0.65 -0.65 -0.64 -0.63 -0.63 -0.64;
-0.16 -0.25 -0.33 -0.39 -0.37 -0.34 -0.34 -0.35 -0.35 -0.36 -0.37 -0.37 -0.36 -0.16 -0.25 -0.33 -0.39 -0.37 -0.34 -0.34 -0.35 -0.35 -0.36 -0.37 -0.37 -0.36;
0.03 -0.53 -0.15 -0.1 -0.12 -0.14 -0.14 -0.14 -0.13 -0.13 -0.12 -0.12 -0.13 0.03 -0.53 -0.15 -0.1 -0.12 -0.14 -0.14 -0.14 -0.13 -0.13 -0.12 -0.12 -0.13;
0.06 -0.14 -0.32 -0.22 -0.25 -0.3 -0.3 -0.29 -0.28 -0.27 -0.25 -0.26 -0.27 0.06 -0.14 -0.32 -0.22 -0.25 -0.3 -0.3 -0.29 -0.28 -0.27 -0.25 -0.26 -0.27;
0.08 -0.07 -0.2 -0.29 -0.26 -0.22 -0.22 -0.22 -0.23 -0.25 -0.26 -0.26 -0.24 0.08 -0.07 -0.2 -0.29 -0.26 -0.22 -0.22 -0.22 -0.23 -0.25 -0.26 -0.26 -0.24;
0.03 0.47 -0.15 -0.1 -0.12 -0.14 -0.14 -0.14 -0.13 -0.13 -0.12 -0.12 -0.13 0.03 0.47 -0.15 -0.1 -0.12 -0.14 -0.14 -0.14 -0.13 -0.13 -0.12 -0.12 -0.13;
0.08 0.31 0.5 -0.3 -0.03 0.36 0.36 0.28 0.23 0.1 -0.0 0.02 0.17 0.08 0.31 0.5 -0.3 -0.03 0.36 0.36 0.28 0.23 0.1 -0.0 0.02 0.17;
0.0 0.01 0.02 -0.01 -0.22 -0.63 -0.63 -0.45 -0.41 -0.32 -0.24 -0.25 -0.36 0.0 0.01 0.02 -0.01 -0.22 -0.63 -0.63 -0.45 -0.41 -0.32 -0.24 -0.25 -0.36;
0.0 0.01 0.01 -0.01 -0.12 -0.17 -0.17 -0.26 -0.24 -0.18 -0.14 -0.14 -0.21 0.0 0.01 0.01 -0.01 -0.12 -0.17 -0.17 -0.26 -0.24 -0.18 -0.14 -0.14 -0.21;
-0.0 -0.02 -0.03 0.02 -0.66 -0.2 -0.2 -0.29 -0.36 -0.5 -0.63 -0.61 -0.43 -0.0 -0.02 -0.03 0.02 -0.66 -0.2 -0.2 -0.29 -0.36 -0.5 -0.63 -0.61 -0.43;
-0.0 -0.01 -0.02 0.01 0.21 -0.12 -0.12 -0.17 -0.28 -0.53 0.18 0.15 -0.03 -0.0 -0.01 -0.02 0.01 0.21 -0.12 -0.12 -0.17 -0.28 -0.53 0.18 0.15 -0.03;
-0.0 -0.0 -0.0 0.0 0.03 -0.02 -0.02 -0.03 -0.02 0.01 -0.52 -0.17 -0.09 -0.0 -0.0 -0.0 0.0 0.03 -0.02 -0.02 -0.03 -0.02 0.01 -0.52 -0.17 -0.09;
-0.0 -0.01 -0.01 0.01 0.11 -0.06 -0.06 -0.09 -0.05 0.02 -0.28 -0.59 -0.31 -0.0 -0.01 -0.01 0.01 0.11 -0.06 -0.06 -0.09 -0.05 0.02 -0.28 -0.59 -0.31;
-0.0 -0.0 -0.0 -0.0 -0.0 -0.0 -1.0 -0.0 -0.0 -0.0 -0.0 -0.0 0.0 -0.0 -0.0 -0.0 -0.0 -0.0 -0.0 -1.0 -0.0 -0.0 -0.0 -0.0 -0.0 0.0 ;
0.0 0.01 0.02 -0.01 -0.22 0.37 0.37 -0.45 -0.41 -0.32 -0.24 -0.25 -0.36 0.0 0.01 0.02 -0.01 -0.22 0.37 0.37 -0.45 -0.41 -0.32 -0.24 -0.25 -0.36;
0.0 0.01 0.02 -0.01 -0.21 0.12 0.12 0.17 -0.72 -0.47 -0.18 -0.15 0.03 0.0 0.01 0.02 -0.01 -0.21 0.12 0.12 0.17 -0.72 -0.47 -0.18 -0.15 0.03;
0.0 0.01 0.01 -0.01 -0.14 0.08 0.08 0.12 0.07 -0.03 -0.2 -0.24 -0.6 0.0 0.01 0.01 -0.01 -0.14 0.08 0.08 0.12 0.07 -0.03 -0.2 -0.24 -0.6 ;
0.0 0.01 0.02 -0.01 -0.21 0.12 0.12 0.17 0.28 -0.47 -0.18 -0.15 0.03 0.0 0.01 0.02 -0.01 -0.21 0.12 0.12 0.17 0.28 -0.47 -0.18 -0.15 0.03;
-0.0 -0.0 -0.0 0.0 0.03 -0.02 -0.02 -0.03 -0.02 0.01 0.48 -0.17 -0.09 -0.0 -0.0 -0.0 0.0 0.03 -0.02 -0.02 -0.03 -0.02 0.01 0.48 -0.17 -0.09;
-0.0 -0.01 -0.01 0.01 0.14 -0.08 -0.08 -0.12 -0.07 0.03 0.2 0.24 -0.4 -0.0 -0.01 -0.01 0.01 0.14 -0.08 -0.08 -0.12 -0.07 0.03 0.2 0.24 -0.4 ]
]
end end
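The three quantities tested above fit together through the standard DC power-flow construction: with the diagonal branch susceptance matrix (20x20) and the reduced incidence matrix (20x13), the injection shift factors come out as a 20x13 matrix, consistent with the size checks here. The snippet below is the generic textbook formulation, assumed for illustration; it is not necessarily the package's internal implementation, and sign or slack-bus conventions may differ.

```julia
using LinearAlgebra, UnitCommitment

instance = UnitCommitment.read_benchmark("test/case14")

# Diagonal branch susceptances (lines x lines) and reduced incidence matrix
# (lines x buses, slack bus removed), as exercised by the testsets above.
B = UnitCommitment.susceptance_matrix(instance.lines)
A = UnitCommitment.reduced_incidence_matrix(lines=instance.lines,
                                            buses=instance.buses)

# Textbook ISF/PTDF formula for the DC approximation (assumed, see note above):
# each entry gives the flow induced on a line by one unit of injection at a bus.
isf = B * A * inv(A' * B * A)        # 20x13
```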
@testset "Line Outage Distribution Factors (LODF)" begin @testset "Line Outage Distribution Factors (LODF)" begin
instance = UnitCommitment.read_benchmark("test/case14") instance = UnitCommitment.read_benchmark("test/case14")
isf_before = UnitCommitment._injection_shift_factors( isf_before = UnitCommitment.injection_shift_factors(lines=instance.lines,
lines = instance.lines, buses=instance.buses)
buses = instance.buses, lodf = UnitCommitment.line_outage_factors(lines=instance.lines,
) buses=instance.buses,
lodf = UnitCommitment._line_outage_factors( isf=isf_before)
lines = instance.lines,
buses = instance.buses,
isf = isf_before,
)
for contingency in instance.contingencies for contingency in instance.contingencies
for lc in contingency.lines for lc in contingency.lines
prev_susceptance = lc.susceptance prev_susceptance = lc.susceptance
lc.susceptance = 0.0 lc.susceptance = 0.0
isf_after = UnitCommitment._injection_shift_factors( isf_after = UnitCommitment.injection_shift_factors(lines=instance.lines,
lines = instance.lines, buses=instance.buses)
buses = instance.buses,
)
lc.susceptance = prev_susceptance lc.susceptance = prev_susceptance
for lm in instance.lines for lm in instance.lines
expected = isf_after[lm.offset, :] expected = isf_after[lm.offset, :]
actual = actual = isf_before[lm.offset, :] +
isf_before[lm.offset, :] + lodf[lm.offset, lc.offset] * isf_before[lc.offset, :]
lodf[lm.offset, lc.offset] * isf_before[lc.offset, :]
@test norm(expected - actual) < 1e-6 @test norm(expected - actual) < 1e-6
end end
end end
end end
end end
end end
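In words, the innermost loop checks the standard LODF identity: when line c is outaged, the post-contingency shift factors of any monitored line m equal the pre-contingency factors plus the LODF-weighted factors of the outaged line,

```latex
\mathrm{ISF}^{\text{post}}_{m,b} \;=\; \mathrm{ISF}_{m,b} \;+\; \mathrm{LODF}_{m,c}\,\mathrm{ISF}_{c,b}
\qquad \text{for every bus } b,
```

which is exactly `isf_before[lm.offset, :] + lodf[lm.offset, lc.offset] * isf_before[lc.offset, :]` in the test, verified against a recomputed ISF matrix to within a 2-norm tolerance of 1e-6.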

View File

@@ -4,40 +4,36 @@
using UnitCommitment, JSON, GZip, DataStructures
-function parse_case14()
-    return JSON.parse(
-        GZip.gzopen("../instances/test/case14.json.gz"),
-        dicttype = () -> DefaultOrderedDict(nothing),
-    )
-end
+parse_case14() = JSON.parse(GZip.gzopen("../instances/test/case14.json.gz"),
+                            dicttype=()->DefaultOrderedDict(nothing))
@testset "Validation" begin
-    @testset "repair!" begin
+    @testset "fix!" begin
        @testset "Cost curve should be convex" begin
            json = parse_case14()
-            json["Generators"]["g1"]["Production cost curve (MW)"] =
-                [100, 150, 200]
-            json["Generators"]["g1"]["Production cost curve (\$)"] =
-                [10, 25, 30]
-            instance = UnitCommitment._from_json(json, repair = false)
-            @test UnitCommitment.repair!(instance) == 4
+            json["Generators"]["g1"]["Production cost curve (MW)"] = [100, 150, 200]
+            json["Generators"]["g1"]["Production cost curve (\$)"] = [10, 25, 30]
+            instance = UnitCommitment.from_json(json, fix=false)
+            @test UnitCommitment.fix!(instance) == 4
        end
        @testset "Startup limit must be greater than Pmin" begin
            json = parse_case14()
            json["Generators"]["g1"]["Production cost curve (MW)"] = [100, 150]
            json["Generators"]["g1"]["Production cost curve (\$)"] = [100, 150]
            json["Generators"]["g1"]["Startup limit (MW)"] = 80
-            instance = UnitCommitment._from_json(json, repair = false)
-            @test UnitCommitment.repair!(instance) == 1
+            instance = UnitCommitment.from_json(json, fix=false)
+            @test UnitCommitment.fix!(instance) == 1
        end
        @testset "Startup costs and delays must be increasing" begin
            json = parse_case14()
            json["Generators"]["g1"]["Startup costs (\$)"] = [300, 200, 100]
            json["Generators"]["g1"]["Startup delays (h)"] = [8, 4, 2]
-            instance = UnitCommitment._from_json(json, repair = false)
-            @test UnitCommitment.repair!(instance) == 4
+            instance = UnitCommitment.from_json(json, fix=false)
+            @test UnitCommitment.fix!(instance) == 4
        end
    end
end
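As a worked check of the first test case above: the candidate curve passes through (100 MW, $10), (150 MW, $25) and (200 MW, $30), so the segment slopes are (25 - 10)/(150 - 100) = 0.30 $/MW followed by (30 - 25)/(200 - 150) = 0.10 $/MW. Because the marginal cost decreases, the curve is not convex, which is what `repair!` (called `fix!` on the new side of this diff) has to correct. The helper below is a hypothetical illustration of that convexity condition, not the package's validation code.

```julia
# Hypothetical convexity check for a piecewise-linear production cost curve.
# The package's actual repair logic is more involved than this.
function is_convex_curve(mw::Vector{<:Real}, cost::Vector{<:Real})
    slopes = diff(cost) ./ diff(mw)          # marginal cost of each segment
    return all(slopes[i] <= slopes[i+1] for i in 1:length(slopes)-1)
end

is_convex_curve([100, 150, 200], [10, 25, 30])   # false -> curve needs repair
is_convex_curve([100, 150, 200], [10, 15, 30])   # true  -> already convex
```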