Compare commits

54 Commits: feature/re... → feature/st...

Commits (SHA1; author and date columns were empty):
8231f9da32, 48ccf0d180, 7f475a0914, 4b0fc7327c, dde0d40282, 74606897cd, 07ca3abb4f, da158eb961, e7eec937cb, 19bec961bd, 8f52c04702, 19a34fb5d2, e915a57e58, 57b7d09c08, a03b9169fd, ee58af73f0, 92d30460b9, 9ebb2e49f9, 505e3a8e1e, d4fa75297f, 881957d6b5, 86cf7f5bd9, a8c7047e2d, 099e0fae3a, 1b8f392852, 7a95aa66f6, 40d28c727a, a9ac164833, e244ded51d, 7180651cfa, 0c9465411f, 658d5ddbdc, 399db41f86, e407a53ecf, 33ab4c5f76, c9391dd299, 6c70d9acd5, 339255bf9b, ca187fe78e, c256cd8b75, 05d48e2cbf, 9446b1921d, 1b0cc141bb, a333ab0b04, 630ae49d4a, 9df416ed75, 849f902562, 1990563476, 7e783c8b91, 93cc6fbf32, a7938b7260, 56ef1f7bc2, b00b24ffbc, 823db2838b
27  .github/workflows/lint.yml  (vendored, Normal file)
@@ -0,0 +1,27 @@
name: Lint
on:
  push:
  pull_request:
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: julia-actions/setup-julia@latest
        with:
          version: '1'
      - uses: actions/checkout@v1
      - name: Format check
        shell: julia --color=yes {0}
        run: |
          using Pkg
          Pkg.add(PackageSpec(name="JuliaFormatter", version="0.14.4"))
          using JuliaFormatter
          format("src", verbose=true)
          format("test", verbose=true)
          out = String(read(Cmd(`git diff`)))
          if isempty(out)
              exit(0)
          end
          @error "Some files have not been formatted !!!"
          write(stdout, out)
          exit(1)
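The same check can be reproduced locally before pushing; the sketch below mirrors the workflow step above (same JuliaFormatter pin) and assumes it is run from the repository root:

```julia
# Local version of the CI format check: format src/ and test/, then see
# whether `git diff` reports any change.
using Pkg
Pkg.add(PackageSpec(name="JuliaFormatter", version="0.14.4"))

using JuliaFormatter

format("src", verbose=true)   # rewrites files in place, listing each one
format("test", verbose=true)

out = String(read(`git diff`))
if !isempty(out)
    @error "Some files have not been formatted"
    print(out)                # show the unformatted hunks, as CI would
end
```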
15  .github/workflows/tagbot.yml  (vendored, Normal file)
@@ -0,0 +1,15 @@
name: TagBot
on:
  issue_comment:
    types:
      - created
  workflow_dispatch:
jobs:
  TagBot:
    if: github.event_name == 'workflow_dispatch' || github.actor == 'JuliaTagBot'
    runs-on: ubuntu-latest
    steps:
      - uses: JuliaRegistries/TagBot@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          ssh: ${{ secrets.DOCUMENTER_KEY }}
8  .github/workflows/test.yml  (vendored)
@@ -1,14 +1,16 @@
name: Build & Test
on:
  - push
  - pull_request
  push:
  pull_request:
  schedule:
    - cron: '45 10 * * *'
jobs:
  test:
    name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        version: ['1.3', '1.4', '1.5', 'nightly']
        version: ['1.6', '1.7', '1.8']
        os:
          - ubuntu-latest
        arch:
5  .gitignore  (vendored)
@@ -8,3 +8,8 @@ instances/*.py
notebooks
.idea
*.lp
Manifest.toml
data
build
benchmark
**/*.log
52  CHANGELOG.md
@@ -1,28 +1,58 @@
# Version 0.5.0 (TBD)
# Changelog

All notable changes to this project will be documented in this file.

- The format is based on [Keep a Changelog][changelog].
- This project adheres to [Semantic Versioning][semver].
- For versions before 1.0, we follow the [Pkg.jl convention][pkjjl]
  that `0.a.b` is compatible with `0.a.c`.

[changelog]: https://keepachangelog.com/en/1.0.0/
[semver]: https://semver.org/spec/v2.0.0.html
[pkjjl]: https://pkgdocs.julialang.org/v1/compatibility/#compat-pre-1.0

## [Unreleased]

- Allow product disposal at collection centers
- Implement stochastic optimization

## [0.5.2] -- 2022-08-26
### Changed
- Update to JuMP 1.x

## [0.5.1] -- 2021-07-23
### Added
- Allow user to specify locations as unique identifiers, instead of latitude and longitude (e.g. `us-state:IL` or `2018-us-county:17043`)
- Add what-if scenarios.
- Add products report.

## [0.5.0] -- 2021-01-06
### Added
- Allow plants to store input material for processing in later years

# Version 0.4.0 (Sep 18, 2020)

## [0.4.0] -- 2020-09-18
### Added
- Generate simplified solution reports (CSV)

# Version 0.3.3 (Aug 13, 2020)

## [0.3.3] -- 2020-10-13
### Added
- Add option to write solution to JSON file in RELOG.solve
- Improve error message when instance is infeasible
- Make output file more readable

# Version 0.3.2 (Aug 7, 2020)

## [0.3.2] -- 2020-10-07
### Added
- Add "building period" parameter

# Version 0.3.1 (July 17, 2020)

## [0.3.1] -- 2020-07-17
### Fixed
- Fix expansion cost breakdown

# Version 0.3.0 (June 25, 2020)

## [0.3.0] -- 2020-06-25
### Added
- Track emissions and energy (transportation and plants)

### Changed
- Minor changes to input file format:
  - Make all dictionary keys lowercase
  - Rename "outputs (tonne)" to "outputs (tonne/tonne)"
28  Makefile
@@ -1,25 +1,19 @@
JULIA := julia --color=yes --project=@.
SRC_FILES := $(wildcard src/*.jl test/*.jl)
VERSION := 0.5

all: docs test

build/sysimage.so: src/sysimage.jl Project.toml Manifest.toml
	mkdir -p build
	$(JULIA) src/sysimage.jl

build/test.log: $(SRC_FILES) build/sysimage.so
	cd test; $(JULIA) --sysimage ../build/sysimage.so runtests.jl

clean:
	rm -rf build/*
	rm -rfv build Manifest.toml test/Manifest.toml deps/formatter/build deps/formatter/Manifest.toml

docs:
	mkdocs build -d ../docs/$(VERSION)/
	cd docs; julia --project=. make.jl; cd ..
	rsync -avP --delete-after docs/build/ ../docs/$(VERSION)/

test: build/test.log

format:
	cd deps/formatter; ../../juliaw format.jl

test-watch:
	bash -c "while true; do make test --quiet; sleep 1; done"

test: test/Manifest.toml
	./juliaw test/runtests.jl

.PHONY: docs test

test/Manifest.toml: test/Project.toml
	julia --project=test -e "using Pkg; Pkg.instantiate()"

.PHONY: docs test format
441  Manifest.toml  (deleted)
@@ -1,441 +0,0 @@
# This file is machine-generated - editing it directly is not advised

[The machine-generated Manifest.toml is removed from version control. It pinned the following packages: BenchmarkTools 0.5.0, BinaryProvider 0.5.10, Bzip2_jll 1.0.6+4, CEnum 0.4.1, CSV 0.7.7, Calculus 0.5.1, CategoricalArrays 0.8.3, Cbc 0.6.7, Clp 0.8.1, Clp_jll 1.17.6+6, CodecBzip2 0.7.2, CodecZlib 0.7.0, CoinUtils_jll 2.11.3+3, CommonSubexpressions 0.3.0, Compat 3.16.0, CompilerSupportLibraries_jll 0.3.3+0, CoordinateTransformations 0.6.0, DataAPI 1.3.0, DataFrames 0.21.7, DataStructures 0.17.20, DataValueInterfaces 1.0.0, DiffResults 1.0.2, DiffRules 1.0.1, ForwardDiff 0.10.12, GZip 0.5.1, Geodesy 0.5.0, HTTP 0.8.19, IniFile 0.5.0, InvertedIndices 1.0.0, IteratorInterfaceExtensions 1.0.0, JSON 0.21.1, JSONSchema 0.2.0, JuMP 0.21.3, MacroTools 0.5.5, MathOptInterface 0.9.13, MathProgBase 0.7.8, MbedTLS 1.0.2, MbedTLS_jll 2.16.8+0, Missings 0.4.4, MutableArithmetics 0.2.10, NaNMath 0.3.4, OpenBLAS32_jll 0.3.9+4, OpenSpecFun_jll 0.5.3+3, OrderedCollections 1.3.1, Osi_jll 0.108.5+3, PackageCompiler 1.2.1, Parsers 1.0.10, PooledArrays 0.5.3, ProgressBars 0.6.0, Reexport 0.2.0, SentinelArrays 1.2.15, SortingAlgorithms 0.3.1, SpecialFunctions 0.10.3, StaticArrays 0.12.4, StructTypes 1.1.0, TableTraits 1.0.0, Tables 1.0.5, TranscodingStreams 0.9.5, Zlib_jll 1.2.11+16; plus the standard libraries Base64, Dates, DelimitedFiles, Distributed, Future, InteractiveUtils, LibGit2, Libdl, LinearAlgebra, Logging, Markdown, Mmap, Pkg, Printf, REPL, Random, SHA, Serialization, SharedArrays, Sockets, SparseArrays, Statistics, Test, UUIDs, Unicode. The per-package UUID, deps, and git-tree-sha1 entries of the deleted lockfile are not reproduced here.]
36  Project.toml
@@ -1,39 +1,45 @@
name = "RELOG"
uuid = "a2afcdf7-cf04-4913-85f9-c0d81ddf2008"
authors = ["Alinson S Xavier <axavier@anl.gov>"]
version = "0.5.0"
version = "0.5.2"

[deps]
CRC = "44b605c4-b955-5f2b-9b6d-d2bd01d3d205"
CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
Cbc = "9961bab8-2fa3-5c5a-9d89-47fab24efd76"
Clp = "e2554f3b-3117-50c0-817c-e040a3ddf72d"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
Downloads = "f43a241f-c20a-4ad4-852c-f6b1247861c6"
GZip = "92fee26a-97fe-5a0c-ad85-20a5f3185b63"
Geodesy = "0ef565a4-170c-5f04-8de2-149903a85f3d"
HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
JSONSchema = "7d188eb4-7ad8-530c-ae41-71a32a6d4692"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
PackageCompiler = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
ProgressBars = "49802e3a-d2f1-5c88-81d8-b72133a6f568"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"
Shapefile = "8e980c4a-a4fe-5da2-b3a7-4b4b0353a2f4"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
StochasticPrograms = "8b8459f2-c380-502b-8633-9aed2d6c2b35"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
ZipFile = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea"

[compat]
CSV = "0.7"
Cbc = "0.6"
Clp = "0.8"
DataFrames = "0.21"
DataStructures = "0.17"
CRC = "4"
CSV = "0.10"
DataFrames = "1"
DataStructures = "0.18"
GZip = "0.5"
Geodesy = "0.5"
Geodesy = "1"
JSON = "0.21"
JSONSchema = "0.2"
JuMP = "0.21"
MathOptInterface = "0.9"
PackageCompiler = "1"
ProgressBars = "0.6"
JSONSchema = "1"
JuMP = "1"
MathOptInterface = "1"
OrderedCollections = "1"
ProgressBars = "1"
Shapefile = "0.8"
ZipFile = "0.10"
julia = "1"
@@ -15,7 +15,7 @@

<img src="https://anl-ceeesa.github.io/RELOG/0.5/images/ex_transportation.png" width="1000px"/>
<img src="https://anl-ceeesa.github.io/RELOG/0.5/assets/ex_transportation.png" width="1000px"/>

### Documentation

@@ -26,8 +26,10 @@

### Authors

* **Alinson S. Xavier,** Argonne National Laboratory <<axavier@anl.gov>>
* **Nwike Iloeje,** Argonne National Laboratory <<ciloeje@anl.gov>>
* **Alinson S. Xavier** <<axavier@anl.gov>>
* **Nwike Iloeje** <<ciloeje@anl.gov>>
* **John Atkins**
* **Kyle Sun**

### License

5  deps/formatter/Project.toml  (vendored, Normal file)
@@ -0,0 +1,5 @@
[deps]
JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899"

[compat]
JuliaFormatter = "0.14.4"
8  deps/formatter/format.jl  (vendored, Normal file)
@@ -0,0 +1,8 @@
using JuliaFormatter
format(
    [
        "../../src",
        "../../test",
    ],
    verbose=true,
)
4  docs/Project.toml  (Normal file)
@@ -0,0 +1,4 @@
[deps]
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
RELOG = "a2afcdf7-cf04-4913-85f9-c0d81ddf2008"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"
19  docs/make.jl  (Normal file)
@@ -0,0 +1,19 @@
using Documenter, RELOG

function make()
    makedocs(
        sitename="RELOG",
        pages=[
            "Home" => "index.md",
            "usage.md",
            "format.md",
            "reports.md",
            "model.md",
        ],
        format = Documenter.HTML(
            assets=["assets/custom.css"],
        )
    )
end

make()
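For reference, a sketch of how this Documenter setup could be built locally, assuming the commands are run from the repository root and the docs environment defined in docs/Project.toml above:

```julia
# Build the documentation site locally (sketch; standard Pkg/Documenter workflow).
using Pkg

Pkg.activate("docs")      # use the docs environment (Documenter, RELOG, Revise)
Pkg.develop(path=".")     # point the docs environment at this RELOG checkout
Pkg.instantiate()         # install the pinned dependencies

include("docs/make.jl")   # runs makedocs(); HTML output goes to docs/build/
```

The Makefile target above then rsyncs docs/build/ into the published docs tree.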
36  docs/src/assets/custom.css  (Normal file)
@@ -0,0 +1,36 @@
@media screen and (min-width: 1056px) {
    #documenter .docs-main {
        max-width: 65rem !important;
    }
}

tbody, thead, pre {
    border: 1px solid rgba(0, 0, 0, 0.25);
}

table td, th {
    padding: 8px;
}

table p {
    margin-bottom: 0;
}

table td code {
    white-space: nowrap;
}

table tr,
table th {
    border-bottom: 1px solid rgba(0, 0, 0, 0.1);
}

table tr:last-child {
    border-bottom: 0;
}

code {
    background-color: transparent;
    color: rgb(232, 62, 140);
}
Seven documentation image files: width, height, and size are identical before and after (52 KiB, 37 KiB, 31 KiB, 91 KiB, 586 KiB, 29 KiB, 32 KiB).
@@ -11,7 +11,7 @@ RELOG accepts as input a JSON file with three sections: `parameters`, `products`

The **parameters** section describes details about the simulation itself.

| Key | Description
|:--------------------------|---------------|
|:--------------------------|:---------------|
|`time horizon (years)` | Number of years in the simulation.
|`building period (years)` | List of years in which we are allowed to open new plants. For example, if this parameter is set to `[1,2,3]`, we can only open plants during the first three years. By default, this equals `[1]`; that is, plants can only be opened during the first year. |
@@ -31,16 +31,18 @@ The **parameters** section describes details about the simulation itself.

The **products** section describes all products and subproducts in the simulation. The field `instance["Products"]` is a dictionary mapping the name of each product to a dictionary describing its characteristics. Each product description contains the following keys:

| Key | Description
|:--------------------------------------|---------------|
|:--------------------------------------|:---------------|
|`transportation cost ($/km/tonne)` | The cost to transport this product. Must be a time series.
|`transportation energy (J/km/tonne)` | The energy required to transport this product. Must be a time series. Optional.
|`transportation emissions (tonne/km/tonne)` | A dictionary mapping the name of each greenhouse gas produced when transporting one tonne of this product over one kilometer to the amount of gas produced (in tonnes). Must be a time series. Optional.
|`initial amounts` | A dictionary mapping the name of each location to its description (see below). If this product is not initially available, this key may be omitted. Must be a time series.
| `disposal limit (tonne)` | Total amount of product that can be disposed of across all collection centers. If omitted, all product must be processed. This parameter has no effect on product disposal at plants.
| `disposal cost ($/tonne)` | Cost of disposing one tonne of this product at a collection center. If omitted, defaults to zero. This parameter has no effect on product disposal costs at plants.

Each product may have some amount available at the beginning of each time period. In this case, the key `initial amounts` maps to a dictionary with the following keys:

| Key | Description
|:------------------------|---------------|
|:------------------------|:---------------|
| `latitude (deg)` | The latitude of the location.
| `longitude (deg)` | The longitude of the location.
| `amount (tonne)` | The amount of the product initially available at the location. Must be a time series.
@@ -73,7 +75,9 @@ Each product may have some amount available at the beginning of each time period
        "transportation emissions (tonne/km/tonne)": {
            "CO2": [0.052, 0.050],
            "CH4": [0.003, 0.002]
        }
    },
    "disposal cost ($/tonne)": [-10.0, -12.0],
    "disposal limit (tonne)": [1.0, 1.0],
},
"P2": {
    "transportation cost ($/km/tonne)": [0.022, 0.020]
@@ -93,7 +97,7 @@ Each product may have some amount available at the beginning of each time period

The **plants** section describes the available types of reverse manufacturing plants, their potential locations and associated costs, as well as their inputs and outputs. The field `instance["Plants"]` is a dictionary mapping the name of the plant to a dictionary with the following keys:

| Key | Description
|:------------------------|---------------|
|:------------------------|:---------------|
| `input` | The name of the product that this plant takes as input. Only one input is accepted per plant.
| `outputs (tonne/tonne)` | A dictionary specifying how many tonnes of each product are produced for each tonne of input. For example, if the plant outputs 0.5 tonnes of P2 and 0.25 tonnes of P3 for each tonne of P1 provided, then this entry should be `{"P2": 0.5, "P3": 0.25}`. If the plant does not output anything, this key may be omitted.
|`energy (GJ/tonne)` | The energy required to process 1 tonne of the input. Must be a time series. Optional.

@@ -113,14 +117,14 @@ Each type of plant is associated with a set of potential locations where it can

The `storage` dictionary should contain the following keys:

| Key | Description
|:------------------------|---------------|
|:------------------------|:---------------|
| `cost ($/tonne)` | The cost to store a tonne of input product for one time period. Must be a time series.
| `limit (tonne)` | The maximum amount of input product this plant can have in storage at any given time.

The keys in the `disposal` dictionary should be the names of the products. The values are dictionaries with the following keys:

| Key | Description
|:------------------------|---------------|
|:------------------------|:---------------|
| `cost ($/tonne)` | The cost to dispose of the product. Must be a time series.
| `limit (tonne)` | The maximum amount that can be disposed of. If an unlimited amount can be disposed of, this key may be omitted. Must be a time series.
@@ -128,7 +132,7 @@ The keys in the `disposal` dictionary should be the names of the products. The v

The keys in the `capacities (tonne)` dictionary should be the amounts (in tonnes). The values are dictionaries with the following keys:

| Key | Description
|:--------------------------------------|---------------|
|:--------------------------------------|:---------------|
| `opening cost ($)` | The cost to open a plant of this size.
| `fixed operating cost ($)` | The cost to keep the plant open, even if the plant doesn't process anything. Must be a time series.
| `variable operating cost ($/tonne)` | The cost that the plant incurs to process each tonne of input. Must be a time series.
@@ -182,6 +186,38 @@ The keys in the `capacities (tonne)` dictionary should be the amounts (in tonnes
}
```

### Geographic database

Instead of specifying locations using latitudes and longitudes, it is also possible to specify them using unique identifiers, such as the name of a US state or a county FIPS code. This works anywhere `latitude (deg)` and `longitude (deg)` are expected. For example, instead of:
```json
{
    "initial amounts": {
        "C1": {
            "latitude (deg)": 37.27182,
            "longitude (deg)": -119.2704,
            "amount (tonne)": [934.56, 934.56]
        },
    }
}
```
it is possible to write:
```json
{
    "initial amounts": {
        "C1": {
            "location": "us-state:CA",
            "amount (tonne)": [934.56, 934.56]
        },
    }
}
```
Location names follow the format `db:id`, where `db` is the name of the database and `id` is the identifier for a specific location. RELOG currently includes the following databases:

Database | Description | Examples
:--------|:------------|:---------
`us-state`| List of states of the United States. | `us-state:IL` (State of Illinois)
`2018-us-county` | List of United States counties, as of 2018. IDs are 5-digit FIPS codes. | `2018-us-county:17043` (DuPage county in Illinois)
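To make the format concrete, the sketch below assembles a minimal instance with a geodatabase location and writes it to disk with JSON.jl (already a RELOG dependency). It is an illustration only: the values are made up, the `plants` section is left empty, and field names simply follow the tables above.

```julia
# Sketch: build a minimal two-year instance programmatically and save it as JSON.
using JSON

instance = Dict(
    "parameters" => Dict("time horizon (years)" => 2),
    "products" => Dict(
        "P1" => Dict(
            # time series: one entry per year
            "transportation cost (\$/km/tonne)" => [0.015, 0.015],
            "initial amounts" => Dict(
                "C1" => Dict(
                    "location" => "us-state:CA",          # geodatabase identifier
                    "amount (tonne)" => [934.56, 934.56],
                ),
            ),
        ),
    ),
    "plants" => Dict(),   # plant descriptions omitted in this sketch
)

open("instance.json", "w") do io
    JSON.print(io, instance, 2)   # pretty-print with 2-space indentation
end
```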
### Current limitations

* Each plant can be opened exactly once. Once opened, the plant remains open until the end of the simulation.

@@ -192,4 +228,3 @@ The keys in the `capacities (tonne)` dictionary should be the amounts (in tonnes

## Output Data Format (JSON)

To be documented.

@@ -1,25 +1,29 @@

# RELOG: Reverse Logistics Optimization

**RELOG** is an open-source supply chain optimization package focusing on reverse logistics and reverse manufacturing. The package uses Mixed-Integer Linear Programming to determine where to build recycling plants, what size these plants should have, and which customers should be served by which plants. The package supports custom reverse logistics pipelines, with multiple types of plants, multiple types of products, and multiple time periods.

<img src="images/ex_transportation.png" width="1000px"/>
```@raw html
<center>
<img src="assets/ex_transportation.png" width="1000px"/>
</center>
```

### Table of Contents

* [Usage](usage.md)
* [Input and Output Data Formats](format.md)
* [Simplified Solution Reports](reports.md)
* [Optimization Model](model.md)
```@contents
Pages = ["usage.md", "format.md", "reports.md", "model.md"]
Depth = 3
```

### Source Code

* [https://github.com/ANL-CEEESA/RELOG](https://github.com/ANL-CEEESA/RELOG)

### Authors
* **Alinson S. Xavier,** Argonne National Laboratory <<axavier@anl.gov>>
* **Nwike Iloeje,** Argonne National Laboratory <<ciloeje@anl.gov>>
* **Alinson S. Xavier,** Argonne National Laboratory <axavier@anl.gov>
* **Nwike Iloeje,** Argonne National Laboratory <ciloeje@anl.gov>

### License

@@ -6,53 +6,65 @@ In this page, we describe the precise mathematical optimization model used by RE

### Sets

* $L$ - Set of locations holding the original material to be recycled
* $M$ - Set of materials recovered during the reverse manufacturing process
* $P$ - Set of potential plants to open
* $T = \{ 1, \ldots, t^{max} \} $ - Set of time periods

Symbol | Description
:-------|:------------
$L$ | Set of locations holding the original material to be recycled
$M$ | Set of materials recovered during the reverse manufacturing process
$P$ | Set of potential plants to open
$T = \{ 1, \ldots, t^{max} \}$ | Set of time periods

### Constants

**Plants:**
#### Plants

* $c^\text{disp}_{pmt}$ - Cost of disposing one tonne of material $m$ at plant $p$ during time $t$ (`$/tonne/km`)
* $c^\text{exp}_{pt}$ - Cost of adding one tonne of capacity to plant $p$ at time $t$ (`$/tonne`)
* $c^\text{open}_{pt}$ - Cost of opening plant $p$ at time $t$, at minimum capacity (`$`)
* $c^\text{f-base}_{pt}$ - Fixed cost of keeping plant $p$ open during time period $t$ (`$`)
* $c^\text{f-exp}_{pt}$ - Increase in fixed cost for each additional tonne of capacity (`$/tonne`)
* $c^\text{var}_{pt}$ - Variable cost of processing one tonne of input at plant $p$ at time $t$ (`$/tonne`)
* $c^\text{store}_{pt}$ - Cost of storing one tonne of original material at plant $p$ at time $t$ (`$/tonne`)
* $m^\text{min}_p$ - Minimum capacity of plant $p$ (`tonne`)
* $m^\text{max}_p$ - Maximum capacity of plant $p$ (`tonne`)
* $m^\text{disp}_{pmt}$ - Maximum amount of material $m$ that plant $p$ can dispose of during time $t$ (`tonne`)
* $m^\text{store}_p$ - Maximum amount of original material that plant $p$ can store for later processing.

Symbol | Description | Unit
:-------|:------------|:---
$c^\text{disp}_{pmt}$ | Cost of disposing one tonne of material $m$ at plant $p$ during time $t$ | \$/tonne/km
$c^\text{exp}_{pt}$ | Cost of adding one tonne of capacity to plant $p$ at time $t$ | \$/tonne
$c^\text{open}_{pt}$ | Cost of opening plant $p$ at time $t$, at minimum capacity | \$
$c^\text{f-base}_{pt}$ | Fixed cost of keeping plant $p$ open during time period $t$ | \$
$c^\text{f-exp}_{pt}$ | Increase in fixed cost for each additional tonne of capacity | \$/tonne
$c^\text{var}_{pt}$ | Variable cost of processing one tonne of input at plant $p$ at time $t$ | \$/tonne
$c^\text{store}_{pt}$ | Cost of storing one tonne of original material at plant $p$ at time $t$ | \$/tonne
$m^\text{min}_p$ | Minimum capacity of plant $p$ | tonne
$m^\text{max}_p$ | Maximum capacity of plant $p$ | tonne
$m^\text{disp}_{pmt}$ | Maximum amount of material $m$ that plant $p$ can dispose of during time $t$ | tonne
$m^\text{store}_p$ | Maximum amount of original material that plant $p$ can store for later processing. | tonne

**Products:**
#### Products

* $\alpha_{pm}$ - Amount of material $m$ recovered by plant $p$ for each tonne of original material (`tonne/tonne`)
* $m^\text{initial}_{lt}$ - Amount of original material to be recycled at location $l$ during time $t$ (`tonne`)

Symbol | Description | Unit
:-------|:------------|:---
$\alpha_{pm}$ | Amount of material $m$ recovered by plant $p$ for each tonne of original material | tonne/tonne
$m^\text{initial}_{lt}$ | Amount of original material to be recycled at location $l$ during time $t$ | tonne

**Transportation:**
#### Transportation

* $c^\text{tr}_{t}$ - Transportation cost during time $t$ (`$/tonne/km`)
* $d_{lp}$ - Distance between plant $p$ and location $l$ (`km`)

Symbol | Description | Unit
:-------|:------------|:---
$c^\text{tr}_{t}$ | Transportation cost during time $t$ | \$/tonne/km
$d_{lp}$ | Distance between plant $p$ and location $l$ | km

### Decision variables
* $q_{mpt}$ - Amount of material $m$ recovered by plant $p$ during time $t$ (`tonne`)
* $u_{pt}$ - Binary variable that equals 1 if plant $p$ starts operating at time $t$ (`bool`)
* $w_{pt}$ - Extra capacity (amount above the minimum) added to plant $p$ during time $t$ (`tonne`)
* $x_{pt}$ - Binary variable that equals 1 if plant $p$ is operational at time $t$ (`bool`)
* $y_{lpt}$ - Amount of product sent from location $l$ to plant $p$ during time $t$ (`tonne`)
* $z^{\text{disp}}_{mpt}$ - Amount of material $m$ disposed of by plant $p$ during time $t$ (`tonne`)
* $z^{\text{store}}_{pt}$ - Amount of original material in storage at plant $p$ by the end of time period $t$ (`tonne`)
* $z^{\text{proc}}_{mpt}$ - Amount of original material processed by plant $p$ during time period $t$ (`tonne`)

Symbol | Description | Unit
:-------|:------------|:---
$q_{mpt}$ | Amount of material $m$ recovered by plant $p$ during time $t$ | tonne
$u_{pt}$ | Binary variable that equals 1 if plant $p$ starts operating at time $t$ | Boolean
$w_{pt}$ | Extra capacity (amount above the minimum) added to plant $p$ during time $t$ | tonne
$x_{pt}$ | Binary variable that equals 1 if plant $p$ is operational at time $t$ | Boolean
$y_{lpt}$ | Amount of product sent from location $l$ to plant $p$ during time $t$ | tonne
$z^{\text{disp}}_{mpt}$ | Amount of material $m$ disposed of by plant $p$ during time $t$ | tonne
$z^{\text{store}}_{pt}$ | Amount of original material in storage at plant $p$ by the end of time period $t$ | tonne
$z^{\text{proc}}_{mpt}$ | Amount of original material processed by plant $p$ during time period $t$ | tonne

### Objective function

RELOG minimizes the overall capital, production and transportation costs:

```math
\begin{align*}
\text{minimize} \;\; &
\sum_{t \in T} \sum_{p \in P} \left[
@@ -73,6 +85,7 @@ RELOG minimizes the overall capital, production and transportation costs:
&
\sum_{t \in T} \sum_{p \in P} \sum_{m \in M} c^{\text{disp}}_{pmt} z_{pmt}
\end{align*}
```

In the first line, we have (i) the opening costs, if the plant starts operating at time $t$, (ii) the fixed operating costs, if the plant is operational, (iii) the additional fixed operating costs coming from expansions performed in all previous time periods up to the current one, and finally (iv) the expansion costs during the current time period.
In the second line, we have storage and variable processing costs.
@@ -83,14 +96,17 @@ In the fourth line, we have the disposal costs.

* All original materials must be sent to a plant:

\begin{align}
```math
\begin{align*}
& \sum_{p \in P} y_{lpt} = m^\text{initial}_{lt}
& \forall l \in L, t \in T
\end{align}
\end{align*}
```

* Amount received equals amount processed plus stored. Furthermore, all original material should be processed by the end of the simulation.

\begin{align}
```math
\begin{align*}
& \sum_{l \in L} y_{lpt} + z^{\text{store}}_{p,t-1}
= z^{\text{proc}}_{pt} + z^{\text{store}}_{p,t}
& \forall p \in P, t \in T \\
@@ -98,56 +114,70 @@ In the fourth line, we have the disposal costs.
& \forall p \in P \\
& z^{\text{store}}_{p,t^{\max}} = 0
& \forall p \in P
\end{align}
\end{align*}
```

* Plants have a limited processing capacity. Furthermore, if a plant is closed, it has zero processing capacity:

\begin{align}
```math
\begin{align*}
& z^{\text{proc}}_{pt} \leq m^\text{min}_p x_p + \sum_{i=1}^t w_p
& \forall p \in P, t \in T
\end{align}
\end{align*}
```

* Plants have limited storage capacity. Furthermore, if a plant is closed, it has zero storage capacity:

\begin{align}
```math
\begin{align*}
& z^{\text{store}}_{pt} \leq m^\text{store}_p x_p
& \forall p \in P, t \in T
\end{align}
\end{align*}
```

* Plants can only be expanded up to their maximum capacity. Furthermore, if a plant is closed, it cannot be expanded:

\begin{align}
```math
\begin{align*}
& \sum_{i=1}^t w_p \leq m^\text{max}_p x_p
& \forall p \in P, t \in T
\end{align}
\end{align*}
```

* Amount of recovered material is proportional to amount processed:

\begin{align}
```math
\begin{align*}
& q_{mpt} = \alpha_{pm} z^{\text{proc}}_{pt}
& \forall m \in M, p \in P, t \in T
\end{align}
\end{align*}
```

* Because we only consider a single type of plant, all recovered material must be immediately disposed of. In RELOG's full model, recovered materials may be sent to another plant for further processing.

\begin{align}
```math
\begin{align*}
& q_{mpt} = z_{mpt}
& \forall m \in M, p \in P, t \in T
\end{align}
\end{align*}
```

* A plant is operational at time $t$ if it was operational at time $t-1$ or it was built at time $t$. This constraint also prevents a plant from being built multiple times.

\begin{align}
```math
\begin{align*}
& x_{pt} = x_{p,t-1} + u_{pt}
& \forall p \in P, t \in T \setminus \{1\} \\
& x_{p,1} = u_{p,1}
& \forall p \in P
\end{align}
\end{align*}
```

* Variable bounds:

\begin{align}
```math
\begin{align*}
& q_{mpt} \geq 0
& \forall m \in M, p \in P, t \in T \\
& u_{pt} \in \{0,1\}
@@ -162,4 +192,5 @@
& p \in P, t \in T \\
& z^{\text{disp}}_{mpt}, z^{\text{proc}}_{mpt} \geq 0
& \forall m \in M, p \in P, t \in T
\end{align}
\end{align*}
```
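To connect the notation above to code, here is a minimal JuMP sketch (not RELOG's actual implementation) of the first constraint family, the requirement that all original material be sent to some plant, using made-up index sets and data:

```julia
# Sketch only: one constraint family of the model, written in JuMP 1.x.
using JuMP

L, P, T = 1:3, 1:2, 1:2              # locations, candidate plants, time periods
m_initial = rand(3, 2)               # m^initial_{lt}: tonnes available at l in year t

model = Model()
@variable(model, y[L, P, T] >= 0)    # y_{lpt}: tonnes shipped from l to p in t

# sum_p y_{lpt} = m^initial_{lt}   for all l in L, t in T
@constraint(model, [l in L, t in T],
    sum(y[l, p, t] for p in P) == m_initial[l, t])
```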
@@ -6,15 +6,13 @@ In this page, we also illustrate what types of charts and visualizations can be

## Plants report

Report showing plant costs, capacities, energy expenditure and utilization factors.

Generated by `RELOG.write_plants_report(solution, filename)`. For a concrete example, see [nimh_plants.csv](https://github.com/ANL-CEEESA/RELOG/blob/master/test/fixtures/nimh_plants.csv).
Report showing plant costs, capacities, energy expenditure and utilization factors. Generated by `RELOG.write_plants_report(solution, filename)`.

| Column | Description
|:--------------------------------------|---------------|
|:--------------------------------------|:---------------|
| `plant type` | Plant type.
| `location name` | Location name.
| `year` | What year this row corresponds to. This report includes one row for each year in the simulation.
| `year` | What year this row corresponds to. This report includes one row for each year.
| `latitude (deg)` | Latitude of the plant.
| `longitude (deg)` | Longitude of the plant.
| `capacity (tonne)` | Capacity of the plant at this point in time.
@@ -47,7 +45,9 @@ sns.barplot(x="year",
    .reset_index());
```

<img src="../images/ex_plant_cost_per_year.png" width="500px"/>
```@raw html
<img src="../assets/ex_plant_cost_per_year.png" width="500px"/>
```

* Map showing plant locations (in Python):
```python
@@ -67,21 +67,20 @@ points = gp.points_from_xy(data["longitude (deg)"],
gp.GeoDataFrame(data, geometry=points).plot(ax=ax);
```

<img src="../images/ex_plant_locations.png" width="1000px"/>

```@raw html
<img src="../assets/ex_plant_locations.png" width="1000px"/>
```
## Plant outputs report

Report showing amount of products produced, sent and disposed of by each plant, as well as disposal costs.

Generated by `RELOG.write_plant_outputs_report(solution, filename)`. For a concrete example, see [nimh_plant_outputs.csv](https://github.com/ANL-CEEESA/RELOG/blob/master/test/fixtures/nimh_plant_outputs.csv).
Report showing amount of products produced, sent and disposed of by each plant, as well as disposal costs. Generated by `RELOG.write_plant_outputs_report(solution, filename)`.

| Column | Description
|:--------------------------------------|---------------|
|:--------------------------------------|:---------------|
| `plant type` | Plant type.
| `location name` | Location name.
| `year` | What year this row corresponds to. This report includes one row for each year in the simulation.
| `year` | What year this row corresponds to. This report includes one row for each year.
| `product name` | Product being produced.
| `amount produced (tonne)` | Amount of product produced this year.
| `amount sent (tonne)` | Amount of product produced by this plant and sent to another plant for further processing this year.
@@ -105,17 +104,17 @@ sns.barplot(x="amount produced (tonne)",
    .reset_index());
```

<img src="../images/ex_amount_produced.png" width="500px"/>
```@raw html
<img src="../assets/ex_amount_produced.png" width="500px"/>
```

## Plant emissions report

Report showing amount of emissions produced by each plant.

Generated by `RELOG.write_plant_emissions_report(solution, filename)`. For a concrete example, see [nimh_plant_emissions.csv](https://github.com/ANL-CEEESA/RELOG/blob/master/test/fixtures/nimh_plant_emissions.csv).
Report showing amount of emissions produced by each plant. Generated by `RELOG.write_plant_emissions_report(solution, filename)`.

| Column | Description
|:--------------------------------------|---------------|
|:--------------------------------------|:---------------|
| `plant type` | Plant type.
| `location name` | Location name.
| `year` | Year.
@@ -139,17 +138,33 @@ sns.barplot(x="plant type",
    .reset_index());
```

<img src="../images/ex_emissions.png" width="500px"/>
```@raw html
<img src="../assets/ex_emissions.png" width="500px"/>
```

## Products report

Report showing primary product amounts, locations and marginal costs. Generated by `RELOG.write_products_report(solution, filename)`.

| Column | Description
|:--------------------------------------|:---------------|
| `product name` | Product name.
| `location name` | Name of the collection center.
| `latitude (deg)` | Latitude of the collection center.
| `longitude (deg)` | Longitude of the collection center.
| `year` | What year this row corresponds to. This report includes one row for each year.
| `amount (tonne)` | Amount of product available at this collection center.
| `amount disposed (tonne)` | Amount of product disposed of at this collection center.
| `marginal cost ($/tonne)` | Cost to process one additional tonne of this product coming from this collection center.

## Transportation report

Report showing amount of product sent from initial locations to plants, and from one plant to another. Includes the distance between each pair of locations, amount-distance shipped, transportation costs and energy expenditure.

Generated by `RELOG.write_transportation_report(solution, filename)`. For a concrete example, see [nimh_transportation.csv](https://github.com/ANL-CEEESA/RELOG/blob/master/test/fixtures/nimh_transportation.csv).
Report showing amount of product sent from initial locations to plants, and from one plant to another. Includes the distance between each pair of locations, amount-distance shipped, transportation costs and energy expenditure. Generated by `RELOG.write_transportation_report(solution, filename)`.

| Column | Description
|:--------------------------------------|---------------|
|:--------------------------------------|:---------------|
| `source type` | If product is being shipped from an initial location, equals `Origin`. If product is being shipped from a plant, equals plant type.
| `source location name` | Name of the location where the product is being shipped from.
| `source latitude (deg)` | Latitude of the source location.
@@ -183,7 +198,9 @@ sns.barplot(x="product",
    .reset_index());
```

<img src="../images/ex_transportation_amount_distance.png" width="500px"/>
```@raw html
<img src="../assets/ex_transportation_amount_distance.png" width="500px"/>
```

* Map of transportation lines (in Python):

@@ -226,17 +243,17 @@ gp.GeoDataFrame(data, geometry=points).plot(ax=ax,
    markersize=50);
```

<img src="../images/ex_transportation.png" width="1000px"/>
```@raw html
<img src="../assets/ex_transportation.png" width="1000px"/>
```

## Transportation emissions report

Report showing emissions for each trip between initial locations and plants, and between pairs of plants.

Generated by `RELOG.write_transportation_emissions_report(solution, filename)`. For a concrete example, see [nimh_transportation_emissions.csv](https://github.com/ANL-CEEESA/RELOG/blob/master/test/fixtures/nimh_transportation_emissions.csv).
Report showing emissions for each trip between initial locations and plants, and between pairs of plants. Generated by `RELOG.write_transportation_emissions_report(solution, filename)`.

| Column | Description
|:--------------------------------------|---------------|
|:--------------------------------------|:---------------|
| `source type` | If product is being shipped from an initial location, equals `Origin`. If product is being shipped from a plant, equals plant type.
| `source location name` | Name of the location where the product is being shipped from.
| `source latitude (deg)` | Latitude of the source location.
@@ -270,4 +287,6 @@ sns.barplot(x="emission type",
    .reset_index());
```

<img src="../images/ex_transportation_emissions.png" width="500px"/>
```@raw html
<img src="../assets/ex_transportation_emissions.png" width="500px"/>
```
@@ -3,22 +3,17 @@ Usage

## 1. Installation

To use RELOG, the first step is to install the [Julia programming language](https://julialang.org/) on your machine. Note that RELOG was developed and tested with Julia 1.5 and may not be compatible with newer versions. After Julia is installed, launch the Julia console, type `]` to switch to package manager mode, then run:
To use RELOG, the first step is to install the [Julia programming language](https://julialang.org/) on your machine. Note that RELOG was developed and tested with Julia 1.8 and may not be compatible with newer versions. After Julia is installed, launch the Julia console, then run:

```text
(@v1.5) pkg> add https://github.com/ANL-CEEESA/RELOG.git
```julia
using Pkg
Pkg.add(name="RELOG", version="0.5")
```

After the package and all its dependencies have been installed, please run the RELOG test suite, as shown below, to make sure that the package has been correctly installed:

```text
(@v1.5) pkg> test RELOG
```

To update the package to a newer version, type `]` to enter the package manager mode, then run:

```text
(@v1.5) pkg> update RELOG
```julia
Pkg.test("RELOG")
```

## 2. Modeling the problem

@@ -70,7 +65,7 @@ For a complete description of the file formats above, and for a complete list of

Fundamentally, RELOG decides when and where to build plants based on a deterministic optimization problem that minimizes costs for a particular input file provided by the user. In practical situations, it may not be possible to perfectly estimate some (or most) entries in this input file in advance, such as costs, demands and emissions. In this situation, it may be interesting to evaluate how well the facility location plan produced by RELOG works if costs, demands and emissions turn out to be different.

To simplify this what-if analysis, RELOG provides the `resolve` method, which updates a previous solution based on a new scenario. The method accepts a previous optimal solution, produced by RELOG, and a new input file, which describes the new scenario. The method reoptimizes the supply chain for this new input file, and produces a new solution which still builds the same set of plants as before, in exactly the same locations and with the same capacities, but that may now utilize the plants differently, based on the new data. For example, in the new solution, plants that were previously used at full capacity may now be utilized at half-capacity instead. As another example, regions that were previously served by a certain plant may now be served by a different one.
To simplify this what-if analysis, RELOG provides the `resolve` method, which updates a previous solution based on a new scenario, but keeps some of the previous decisions fixed. More precisely, given an optimal solution produced by RELOG and a new input file describing the new scenario, the `resolve` method reoptimizes the supply chain and produces a new solution which still builds the same set of plants as before, in exactly the same locations and with the same capacities, but that may now utilize the plants differently, based on the new data. For example, in the new solution, plants that were previously used at full capacity may now be utilized at half-capacity instead. As another example, regions that were previously served by a certain plant may now be served by a different one.

The following snippet shows how to use the method:

@@ -79,14 +74,14 @@ The following snippet shows how to use the method:
using RELOG

# Optimize for the average scenario
solution_avg = RELOG.solve("input_avg.json")
solution_avg, model_avg = RELOG.solve("input_avg.json", return_model=true)

# Write reports for the average scenario
RELOG.write_plants_report(solution_avg, "plants_avg.csv")
RELOG.write_transportation_report(solution_avg, "transportation_avg.csv")

# Re-optimize for the high-demand scenario, keeping plants fixed
solution_high = RELOG.resolve(solution_avg, "input_high.json")
solution_high = RELOG.resolve(model_avg, "input_high.json")

# Write reports for the high-demand scenario
RELOG.write_plants_report(solution_high, "plants_high.csv")
@@ -111,13 +106,17 @@ By default, RELOG internally uses [Cbc](https://github.com/coin-or/Cbc), an open
```julia
using RELOG, Gurobi, JuMP

gurobi = optimizer_with_attributes(Gurobi.Optimizer,
gurobi = optimizer_with_attributes(
    Gurobi.Optimizer,
    "TimeLimit" => 3600,
    "MIPGap" => 0.001)
    "MIPGap" => 0.001,
)

RELOG.solve("instance.json",
RELOG.solve(
    "instance.json",
    output="solution.json",
    optimizer=gurobi)
    optimizer=gurobi,
)
```

### 5.2 Multi-period heuristics

@@ -133,6 +132,8 @@ To solve an instance using this heuristic, use the option `heuristic=true`, as s
```julia
using RELOG

solution = RELOG.solve("/home/user/instance.json",
    heuristic=true)
solution = RELOG.solve(
    "/home/user/instance.json",
    heuristic=true,
)
```
@@ -1,202 +0,0 @@
|
||||
{
|
||||
"parameters": {
|
||||
"time horizon (years)": 2
|
||||
},
|
||||
"products": {
|
||||
"P1": {
|
||||
"transportation cost ($/km/tonne)": [0.015, 0.015],
|
||||
"transportation energy (J/km/tonne)": [0.12, 0.11],
|
||||
"transportation emissions (tonne/km/tonne)": {
|
||||
"CO2": [0.052, 0.050],
|
||||
"CH4": [0.003, 0.002]
|
||||
},
|
||||
"initial amounts": {
|
||||
"C1": {
|
||||
"latitude (deg)": 7.0,
|
||||
"longitude (deg)": 7.0,
|
||||
"amount (tonne)": [934.56, 934.56]
|
||||
},
|
||||
"C2": {
|
||||
"latitude (deg)": 7.0,
|
||||
"longitude (deg)": 19.0,
|
||||
"amount (tonne)": [198.95, 198.95]
|
||||
},
|
||||
"C3": {
|
||||
"latitude (deg)": 84.0,
|
||||
"longitude (deg)": 76.0,
|
||||
"amount (tonne)": [212.97, 212.97]
|
||||
},
|
||||
"C4": {
|
||||
"latitude (deg)": 21.0,
|
||||
"longitude (deg)": 16.0,
|
||||
"amount (tonne)": [352.19, 352.19]
|
||||
},
|
||||
"C5": {
|
||||
"latitude (deg)": 32.0,
|
||||
"longitude (deg)": 92.0,
|
||||
"amount (tonne)": [510.33, 510.33]
|
||||
},
|
||||
"C6": {
|
||||
"latitude (deg)": 14.0,
|
||||
"longitude (deg)": 62.0,
|
||||
"amount (tonne)": [471.66, 471.66]
|
||||
},
|
||||
"C7": {
|
||||
"latitude (deg)": 30.0,
|
||||
"longitude (deg)": 83.0,
|
||||
"amount (tonne)": [785.21, 785.21]
|
||||
},
|
||||
"C8": {
|
||||
"latitude (deg)": 35.0,
|
||||
"longitude (deg)": 40.0,
|
||||
"amount (tonne)": [706.17, 706.17]
|
||||
},
|
||||
"C9": {
|
||||
"latitude (deg)": 74.0,
|
||||
"longitude (deg)": 52.0,
|
||||
"amount (tonne)": [30.08, 30.08]
|
||||
},
|
||||
"C10": {
|
||||
"latitude (deg)": 22.0,
|
||||
"longitude (deg)": 54.0,
|
||||
"amount (tonne)": [536.52, 536.52]
|
||||
}
|
||||
}
|
||||
},
|
||||
"P2": {
|
||||
"transportation cost ($/km/tonne)": [0.02, 0.02]
|
||||
},
|
||||
"P3": {
|
||||
"transportation cost ($/km/tonne)": [0.0125, 0.0125]
|
||||
},
|
||||
"P4": {
|
||||
"transportation cost ($/km/tonne)": [0.0175, 0.0175]
|
||||
}
|
||||
},
|
||||
"plants": {
|
||||
"F1": {
|
||||
"input": "P1",
|
||||
"outputs (tonne/tonne)": {
|
||||
"P2": 0.2,
|
||||
"P3": 0.5
|
||||
},
|
||||
"energy (GJ/tonne)": [0.12, 0.11],
|
||||
"emissions (tonne/tonne)": {
|
||||
"CO2": [0.052, 0.050],
|
||||
"CH4": [0.003, 0.002]
|
||||
},
|
||||
"locations": {
|
||||
"L1": {
|
||||
"latitude (deg)": 0.0,
|
||||
"longitude (deg)": 0.0,
|
||||
"disposal": {
|
||||
"P2": {
|
||||
"cost ($/tonne)": [-10.0, -10.0],
|
||||
"limit (tonne)": [1.0, 1.0]
|
||||
},
|
||||
"P3": {
|
||||
"cost ($/tonne)": [-10.0, -10.0],
|
||||
"limit (tonne)": [1.0, 1.0]
|
||||
}
|
||||
},
|
||||
"capacities (tonne)": {
|
||||
"250.0": {
|
||||
"opening cost ($)": [500.0, 500.0],
|
||||
"fixed operating cost ($)": [30.0, 30.0],
|
||||
"variable operating cost ($/tonne)": [30.0, 30.0]
|
||||
},
|
||||
"1000.0": {
|
||||
"opening cost ($)": [1250.0, 1250.0],
|
||||
"fixed operating cost ($)": [30.0, 30.0],
|
||||
"variable operating cost ($/tonne)": [30.0, 30.0]
|
||||
}
|
||||
}
|
||||
},
|
||||
"L2": {
|
||||
"latitude (deg)": 0.5,
|
||||
"longitude (deg)": 0.5,
|
||||
"capacities (tonne)": {
|
||||
"0.0": {
|
||||
"opening cost ($)": [1000, 1000],
|
||||
"fixed operating cost ($)": [50.0, 50.0],
|
||||
"variable operating cost ($/tonne)": [50.0, 50.0]
|
||||
},
|
||||
"10000.0": {
|
||||
"opening cost ($)": [10000, 10000],
|
||||
"fixed operating cost ($)": [50.0, 50.0],
|
||||
"variable operating cost ($/tonne)": [50.0, 50.0]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"F2": {
|
||||
"input": "P2",
|
||||
"outputs (tonne/tonne)": {
|
||||
"P3": 0.05,
|
||||
"P4": 0.80
|
||||
},
|
||||
"locations": {
|
||||
"L3": {
|
||||
"latitude (deg)": 25.0,
|
||||
"longitude (deg)": 65.0,
|
||||
"disposal": {
|
||||
"P3": {
|
||||
"cost ($/tonne)": [100.0, 100.0]
|
||||
}
|
||||
},
|
||||
"capacities (tonne)": {
|
||||
"1000.0": {
|
||||
"opening cost ($)": [3000, 3000],
|
||||
"fixed operating cost ($)": [50.0, 50.0],
|
||||
"variable operating cost ($/tonne)": [50.0, 50.0]
|
||||
}
|
||||
}
|
||||
},
|
||||
"L4": {
|
||||
"latitude (deg)": 0.75,
|
||||
"longitude (deg)": 0.20,
|
||||
"capacities (tonne)": {
|
||||
"10000": {
|
||||
"opening cost ($)": [3000, 3000],
|
||||
"fixed operating cost ($)": [50.0, 50.0],
|
||||
"variable operating cost ($/tonne)": [50.0, 50.0]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"F3": {
|
||||
"input": "P4",
|
||||
"locations": {
|
||||
"L5": {
|
||||
"latitude (deg)": 100.0,
|
||||
"longitude (deg)": 100.0,
|
||||
"capacities (tonne)": {
|
||||
"15000": {
|
||||
"opening cost ($)": [0.0, 0.0],
|
||||
"fixed operating cost ($)": [0.0, 0.0],
|
||||
"variable operating cost ($/tonne)": [-15.0, -15.0]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"F4": {
|
||||
"input": "P3",
|
||||
"locations": {
|
||||
"L6": {
|
||||
"latitude (deg)": 50.0,
|
||||
"longitude (deg)": 50.0,
|
||||
"capacities (tonne)": {
|
||||
"10000": {
|
||||
"opening cost ($)": [0.0, 0.0],
|
||||
"fixed operating cost ($)": [0.0, 0.0],
|
||||
"variable operating cost ($/tonne)": [-15.0, -15.0]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,11 +0,0 @@
[ Info: Reading s1.json...
[ Info: Building graph...
[ Info: 2 time periods
[ Info: 6 process nodes
[ Info: 8 shipping nodes (plant)
[ Info: 10 shipping nodes (collection)
[ Info: 38 arcs
[ Info: Building optimization model...
[ Info: Optimizing MILP...
[ Info: Re-optimizing with integer variables fixed...
[ Info: Extracting solution...
75
juliaw
Executable file
@@ -0,0 +1,75 @@
|
||||
#!/bin/bash
|
||||
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
|
||||
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
if [ ! -e Project.toml ]; then
|
||||
echo "juliaw: Project.toml not found"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -e Manifest.toml ]; then
|
||||
julia --project=. -e 'using Pkg; Pkg.instantiate()' || exit 1
|
||||
fi
|
||||
|
||||
if [ ! -e build/sysimage.so -o Project.toml -nt build/sysimage.so ]; then
|
||||
echo "juliaw: rebuilding system image..."
|
||||
|
||||
# Generate temporary project folder
|
||||
rm -rf $HOME/.juliaw
|
||||
mkdir -p $HOME/.juliaw/src
|
||||
cp Project.toml Manifest.toml $HOME/.juliaw
|
||||
NAME=$(julia -e 'using TOML; toml = TOML.parsefile("Project.toml"); "name" in keys(toml) && print(toml["name"])')
|
||||
if [ ! -z $NAME ]; then
|
||||
cat > $HOME/.juliaw/src/$NAME.jl << EOF
|
||||
module $NAME
|
||||
end
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Add PackageCompiler dependencies to temporary project
|
||||
julia --project=$HOME/.juliaw -e 'using Pkg; Pkg.add(["PackageCompiler", "TOML", "Logging"])'
|
||||
|
||||
# Generate system image scripts
|
||||
cat > $HOME/.juliaw/sysimage.jl << EOF
|
||||
using PackageCompiler
|
||||
using TOML
|
||||
using Logging
|
||||
|
||||
Logging.disable_logging(Logging.Info)
|
||||
mkpath("$PWD/build")
|
||||
|
||||
println("juliaw: generating precompilation statements...")
|
||||
run(\`julia --project="$PWD" --trace-compile="$PWD"/build/precompile.jl \$(ARGS)\`)
|
||||
|
||||
println("juliaw: finding dependencies...")
|
||||
project = TOML.parsefile("Project.toml")
|
||||
manifest = TOML.parsefile("Manifest.toml")
|
||||
deps = Symbol[]
|
||||
for dep in keys(project["deps"])
|
||||
if dep in keys(manifest)
|
||||
# Up to Julia 1.6
|
||||
dep_entry = manifest[dep][1]
|
||||
else
|
||||
# Julia 1.7+
|
||||
dep_entry = manifest["deps"][dep][1]
|
||||
end
|
||||
if "path" in keys(dep_entry)
|
||||
println(" - \$(dep) [skip]")
|
||||
else
|
||||
println(" - \$(dep)")
|
||||
push!(deps, Symbol(dep))
|
||||
end
|
||||
end
|
||||
|
||||
println("juliaw: building system image...")
|
||||
create_sysimage(
|
||||
deps,
|
||||
precompile_statements_file = "$PWD/build/precompile.jl",
|
||||
sysimage_path = "$PWD/build/sysimage.so",
|
||||
)
|
||||
EOF
|
||||
julia --project=$HOME/.juliaw $HOME/.juliaw/sysimage.jl $*
|
||||
else
|
||||
julia --project=. --sysimage build/sysimage.so $*
|
||||
fi
|
||||
26
src/RELOG.jl
@@ -3,9 +3,25 @@
# Released under the modified BSD license. See COPYING.md for more details.

module RELOG
include("dotdict.jl")
include("instance.jl")
include("graph.jl")
include("model.jl")
include("reports.jl")

include("instance/structs.jl")

include("graph/structs.jl")

include("graph/build.jl")
include("graph/csv.jl")
include("instance/compress.jl")
include("instance/geodb.jl")
include("instance/parse.jl")
include("instance/validate.jl")
include("model/build.jl")
include("model/getsol.jl")
include("model/solve.jl")
include("reports/plant_emissions.jl")
include("reports/plant_outputs.jl")
include("reports/plants.jl")
include("reports/products.jl")
include("reports/tr_emissions.jl")
include("reports/tr.jl")
include("reports/write.jl")
end

@@ -1,28 +0,0 @@
|
||||
.navbar-default {
|
||||
border-bottom: 0px;
|
||||
background-color: #fff;
|
||||
box-shadow: 0px 0px 15px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
a, .navbar-default a {
|
||||
color: #06a !important;
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
.disabled > a {
|
||||
color: #999 !important;
|
||||
}
|
||||
|
||||
.navbar-default a:hover,
|
||||
.navbar-default .active,
|
||||
.active > a {
|
||||
background-color: #f0f0f0 !important;
|
||||
}
|
||||
|
||||
.icon-bar {
|
||||
background-color: #666 !important;
|
||||
}
|
||||
|
||||
.navbar-collapse {
|
||||
border-color: #fff !important;
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
MathJax.Hub.Config({
|
||||
"tex2jax": { inlineMath: [ [ '$', '$' ] ] }
|
||||
});
|
||||
MathJax.Hub.Config({
|
||||
config: ["MMLorHTML.js"],
|
||||
jax: ["input/TeX", "output/HTML-CSS", "output/NativeMML"],
|
||||
extensions: ["MathMenu.js", "MathZoom.js"]
|
||||
});
|
||||
@@ -1,68 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
struct DotDict
|
||||
inner::Dict
|
||||
end
|
||||
|
||||
DotDict() = DotDict(Dict())
|
||||
|
||||
function Base.setproperty!(d::DotDict, key::Symbol, value)
|
||||
setindex!(getfield(d, :inner), value, key)
|
||||
end
|
||||
|
||||
function Base.getproperty(d::DotDict, key::Symbol)
|
||||
(key == :inner ? getfield(d, :inner) : d.inner[key])
|
||||
end
|
||||
|
||||
function Base.getindex(d::DotDict, key::Int64)
|
||||
d.inner[Symbol(key)]
|
||||
end
|
||||
|
||||
function Base.getindex(d::DotDict, key::Symbol)
|
||||
d.inner[key]
|
||||
end
|
||||
|
||||
function Base.keys(d::DotDict)
|
||||
keys(d.inner)
|
||||
end
|
||||
|
||||
function Base.values(d::DotDict)
|
||||
values(d.inner)
|
||||
end
|
||||
|
||||
function Base.iterate(d::DotDict)
|
||||
iterate(values(d.inner))
|
||||
end
|
||||
|
||||
function Base.iterate(d::DotDict, v::Int64)
|
||||
iterate(values(d.inner), v)
|
||||
end
|
||||
|
||||
function Base.length(d::DotDict)
|
||||
length(values(d.inner))
|
||||
end
|
||||
|
||||
function Base.show(io::IO, d::DotDict)
|
||||
print(io, "DotDict with $(length(keys(d.inner))) entries:\n")
|
||||
count = 0
|
||||
for k in keys(d.inner)
|
||||
count += 1
|
||||
if count > 10
|
||||
print(io, " ...\n")
|
||||
break
|
||||
end
|
||||
print(io, " :$(k) => $(d.inner[k])\n")
|
||||
end
|
||||
end
|
||||
|
||||
function recursive_to_dot_dict(el)
|
||||
if typeof(el) == Dict{String, Any}
|
||||
return DotDict(Dict(Symbol(k) => recursive_to_dot_dict(el[k]) for k in keys(el)))
|
||||
else
|
||||
return el
|
||||
end
|
||||
end
|
||||
|
||||
export recursive_to_dot_dict
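A minimal usage sketch for the helper above (illustration only; this file is removed by this changeset):

```julia
# Convert a parsed JSON dictionary into dot-accessible form.
json = Dict{String,Any}("parameters" => Dict{String,Any}("time" => 2))
d = recursive_to_dot_dict(json)
d.parameters.time  # returns 2
```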
|
||||
@@ -4,43 +4,12 @@
|
||||
|
||||
using Geodesy
|
||||
|
||||
|
||||
abstract type Node
|
||||
function calculate_distance(source_lat, source_lon, dest_lat, dest_lon)::Float64
|
||||
x = LLA(source_lat, source_lon, 0.0)
|
||||
y = LLA(dest_lat, dest_lon, 0.0)
|
||||
return round(euclidean_distance(x, y) / 1000.0, digits = 2)
|
||||
end
|
||||
|
||||
|
||||
mutable struct Arc
|
||||
source::Node
|
||||
dest::Node
|
||||
values::Dict{String, Float64}
|
||||
end
|
||||
|
||||
|
||||
mutable struct ProcessNode <: Node
|
||||
index::Int
|
||||
location::Plant
|
||||
incoming_arcs::Array{Arc}
|
||||
outgoing_arcs::Array{Arc}
|
||||
end
|
||||
|
||||
|
||||
mutable struct ShippingNode <: Node
|
||||
index::Int
|
||||
location::Union{Plant, CollectionCenter}
|
||||
product::Product
|
||||
incoming_arcs::Array{Arc}
|
||||
outgoing_arcs::Array{Arc}
|
||||
end
|
||||
|
||||
|
||||
mutable struct Graph
|
||||
process_nodes::Array{ProcessNode}
|
||||
plant_shipping_nodes::Array{ShippingNode}
|
||||
collection_shipping_nodes::Array{ShippingNode}
|
||||
arcs::Array{Arc}
|
||||
end
|
||||
|
||||
|
||||
function build_graph(instance::Instance)::Graph
|
||||
arcs = []
|
||||
next_index = 0
|
||||
@@ -48,15 +17,18 @@ function build_graph(instance::Instance)::Graph
|
||||
plant_shipping_nodes = ShippingNode[]
|
||||
collection_shipping_nodes = ShippingNode[]
|
||||
|
||||
process_nodes_by_input_product = Dict(product => ProcessNode[]
|
||||
for product in instance.products)
|
||||
shipping_nodes_by_plant = Dict(plant => []
|
||||
for plant in instance.plants)
|
||||
name_to_process_node_map = Dict{Tuple{AbstractString,AbstractString},ProcessNode}()
|
||||
collection_center_to_node = Dict()
|
||||
|
||||
process_nodes_by_input_product =
|
||||
Dict(product => ProcessNode[] for product in instance.products)
|
||||
shipping_nodes_by_plant = Dict(plant => [] for plant in instance.plants)
|
||||
|
||||
# Build collection center shipping nodes
|
||||
for center in instance.collection_centers
|
||||
node = ShippingNode(next_index, center, center.product, [], [])
|
||||
next_index += 1
|
||||
collection_center_to_node[center] = node
|
||||
push!(collection_shipping_nodes, node)
|
||||
end
|
||||
|
||||
@@ -67,6 +39,8 @@ function build_graph(instance::Instance)::Graph
|
||||
push!(process_nodes, pn)
|
||||
push!(process_nodes_by_input_product[plant.input], pn)
|
||||
|
||||
name_to_process_node_map[(plant.plant_name, plant.location_name)] = pn
|
||||
|
||||
for product in keys(plant.output)
|
||||
sn = ShippingNode(next_index, plant, product, [], [])
|
||||
next_index += 1
|
||||
@@ -78,12 +52,14 @@ function build_graph(instance::Instance)::Graph
|
||||
# Build arcs from collection centers to plants, and from one plant to another
|
||||
for source in [collection_shipping_nodes; plant_shipping_nodes]
|
||||
for dest in process_nodes_by_input_product[source.product]
|
||||
distance = calculate_distance(source.location.latitude,
|
||||
distance = calculate_distance(
|
||||
source.location.latitude,
|
||||
source.location.longitude,
|
||||
dest.location.latitude,
|
||||
dest.location.longitude)
|
||||
dest.location.longitude,
|
||||
)
|
||||
values = Dict("distance" => distance)
|
||||
arc = Arc(source, dest, values)
|
||||
arc = Arc(length(arcs) + 1, source, dest, values)
|
||||
push!(source.outgoing_arcs, arc)
|
||||
push!(dest.incoming_arcs, arc)
|
||||
push!(arcs, arc)
|
||||
@@ -96,31 +72,32 @@ function build_graph(instance::Instance)::Graph
|
||||
for dest in shipping_nodes_by_plant[plant]
|
||||
weight = plant.output[dest.product]
|
||||
values = Dict("weight" => weight)
|
||||
arc = Arc(source, dest, values)
|
||||
arc = Arc(length(arcs) + 1, source, dest, values)
|
||||
push!(source.outgoing_arcs, arc)
|
||||
push!(dest.incoming_arcs, arc)
|
||||
push!(arcs, arc)
|
||||
end
|
||||
end
|
||||
|
||||
return Graph(process_nodes,
|
||||
return Graph(
|
||||
process_nodes,
|
||||
plant_shipping_nodes,
|
||||
collection_shipping_nodes,
|
||||
arcs)
|
||||
arcs,
|
||||
name_to_process_node_map,
|
||||
collection_center_to_node,
|
||||
)
|
||||
end
|
||||
|
||||
|
||||
function to_csv(graph::Graph)
|
||||
result = ""
|
||||
for a in graph.arcs
|
||||
result *= "$(a.source.index),$(a.dest.index)\n"
|
||||
end
|
||||
return result
|
||||
end
|
||||
|
||||
|
||||
function calculate_distance(source_lat, source_lon, dest_lat, dest_lon)::Float64
|
||||
x = LLA(source_lat, source_lon, 0.0)
|
||||
y = LLA(dest_lat, dest_lon, 0.0)
|
||||
return round(distance(x, y) / 1000.0, digits=2)
|
||||
function print_graph_stats(instance::Instance, graph::Graph)::Nothing
|
||||
@info @sprintf(" %12d time periods", instance.time)
|
||||
@info @sprintf(" %12d process nodes", length(graph.process_nodes))
|
||||
@info @sprintf(" %12d shipping nodes (plant)", length(graph.plant_shipping_nodes))
|
||||
@info @sprintf(
|
||||
" %12d shipping nodes (collection)",
|
||||
length(graph.collection_shipping_nodes)
|
||||
)
|
||||
@info @sprintf(" %12d arcs", length(graph.arcs))
|
||||
return
|
||||
end
|
||||
11
src/graph/csv.jl
Normal file
@@ -0,0 +1,11 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
function to_csv(graph::Graph)
|
||||
result = ""
|
||||
for a in graph.arcs
|
||||
result *= "$(a.source.index),$(a.dest.index)\n"
|
||||
end
|
||||
return result
|
||||
end
|
||||
46
src/graph/structs.jl
Normal file
@@ -0,0 +1,46 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using Geodesy
|
||||
|
||||
abstract type Node end
|
||||
|
||||
mutable struct Arc
|
||||
index::Int
|
||||
source::Node
|
||||
dest::Node
|
||||
values::Dict{String,Float64}
|
||||
end
|
||||
|
||||
mutable struct ProcessNode <: Node
|
||||
index::Int
|
||||
location::Plant
|
||||
incoming_arcs::Vector{Arc}
|
||||
outgoing_arcs::Vector{Arc}
|
||||
end
|
||||
|
||||
mutable struct ShippingNode <: Node
|
||||
index::Int
|
||||
location::Union{Plant,CollectionCenter}
|
||||
product::Product
|
||||
incoming_arcs::Vector{Arc}
|
||||
outgoing_arcs::Vector{Arc}
|
||||
end
|
||||
|
||||
mutable struct Graph
|
||||
process_nodes::Vector{ProcessNode}
|
||||
plant_shipping_nodes::Vector{ShippingNode}
|
||||
collection_shipping_nodes::Vector{ShippingNode}
|
||||
arcs::Vector{Arc}
|
||||
name_to_process_node_map::Dict{Tuple{AbstractString,AbstractString},ProcessNode}
|
||||
collection_center_to_node::Dict{CollectionCenter,ShippingNode}
|
||||
end
|
||||
|
||||
function Base.show(io::IO, instance::Graph)
|
||||
print(io, "RELOG graph with ")
|
||||
print(io, "$(length(instance.process_nodes)) process nodes, ")
|
||||
print(io, "$(length(instance.plant_shipping_nodes)) plant shipping nodes, ")
|
||||
print(io, "$(length(instance.collection_shipping_nodes)) collection shipping nodes, ")
|
||||
print(io, "$(length(instance.arcs)) arcs")
|
||||
end
|
||||
281
src/instance.jl
@@ -1,281 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataStructures
|
||||
using JSON
|
||||
using JSONSchema
|
||||
using Printf
|
||||
using Statistics
|
||||
|
||||
|
||||
mutable struct Product
|
||||
name::String
|
||||
transportation_cost::Array{Float64}
|
||||
transportation_energy::Array{Float64}
|
||||
transportation_emissions::Dict{String, Array{Float64}}
|
||||
end
|
||||
|
||||
|
||||
mutable struct CollectionCenter
|
||||
index::Int64
|
||||
name::String
|
||||
latitude::Float64
|
||||
longitude::Float64
|
||||
product::Product
|
||||
amount::Array{Float64}
|
||||
end
|
||||
|
||||
|
||||
mutable struct PlantSize
|
||||
capacity::Float64
|
||||
variable_operating_cost::Array{Float64}
|
||||
fixed_operating_cost::Array{Float64}
|
||||
opening_cost::Array{Float64}
|
||||
end
|
||||
|
||||
|
||||
mutable struct Plant
|
||||
index::Int64
|
||||
plant_name::String
|
||||
location_name::String
|
||||
input::Product
|
||||
output::Dict{Product, Float64}
|
||||
latitude::Float64
|
||||
longitude::Float64
|
||||
disposal_limit::Dict{Product, Array{Float64}}
|
||||
disposal_cost::Dict{Product, Array{Float64}}
|
||||
sizes::Array{PlantSize}
|
||||
energy::Array{Float64}
|
||||
emissions::Dict{String, Array{Float64}}
|
||||
storage_limit::Float64
|
||||
storage_cost::Array{Float64}
|
||||
end
|
||||
|
||||
|
||||
mutable struct Instance
|
||||
time::Int64
|
||||
products::Array{Product, 1}
|
||||
collection_centers::Array{CollectionCenter, 1}
|
||||
plants::Array{Plant, 1}
|
||||
building_period::Array{Int64}
|
||||
end
|
||||
|
||||
|
||||
function validate(json, schema)
|
||||
result = JSONSchema.validate(json, schema)
|
||||
if result !== nothing
|
||||
if result isa JSONSchema.SingleIssue
|
||||
path = join(result.path, " → ")
|
||||
if length(path) == 0
|
||||
path = "root"
|
||||
end
|
||||
msg = "$(result.msg) in $(path)"
|
||||
else
|
||||
msg = convert(String, result)
|
||||
end
|
||||
throw(msg)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
function parsefile(path::String)::Instance
|
||||
return RELOG.parse(JSON.parsefile(path))
|
||||
end
|
||||
|
||||
|
||||
function parse(json)::Instance
|
||||
basedir = dirname(@__FILE__)
|
||||
json_schema = JSON.parsefile("$basedir/schemas/input.json")
|
||||
validate(json, Schema(json_schema))
|
||||
|
||||
T = json["parameters"]["time horizon (years)"]
|
||||
json_schema["definitions"]["TimeSeries"]["minItems"] = T
|
||||
json_schema["definitions"]["TimeSeries"]["maxItems"] = T
|
||||
validate(json, Schema(json_schema))
|
||||
|
||||
building_period = [1]
|
||||
if "building period (years)" in keys(json)
|
||||
building_period = json["building period (years)"]
|
||||
end
|
||||
|
||||
plants = Plant[]
|
||||
products = Product[]
|
||||
collection_centers = CollectionCenter[]
|
||||
prod_name_to_product = Dict{String, Product}()
|
||||
|
||||
# Create products
|
||||
for (product_name, product_dict) in json["products"]
|
||||
cost = product_dict["transportation cost (\$/km/tonne)"]
|
||||
energy = zeros(T)
|
||||
emissions = Dict()
|
||||
|
||||
if "transportation energy (J/km/tonne)" in keys(product_dict)
|
||||
energy = product_dict["transportation energy (J/km/tonne)"]
|
||||
end
|
||||
|
||||
if "transportation emissions (tonne/km/tonne)" in keys(product_dict)
|
||||
emissions = product_dict["transportation emissions (tonne/km/tonne)"]
|
||||
end
|
||||
|
||||
product = Product(product_name, cost, energy, emissions)
|
||||
push!(products, product)
|
||||
prod_name_to_product[product_name] = product
|
||||
|
||||
# Create collection centers
|
||||
if "initial amounts" in keys(product_dict)
|
||||
for (center_name, center_dict) in product_dict["initial amounts"]
|
||||
center = CollectionCenter(length(collection_centers) + 1,
|
||||
center_name,
|
||||
center_dict["latitude (deg)"],
|
||||
center_dict["longitude (deg)"],
|
||||
product,
|
||||
center_dict["amount (tonne)"])
|
||||
push!(collection_centers, center)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Create plants
|
||||
for (plant_name, plant_dict) in json["plants"]
|
||||
input = prod_name_to_product[plant_dict["input"]]
|
||||
output = Dict()
|
||||
|
||||
# Plant outputs
|
||||
if "outputs (tonne/tonne)" in keys(plant_dict)
|
||||
output = Dict(prod_name_to_product[key] => value
|
||||
for (key, value) in plant_dict["outputs (tonne/tonne)"]
|
||||
if value > 0)
|
||||
end
|
||||
|
||||
energy = zeros(T)
|
||||
emissions = Dict()
|
||||
|
||||
if "energy (GJ/tonne)" in keys(plant_dict)
|
||||
energy = plant_dict["energy (GJ/tonne)"]
|
||||
end
|
||||
|
||||
if "emissions (tonne/tonne)" in keys(plant_dict)
|
||||
emissions = plant_dict["emissions (tonne/tonne)"]
|
||||
end
|
||||
|
||||
for (location_name, location_dict) in plant_dict["locations"]
|
||||
sizes = PlantSize[]
|
||||
disposal_limit = Dict(p => [0.0 for t in 1:T] for p in keys(output))
|
||||
disposal_cost = Dict(p => [0.0 for t in 1:T] for p in keys(output))
|
||||
|
||||
# Disposal
|
||||
if "disposal" in keys(location_dict)
|
||||
for (product_name, disposal_dict) in location_dict["disposal"]
|
||||
limit = [1e8 for t in 1:T]
|
||||
if "limit (tonne)" in keys(disposal_dict)
|
||||
limit = disposal_dict["limit (tonne)"]
|
||||
end
|
||||
disposal_limit[prod_name_to_product[product_name]] = limit
|
||||
disposal_cost[prod_name_to_product[product_name]] = disposal_dict["cost (\$/tonne)"]
|
||||
end
|
||||
end
|
||||
|
||||
# Capacities
|
||||
for (capacity_name, capacity_dict) in location_dict["capacities (tonne)"]
|
||||
push!(sizes, PlantSize(Base.parse(Float64, capacity_name),
|
||||
capacity_dict["variable operating cost (\$/tonne)"],
|
||||
capacity_dict["fixed operating cost (\$)"],
|
||||
capacity_dict["opening cost (\$)"]))
|
||||
end
|
||||
length(sizes) > 1 || push!(sizes, sizes[1])
|
||||
sort!(sizes, by = x -> x.capacity)
|
||||
|
||||
# Storage
|
||||
storage_limit = 0
|
||||
storage_cost = zeros(T)
|
||||
if "storage" in keys(location_dict)
|
||||
storage_dict = location_dict["storage"]
|
||||
storage_limit = storage_dict["limit (tonne)"]
|
||||
storage_cost = storage_dict["cost (\$/tonne)"]
|
||||
end
|
||||
|
||||
# Validation: Capacities
|
||||
if length(sizes) != 2
|
||||
throw("At most two capacities are supported")
|
||||
end
|
||||
if sizes[1].variable_operating_cost != sizes[2].variable_operating_cost
|
||||
throw("Variable operating costs must be the same for all capacities")
|
||||
end
|
||||
|
||||
plant = Plant(length(plants) + 1,
|
||||
plant_name,
|
||||
location_name,
|
||||
input,
|
||||
output,
|
||||
location_dict["latitude (deg)"],
|
||||
location_dict["longitude (deg)"],
|
||||
disposal_limit,
|
||||
disposal_cost,
|
||||
sizes,
|
||||
energy,
|
||||
emissions,
|
||||
storage_limit,
|
||||
storage_cost)
|
||||
|
||||
push!(plants, plant)
|
||||
end
|
||||
end
|
||||
|
||||
@info @sprintf("%12d collection centers", length(collection_centers))
|
||||
@info @sprintf("%12d candidate plant locations", length(plants))
|
||||
|
||||
return Instance(T, products, collection_centers, plants, building_period)
|
||||
end
|
||||
|
||||
|
||||
"""
|
||||
_compress(instance::Instance)
|
||||
|
||||
Create a single-period instance from a multi-period one. Specifically,
|
||||
replaces every time-dependent attribute, such as initial_amounts,
|
||||
by a list with a single element, which is either a sum, an average,
|
||||
or something else that makes sense to that specific attribute.
|
||||
"""
|
||||
function _compress(instance::Instance)::Instance
|
||||
T = instance.time
|
||||
compressed = deepcopy(instance)
|
||||
compressed.time = 1
|
||||
compressed.building_period = [1]
|
||||
|
||||
# Compress products
|
||||
for p in compressed.products
|
||||
p.transportation_cost = [mean(p.transportation_cost)]
|
||||
p.transportation_energy = [mean(p.transportation_energy)]
|
||||
for (emission_name, emission_value) in p.transportation_emissions
|
||||
p.transportation_emissions[emission_name] = [mean(emission_value)]
|
||||
end
|
||||
end
|
||||
|
||||
# Compress collection centers
|
||||
for c in compressed.collection_centers
|
||||
c.amount = [maximum(c.amount) * T]
|
||||
end
|
||||
|
||||
# Compress plants
|
||||
for plant in compressed.plants
|
||||
plant.energy = [mean(plant.energy)]
|
||||
for (emission_name, emission_value) in plant.emissions
|
||||
plant.emissions[emission_name] = [mean(emission_value)]
|
||||
end
|
||||
for s in plant.sizes
|
||||
s.capacity *= T
|
||||
s.variable_operating_cost = [mean(s.variable_operating_cost)]
|
||||
s.opening_cost = [s.opening_cost[1]]
|
||||
s.fixed_operating_cost = [sum(s.fixed_operating_cost)]
|
||||
end
|
||||
for (prod_name, disp_limit) in plant.disposal_limit
|
||||
plant.disposal_limit[prod_name] = [sum(disp_limit)]
|
||||
end
|
||||
for (prod_name, disp_cost) in plant.disposal_cost
|
||||
plant.disposal_cost[prod_name] = [mean(disp_cost)]
|
||||
end
|
||||
end
|
||||
|
||||
return compressed
|
||||
end
|
||||
101
src/instance/compress.jl
Normal file
@@ -0,0 +1,101 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataStructures
|
||||
using JSON
|
||||
using JSONSchema
|
||||
using Printf
|
||||
using Statistics
|
||||
|
||||
"""
|
||||
_compress(instance::Instance)
|
||||
|
||||
Create a single-period instance from a multi-period one. Specifically,
|
||||
replaces every time-dependent attribute, such as initial_amounts,
|
||||
by a list with a single element, which is either a sum, an average,
|
||||
or something else that makes sense to that specific attribute.
|
||||
"""
|
||||
function _compress(instance::Instance)::Instance
|
||||
T = instance.time
|
||||
compressed = deepcopy(instance)
|
||||
compressed.time = 1
|
||||
compressed.building_period = [1]
|
||||
|
||||
# Compress products
|
||||
for p in compressed.products
|
||||
p.transportation_cost = [mean(p.transportation_cost)]
|
||||
p.transportation_energy = [mean(p.transportation_energy)]
|
||||
for (emission_name, emission_value) in p.transportation_emissions
|
||||
p.transportation_emissions[emission_name] = [mean(emission_value)]
|
||||
end
|
||||
p.disposal_limit = [maximum(p.disposal_limit) * T]
|
||||
p.disposal_cost = [mean(p.disposal_cost)]
|
||||
end
|
||||
|
||||
# Compress collection centers
|
||||
for c in compressed.collection_centers
|
||||
c.amount = [maximum(c.amount) * T]
|
||||
end
|
||||
|
||||
# Compress plants
|
||||
for plant in compressed.plants
|
||||
plant.energy = [mean(plant.energy)]
|
||||
for (emission_name, emission_value) in plant.emissions
|
||||
plant.emissions[emission_name] = [mean(emission_value)]
|
||||
end
|
||||
for s in plant.sizes
|
||||
s.capacity *= T
|
||||
s.variable_operating_cost = [mean(s.variable_operating_cost)]
|
||||
s.opening_cost = [s.opening_cost[1]]
|
||||
s.fixed_operating_cost = [sum(s.fixed_operating_cost)]
|
||||
end
|
||||
for (prod_name, disp_limit) in plant.disposal_limit
|
||||
plant.disposal_limit[prod_name] = [sum(disp_limit)]
|
||||
end
|
||||
for (prod_name, disp_cost) in plant.disposal_cost
|
||||
plant.disposal_cost[prod_name] = [mean(disp_cost)]
|
||||
end
|
||||
end
|
||||
|
||||
return compressed
|
||||
end
|
||||
|
||||
function _slice(instance::Instance, T::UnitRange)::Instance
|
||||
sliced = deepcopy(instance)
|
||||
sliced.time = length(T)
|
||||
|
||||
for p in sliced.products
|
||||
p.transportation_cost = p.transportation_cost[T]
|
||||
p.transportation_energy = p.transportation_energy[T]
|
||||
for (emission_name, emission_value) in p.transportation_emissions
|
||||
p.transportation_emissions[emission_name] = emission_value[T]
|
||||
end
|
||||
p.disposal_limit = p.disposal_limit[T]
|
||||
p.disposal_cost = p.disposal_cost[T]
|
||||
end
|
||||
|
||||
for c in sliced.collection_centers
|
||||
c.amount = c.amount[T]
|
||||
end
|
||||
|
||||
for plant in sliced.plants
|
||||
plant.energy = plant.energy[T]
|
||||
for (emission_name, emission_value) in plant.emissions
|
||||
plant.emissions[emission_name] = emission_value[T]
|
||||
end
|
||||
for s in plant.sizes
|
||||
s.variable_operating_cost = s.variable_operating_cost[T]
|
||||
s.opening_cost = s.opening_cost[T]
|
||||
s.fixed_operating_cost = s.fixed_operating_cost[T]
|
||||
end
|
||||
for (prod_name, disp_limit) in plant.disposal_limit
|
||||
plant.disposal_limit[prod_name] = disp_limit[T]
|
||||
end
|
||||
for (prod_name, disp_cost) in plant.disposal_cost
|
||||
plant.disposal_cost[prod_name] = disp_cost[T]
|
||||
end
|
||||
end
|
||||
|
||||
return sliced
|
||||
end
|
||||
212
src/instance/geodb.jl
Normal file
@@ -0,0 +1,212 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using CRC
|
||||
using CSV
|
||||
using DataFrames
|
||||
using Shapefile
|
||||
using Statistics
|
||||
using ZipFile
|
||||
using ProgressBars
|
||||
using OrderedCollections
|
||||
|
||||
import Downloads: download
|
||||
import Base: parse
|
||||
|
||||
crc32 = crc(CRC_32)
|
||||
|
||||
struct GeoPoint
|
||||
lat::Float64
|
||||
lon::Float64
|
||||
end
|
||||
|
||||
struct GeoRegion
|
||||
centroid::GeoPoint
|
||||
population::Int
|
||||
GeoRegion(; centroid, population) = new(centroid, population)
|
||||
end
|
||||
|
||||
DB_CACHE = Dict{String,Dict{String,GeoRegion}}()
|
||||
|
||||
function centroid(geom::Shapefile.Polygon)::GeoPoint
|
||||
x_max, x_min, y_max, y_min = -Inf, Inf, -Inf, Inf
|
||||
for p in geom.points
|
||||
x_max = max(x_max, p.x)
|
||||
x_min = min(x_min, p.x)
|
||||
y_max = max(y_max, p.y)
|
||||
y_min = min(y_min, p.y)
|
||||
end
|
||||
x_center = (x_max + x_min) / 2.0
|
||||
y_center = (y_max + y_min) / 2.0
|
||||
return GeoPoint(round(y_center, digits = 5), round(x_center, digits = 5))
|
||||
end
|
||||
|
||||
function _download_file(url, output, expected_crc32)::Nothing
|
||||
if isfile(output)
|
||||
return
|
||||
end
|
||||
mkpath(dirname(output))
|
||||
@info "Downloading: $url"
|
||||
fname = download(url)
|
||||
actual_crc32 = open(crc32, fname)
|
||||
expected_crc32 == actual_crc32 || error("CRC32 mismatch")
|
||||
cp(fname, output)
|
||||
return
|
||||
end
|
||||
|
||||
function _download_zip(url, outputdir, expected_output_file, expected_crc32)::Nothing
|
||||
if isfile(expected_output_file)
|
||||
return
|
||||
end
|
||||
mkpath(outputdir)
|
||||
@info "Downloading: $url"
|
||||
zip_filename = download(url)
|
||||
actual_crc32 = open(crc32, zip_filename)
|
||||
expected_crc32 == actual_crc32 || error("CRC32 mismatch")
|
||||
open(zip_filename) do zip_file
|
||||
zr = ZipFile.Reader(zip_file)
|
||||
for file in zr.files
|
||||
open(joinpath(outputdir, file.name), "w") do output_file
|
||||
write(output_file, read(file))
|
||||
end
|
||||
end
|
||||
end
|
||||
return
|
||||
end
|
||||
|
||||
function _geodb_load_gov_census(;
|
||||
db_name,
|
||||
extract_cols,
|
||||
shp_crc32,
|
||||
shp_filename,
|
||||
shp_url,
|
||||
population_url,
|
||||
population_crc32,
|
||||
population_col,
|
||||
population_preprocess,
|
||||
population_join,
|
||||
)::Dict{String,GeoRegion}
|
||||
basedir = joinpath(dirname(@__FILE__), "..", "..", "data", db_name)
|
||||
csv_filename = "$basedir/locations.csv"
|
||||
if !isfile(csv_filename)
|
||||
# Download required files
|
||||
_download_zip(shp_url, basedir, joinpath(basedir, shp_filename), shp_crc32)
|
||||
_download_file(population_url, "$basedir/population.csv", population_crc32)
|
||||
|
||||
# Read shapefile
|
||||
@info "Processing: $shp_filename"
|
||||
table = Shapefile.Table(joinpath(basedir, shp_filename))
|
||||
geoms = Shapefile.shapes(table)
|
||||
|
||||
# Build empty dataframe
|
||||
df = DataFrame()
|
||||
cols = extract_cols(table, 1)
|
||||
for k in keys(cols)
|
||||
df[!, k] = []
|
||||
end
|
||||
df[!, "latitude"] = Float64[]
|
||||
df[!, "longitude"] = Float64[]
|
||||
|
||||
# Add regions to dataframe
|
||||
for (i, geom) in tqdm(enumerate(geoms))
|
||||
c = centroid(geom)
|
||||
cols = extract_cols(table, i)
|
||||
push!(df, [values(cols)..., c.lat, c.lon])
|
||||
end
|
||||
sort!(df)
|
||||
|
||||
# Join with population data
|
||||
population = DataFrame(CSV.File("$basedir/population.csv"))
|
||||
population_preprocess(population)
|
||||
population = population[:, [population_join, population_col]]
|
||||
rename!(population, population_col => "population")
|
||||
df = leftjoin(df, population, on = population_join)
|
||||
|
||||
# Write output
|
||||
CSV.write(csv_filename, df)
|
||||
end
|
||||
if db_name ∉ keys(DB_CACHE)
|
||||
csv = CSV.File(csv_filename)
|
||||
DB_CACHE[db_name] = Dict(
|
||||
string(row.id) => GeoRegion(
|
||||
centroid = GeoPoint(row.latitude, row.longitude),
|
||||
population = (row.population === missing ? 0 : row.population),
|
||||
) for row in csv
|
||||
)
|
||||
end
|
||||
return DB_CACHE[db_name]
|
||||
end
|
||||
|
||||
# 2018 US counties
|
||||
# -----------------------------------------------------------------------------
|
||||
function _extract_cols_2018_us_county(
|
||||
table::Shapefile.Table,
|
||||
i::Int,
|
||||
)::OrderedDict{String,Any}
|
||||
return OrderedDict(
|
||||
"id" => table.STATEFP[i] * table.COUNTYFP[i],
|
||||
"statefp" => table.STATEFP[i],
|
||||
"countyfp" => table.COUNTYFP[i],
|
||||
"name" => table.NAME[i],
|
||||
)
|
||||
end
|
||||
|
||||
function _population_preprocess_2018_us_county(df)
|
||||
df[!, "id"] = [@sprintf("%02d%03d", row.STATE, row.COUNTY) for row in eachrow(df)]
|
||||
end
|
||||
|
||||
function _geodb_load_2018_us_county()::Dict{String,GeoRegion}
|
||||
return _geodb_load_gov_census(
|
||||
db_name = "2018-us-county",
|
||||
extract_cols = _extract_cols_2018_us_county,
|
||||
shp_crc32 = 0x83eaec6d,
|
||||
shp_filename = "cb_2018_us_county_500k.shp",
|
||||
shp_url = "https://www2.census.gov/geo/tiger/GENZ2018/shp/cb_2018_us_county_500k.zip",
|
||||
population_url = "https://www2.census.gov/programs-surveys/popest/datasets/2010-2019/counties/totals/co-est2019-alldata.csv",
|
||||
population_crc32 = 0xf85b0405,
|
||||
population_col = "POPESTIMATE2019",
|
||||
population_join = "id",
|
||||
population_preprocess = _population_preprocess_2018_us_county,
|
||||
)
|
||||
end
|
||||
|
||||
# US States
|
||||
# -----------------------------------------------------------------------------
|
||||
function _extract_cols_us_state(table::Shapefile.Table, i::Int)::OrderedDict{String,Any}
|
||||
return OrderedDict(
|
||||
"id" => table.STUSPS[i],
|
||||
"statefp" => parse(Int, table.STATEFP[i]),
|
||||
"name" => table.NAME[i],
|
||||
)
|
||||
end
|
||||
|
||||
function _population_preprocess_us_state(df)
|
||||
rename!(df, "STATE" => "statefp")
|
||||
end
|
||||
|
||||
function _geodb_load_us_state()::Dict{String,GeoRegion}
|
||||
return _geodb_load_gov_census(
|
||||
db_name = "us-state",
|
||||
extract_cols = _extract_cols_us_state,
|
||||
shp_crc32 = 0x9469e5ca,
|
||||
shp_filename = "cb_2018_us_state_500k.shp",
|
||||
shp_url = "https://www2.census.gov/geo/tiger/GENZ2018/shp/cb_2018_us_state_500k.zip",
|
||||
population_url = "http://www2.census.gov/programs-surveys/popest/datasets/2010-2019/national/totals/nst-est2019-alldata.csv",
|
||||
population_crc32 = 0x191cc64c,
|
||||
population_col = "POPESTIMATE2019",
|
||||
population_join = "statefp",
|
||||
population_preprocess = _population_preprocess_us_state,
|
||||
)
|
||||
end
|
||||
|
||||
function geodb_load(db_name::AbstractString)::Dict{String,GeoRegion}
|
||||
db_name == "2018-us-county" && return _geodb_load_2018_us_county()
|
||||
db_name == "us-state" && return _geodb_load_us_state()
|
||||
error("Unknown database: $db_name")
|
||||
end
|
||||
|
||||
function geodb_query(name)::GeoRegion
|
||||
db_name, id = split(name, ":")
|
||||
return geodb_load(db_name)[id]
|
||||
end
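A usage sketch, assuming the `"database:id"` naming handled above (the specific id below is only an example):

```julia
# Look up a region by "database:id"; for the "us-state" database the id
# is the two-letter state code taken from the census shapefile.
region = geodb_query("us-state:IL")
region.centroid.lat, region.centroid.lon, region.population
```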
|
||||
201
src/instance/parse.jl
Normal file
@@ -0,0 +1,201 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataStructures
|
||||
using JSON
|
||||
using JSONSchema
|
||||
using Printf
|
||||
using Statistics
|
||||
|
||||
function parsefile(path::String)::Instance
|
||||
return RELOG.parse(JSON.parsefile(path))
|
||||
end
|
||||
|
||||
function parse(json)::Instance
|
||||
basedir = dirname(@__FILE__)
|
||||
json_schema = JSON.parsefile("$basedir/../schemas/input.json")
|
||||
validate(json, Schema(json_schema))
|
||||
|
||||
T = json["parameters"]["time horizon (years)"]
|
||||
json_schema["definitions"]["TimeSeries"]["minItems"] = T
|
||||
json_schema["definitions"]["TimeSeries"]["maxItems"] = T
|
||||
validate(json, Schema(json_schema))
|
||||
|
||||
building_period = [1]
|
||||
if "building period (years)" in keys(json)
|
||||
building_period = json["building period (years)"]
|
||||
end
|
||||
|
||||
plants = Plant[]
|
||||
products = Product[]
|
||||
collection_centers = CollectionCenter[]
|
||||
prod_name_to_product = Dict{String,Product}()
|
||||
|
||||
# Create products
|
||||
for (product_name, product_dict) in json["products"]
|
||||
cost = product_dict["transportation cost (\$/km/tonne)"]
|
||||
energy = zeros(T)
|
||||
emissions = Dict()
|
||||
disposal_limit = zeros(T)
|
||||
disposal_cost = zeros(T)
|
||||
|
||||
if "transportation energy (J/km/tonne)" in keys(product_dict)
|
||||
energy = product_dict["transportation energy (J/km/tonne)"]
|
||||
end
|
||||
|
||||
if "transportation emissions (tonne/km/tonne)" in keys(product_dict)
|
||||
emissions = product_dict["transportation emissions (tonne/km/tonne)"]
|
||||
end
|
||||
|
||||
if "disposal limit (tonne)" in keys(product_dict)
|
||||
disposal_limit = product_dict["disposal limit (tonne)"]
|
||||
end
|
||||
|
||||
if "disposal cost (\$/tonne)" in keys(product_dict)
|
||||
disposal_cost = product_dict["disposal cost (\$/tonne)"]
|
||||
end
|
||||
|
||||
prod_centers = []
|
||||
|
||||
product = Product(
|
||||
product_name,
|
||||
cost,
|
||||
energy,
|
||||
emissions,
|
||||
disposal_limit,
|
||||
disposal_cost,
|
||||
prod_centers,
|
||||
)
|
||||
push!(products, product)
|
||||
prod_name_to_product[product_name] = product
|
||||
|
||||
# Create collection centers
|
||||
if "initial amounts" in keys(product_dict)
|
||||
for (center_name, center_dict) in product_dict["initial amounts"]
|
||||
if "location" in keys(center_dict)
|
||||
region = geodb_query(center_dict["location"])
|
||||
center_dict["latitude (deg)"] = region.centroid.lat
|
||||
center_dict["longitude (deg)"] = region.centroid.lon
|
||||
end
|
||||
center = CollectionCenter(
|
||||
length(collection_centers) + 1,
|
||||
center_name,
|
||||
center_dict["latitude (deg)"],
|
||||
center_dict["longitude (deg)"],
|
||||
product,
|
||||
center_dict["amount (tonne)"],
|
||||
)
|
||||
push!(prod_centers, center)
|
||||
push!(collection_centers, center)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Create plants
|
||||
for (plant_name, plant_dict) in json["plants"]
|
||||
input = prod_name_to_product[plant_dict["input"]]
|
||||
output = Dict()
|
||||
|
||||
# Plant outputs
|
||||
if "outputs (tonne/tonne)" in keys(plant_dict)
|
||||
output = Dict(
|
||||
prod_name_to_product[key] => value for
|
||||
(key, value) in plant_dict["outputs (tonne/tonne)"] if value > 0
|
||||
)
|
||||
end
|
||||
|
||||
energy = zeros(T)
|
||||
emissions = Dict()
|
||||
|
||||
if "energy (GJ/tonne)" in keys(plant_dict)
|
||||
energy = plant_dict["energy (GJ/tonne)"]
|
||||
end
|
||||
|
||||
if "emissions (tonne/tonne)" in keys(plant_dict)
|
||||
emissions = plant_dict["emissions (tonne/tonne)"]
|
||||
end
|
||||
|
||||
for (location_name, location_dict) in plant_dict["locations"]
|
||||
sizes = PlantSize[]
|
||||
disposal_limit = Dict(p => [0.0 for t = 1:T] for p in keys(output))
|
||||
disposal_cost = Dict(p => [0.0 for t = 1:T] for p in keys(output))
|
||||
|
||||
# GeoDB
|
||||
if "location" in keys(location_dict)
|
||||
region = geodb_query(location_dict["location"])
|
||||
location_dict["latitude (deg)"] = region.centroid.lat
|
||||
location_dict["longitude (deg)"] = region.centroid.lon
|
||||
end
|
||||
|
||||
# Disposal
|
||||
if "disposal" in keys(location_dict)
|
||||
for (product_name, disposal_dict) in location_dict["disposal"]
|
||||
limit = [1e8 for t = 1:T]
|
||||
if "limit (tonne)" in keys(disposal_dict)
|
||||
limit = disposal_dict["limit (tonne)"]
|
||||
end
|
||||
disposal_limit[prod_name_to_product[product_name]] = limit
|
||||
disposal_cost[prod_name_to_product[product_name]] =
|
||||
disposal_dict["cost (\$/tonne)"]
|
||||
end
|
||||
end
|
||||
|
||||
# Capacities
|
||||
for (capacity_name, capacity_dict) in location_dict["capacities (tonne)"]
|
||||
push!(
|
||||
sizes,
|
||||
PlantSize(
|
||||
Base.parse(Float64, capacity_name),
|
||||
capacity_dict["variable operating cost (\$/tonne)"],
|
||||
capacity_dict["fixed operating cost (\$)"],
|
||||
capacity_dict["opening cost (\$)"],
|
||||
),
|
||||
)
|
||||
end
|
||||
length(sizes) > 1 || push!(sizes, sizes[1])
|
||||
sort!(sizes, by = x -> x.capacity)
|
||||
|
||||
# Storage
|
||||
storage_limit = 0
|
||||
storage_cost = zeros(T)
|
||||
if "storage" in keys(location_dict)
|
||||
storage_dict = location_dict["storage"]
|
||||
storage_limit = storage_dict["limit (tonne)"]
|
||||
storage_cost = storage_dict["cost (\$/tonne)"]
|
||||
end
|
||||
|
||||
# Validation: Capacities
|
||||
if length(sizes) != 2
|
||||
throw("At most two capacities are supported")
|
||||
end
|
||||
if sizes[1].variable_operating_cost != sizes[2].variable_operating_cost
|
||||
throw("Variable operating costs must be the same for all capacities")
|
||||
end
|
||||
|
||||
plant = Plant(
|
||||
length(plants) + 1,
|
||||
plant_name,
|
||||
location_name,
|
||||
input,
|
||||
output,
|
||||
location_dict["latitude (deg)"],
|
||||
location_dict["longitude (deg)"],
|
||||
disposal_limit,
|
||||
disposal_cost,
|
||||
sizes,
|
||||
energy,
|
||||
emissions,
|
||||
storage_limit,
|
||||
storage_cost,
|
||||
)
|
||||
|
||||
push!(plants, plant)
|
||||
end
|
||||
end
|
||||
|
||||
@info @sprintf("%12d collection centers", length(collection_centers))
|
||||
@info @sprintf("%12d candidate plant locations", length(plants))
|
||||
|
||||
return Instance(T, products, collection_centers, plants, building_period)
|
||||
end
|
||||
60
src/instance/structs.jl
Normal file
@@ -0,0 +1,60 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataStructures
|
||||
using JSON
|
||||
using JSONSchema
|
||||
using Printf
|
||||
using Statistics
|
||||
|
||||
mutable struct Product
|
||||
name::String
|
||||
transportation_cost::Vector{Float64}
|
||||
transportation_energy::Vector{Float64}
|
||||
transportation_emissions::Dict{String,Vector{Float64}}
|
||||
disposal_limit::Vector{Float64}
|
||||
disposal_cost::Vector{Float64}
|
||||
collection_centers::Vector
|
||||
end
|
||||
|
||||
mutable struct CollectionCenter
|
||||
index::Int64
|
||||
name::String
|
||||
latitude::Float64
|
||||
longitude::Float64
|
||||
product::Product
|
||||
amount::Vector{Float64}
|
||||
end
|
||||
|
||||
mutable struct PlantSize
|
||||
capacity::Float64
|
||||
variable_operating_cost::Vector{Float64}
|
||||
fixed_operating_cost::Vector{Float64}
|
||||
opening_cost::Vector{Float64}
|
||||
end
|
||||
|
||||
mutable struct Plant
|
||||
index::Int64
|
||||
plant_name::String
|
||||
location_name::String
|
||||
input::Product
|
||||
output::Dict{Product,Float64}
|
||||
latitude::Float64
|
||||
longitude::Float64
|
||||
disposal_limit::Dict{Product,Vector{Float64}}
|
||||
disposal_cost::Dict{Product,Vector{Float64}}
|
||||
sizes::Vector{PlantSize}
|
||||
energy::Vector{Float64}
|
||||
emissions::Dict{String,Vector{Float64}}
|
||||
storage_limit::Float64
|
||||
storage_cost::Vector{Float64}
|
||||
end
|
||||
|
||||
mutable struct Instance
|
||||
time::Int64
|
||||
products::Vector{Product}
|
||||
collection_centers::Vector{CollectionCenter}
|
||||
plants::Vector{Plant}
|
||||
building_period::Vector{Int64}
|
||||
end
|
||||
21
src/instance/validate.jl
Normal file
@@ -0,0 +1,21 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataStructures
|
||||
using JSON
|
||||
using JSONSchema
|
||||
using Printf
|
||||
using Statistics
|
||||
|
||||
function validate(json, schema)
|
||||
result = JSONSchema.validate(json, schema)
|
||||
if result !== nothing
|
||||
if result isa JSONSchema.SingleIssue
|
||||
msg = "$(result.reason) in $(result.path)"
|
||||
else
|
||||
msg = convert(String, result)
|
||||
end
|
||||
throw("Error parsing input file: $(msg)")
|
||||
end
|
||||
end
|
||||
535
src/model.jl
@@ -1,535 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using JuMP, LinearAlgebra, Geodesy, Cbc, Clp, ProgressBars, Printf, DataStructures
|
||||
|
||||
|
||||
mutable struct ManufacturingModel
|
||||
mip::JuMP.Model
|
||||
vars::DotDict
|
||||
eqs::DotDict
|
||||
instance::Instance
|
||||
graph::Graph
|
||||
end
|
||||
|
||||
|
||||
function build_model(instance::Instance, graph::Graph, optimizer)::ManufacturingModel
|
||||
model = ManufacturingModel(Model(optimizer), DotDict(), DotDict(), instance, graph)
|
||||
create_vars!(model)
|
||||
create_objective_function!(model)
|
||||
create_shipping_node_constraints!(model)
|
||||
create_process_node_constraints!(model)
|
||||
return model
|
||||
end
|
||||
|
||||
|
||||
function create_vars!(model::ManufacturingModel)
|
||||
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
|
||||
|
||||
vars.flow = Dict((a, t) => @variable(mip, lower_bound=0)
|
||||
for a in graph.arcs, t in 1:T)
|
||||
|
||||
vars.dispose = Dict((n, t) => @variable(mip,
|
||||
lower_bound=0,
|
||||
upper_bound=n.location.disposal_limit[n.product][t])
|
||||
for n in values(graph.plant_shipping_nodes), t in 1:T)
|
||||
|
||||
vars.store = Dict((n, t) => @variable(mip,
|
||||
lower_bound=0,
|
||||
upper_bound=n.location.storage_limit)
|
||||
for n in values(graph.process_nodes), t in 1:T)
|
||||
|
||||
vars.process = Dict((n, t) => @variable(mip,
|
||||
lower_bound = 0)
|
||||
for n in values(graph.process_nodes), t in 1:T)
|
||||
|
||||
vars.open_plant = Dict((n, t) => @variable(mip, binary=true)
|
||||
for n in values(graph.process_nodes), t in 1:T)
|
||||
|
||||
vars.is_open = Dict((n, t) => @variable(mip, binary=true)
|
||||
for n in values(graph.process_nodes), t in 1:T)
|
||||
|
||||
vars.capacity = Dict((n, t) => @variable(mip,
|
||||
lower_bound = 0,
|
||||
upper_bound = n.location.sizes[2].capacity)
|
||||
for n in values(graph.process_nodes), t in 1:T)
|
||||
|
||||
vars.expansion = Dict((n, t) => @variable(mip,
|
||||
lower_bound = 0,
|
||||
upper_bound = n.location.sizes[2].capacity -
|
||||
n.location.sizes[1].capacity)
|
||||
for n in values(graph.process_nodes), t in 1:T)
|
||||
end
|
||||
|
||||
|
||||
function slope_open(plant, t)
|
||||
if plant.sizes[2].capacity <= plant.sizes[1].capacity
|
||||
0.0
|
||||
else
|
||||
(plant.sizes[2].opening_cost[t] - plant.sizes[1].opening_cost[t]) /
|
||||
(plant.sizes[2].capacity - plant.sizes[1].capacity)
|
||||
end
|
||||
end
|
||||
|
||||
function slope_fix_oper_cost(plant, t)
|
||||
if plant.sizes[2].capacity <= plant.sizes[1].capacity
|
||||
0.0
|
||||
else
|
||||
(plant.sizes[2].fixed_operating_cost[t] - plant.sizes[1].fixed_operating_cost[t]) /
|
||||
(plant.sizes[2].capacity - plant.sizes[1].capacity)
|
||||
end
|
||||
end
|
||||
|
||||
function create_objective_function!(model::ManufacturingModel)
|
||||
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
|
||||
obj = AffExpr(0.0)
|
||||
|
||||
# Process node costs
|
||||
for n in values(graph.process_nodes), t in 1:T
|
||||
|
||||
# Transportation and variable operating costs
|
||||
for a in n.incoming_arcs
|
||||
c = n.location.input.transportation_cost[t] * a.values["distance"]
|
||||
add_to_expression!(obj, c, vars.flow[a, t])
|
||||
end
|
||||
|
||||
# Opening costs
|
||||
add_to_expression!(obj,
|
||||
n.location.sizes[1].opening_cost[t],
|
||||
vars.open_plant[n, t])
|
||||
|
||||
# Fixed operating costs (base)
|
||||
add_to_expression!(obj,
|
||||
n.location.sizes[1].fixed_operating_cost[t],
|
||||
vars.is_open[n, t])
|
||||
|
||||
# Fixed operating costs (expansion)
|
||||
add_to_expression!(obj,
|
||||
slope_fix_oper_cost(n.location, t),
|
||||
vars.expansion[n, t])
|
||||
|
||||
# Processing costs
|
||||
add_to_expression!(obj,
|
||||
n.location.sizes[1].variable_operating_cost[t],
|
||||
vars.process[n, t])
|
||||
|
||||
# Storage costs
|
||||
add_to_expression!(obj,
|
||||
n.location.storage_cost[t],
|
||||
vars.store[n, t])
|
||||
|
||||
# Expansion costs
|
||||
if t < T
|
||||
add_to_expression!(obj,
|
||||
slope_open(n.location, t) - slope_open(n.location, t + 1),
|
||||
vars.expansion[n, t])
|
||||
else
|
||||
add_to_expression!(obj,
|
||||
slope_open(n.location, t),
|
||||
vars.expansion[n, t])
|
||||
end
|
||||
end
|
||||
|
||||
# Shipping node costs
|
||||
for n in values(graph.plant_shipping_nodes), t in 1:T
|
||||
|
||||
# Disposal costs
|
||||
add_to_expression!(obj,
|
||||
n.location.disposal_cost[n.product][t],
|
||||
vars.dispose[n, t])
|
||||
end
|
||||
|
||||
@objective(mip, Min, obj)
|
||||
end
|
||||
|
||||
|
||||
function create_shipping_node_constraints!(model::ManufacturingModel)
|
||||
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
|
||||
eqs = model.eqs
|
||||
|
||||
eqs.balance = OrderedDict()
|
||||
|
||||
for t in 1:T
|
||||
# Collection centers
|
||||
for n in graph.collection_shipping_nodes
|
||||
eqs.balance[n, t] = @constraint(mip,
|
||||
sum(vars.flow[a, t] for a in n.outgoing_arcs)
|
||||
== n.location.amount[t])
|
||||
end
|
||||
|
||||
# Plants
|
||||
for n in graph.plant_shipping_nodes
|
||||
@constraint(mip,
|
||||
sum(vars.flow[a, t] for a in n.incoming_arcs) ==
|
||||
sum(vars.flow[a, t] for a in n.outgoing_arcs) + vars.dispose[n, t])
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
|
||||
function create_process_node_constraints!(model::ManufacturingModel)
|
||||
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
|
||||
|
||||
for t in 1:T, n in graph.process_nodes
|
||||
input_sum = AffExpr(0.0)
|
||||
for a in n.incoming_arcs
|
||||
add_to_expression!(input_sum, 1.0, vars.flow[a, t])
|
||||
end
|
||||
|
||||
# Output amount is implied by amount processed
|
||||
for a in n.outgoing_arcs
|
||||
@constraint(mip, vars.flow[a, t] == a.values["weight"] * vars.process[n, t])
|
||||
end
|
||||
|
||||
# If plant is closed, capacity is zero
|
||||
@constraint(mip, vars.capacity[n, t] <= n.location.sizes[2].capacity * vars.is_open[n, t])
|
||||
|
||||
# If plant is open, capacity is greater than base
|
||||
@constraint(mip, vars.capacity[n, t] >= n.location.sizes[1].capacity * vars.is_open[n, t])
|
||||
|
||||
# Capacity is linked to expansion
|
||||
@constraint(mip, vars.capacity[n, t] <= n.location.sizes[1].capacity + vars.expansion[n, t])
|
||||
|
||||
# Can only process up to capacity
|
||||
@constraint(mip, vars.process[n, t] <= vars.capacity[n, t])
|
||||
|
||||
if t > 1
|
||||
# Plant capacity can only increase over time
|
||||
@constraint(mip, vars.capacity[n, t] >= vars.capacity[n, t-1])
|
||||
@constraint(mip, vars.expansion[n, t] >= vars.expansion[n, t-1])
|
||||
end
|
||||
|
||||
# Amount received equals amount processed plus stored
|
||||
store_in = 0
|
||||
if t > 1
|
||||
store_in = vars.store[n, t-1]
|
||||
end
|
||||
if t == T
|
||||
@constraint(mip, vars.store[n, t] == 0)
|
||||
end
|
||||
@constraint(mip,
|
||||
input_sum + store_in == vars.store[n, t] + vars.process[n, t])
|
||||
|
||||
|
||||
# Plant is currently open if it was already open in the previous time period or
|
||||
# if it was built just now
|
||||
if t > 1
|
||||
@constraint(mip, vars.is_open[n, t] == vars.is_open[n, t-1] + vars.open_plant[n, t])
|
||||
else
|
||||
@constraint(mip, vars.is_open[n, t] == vars.open_plant[n, t])
|
||||
end
|
||||
|
||||
# Plant can only be opened during building period
|
||||
if t ∉ model.instance.building_period
|
||||
@constraint(mip, vars.open_plant[n, t] == 0)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
default_milp_optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
|
||||
default_lp_optimizer = optimizer_with_attributes(Clp.Optimizer, "LogLevel" => 0)
|
||||
|
||||
function solve(instance::Instance;
|
||||
optimizer=nothing,
|
||||
output=nothing,
|
||||
marginal_costs=true,
|
||||
)
|
||||
|
||||
milp_optimizer = lp_optimizer = optimizer
|
||||
if optimizer == nothing
|
||||
milp_optimizer = default_milp_optimizer
|
||||
lp_optimizer = default_lp_optimizer
|
||||
end
|
||||
|
||||
@info "Building graph..."
|
||||
graph = RELOG.build_graph(instance)
|
||||
@info @sprintf(" %12d time periods", instance.time)
|
||||
@info @sprintf(" %12d process nodes", length(graph.process_nodes))
|
||||
@info @sprintf(" %12d shipping nodes (plant)", length(graph.plant_shipping_nodes))
|
||||
@info @sprintf(" %12d shipping nodes (collection)", length(graph.collection_shipping_nodes))
|
||||
@info @sprintf(" %12d arcs", length(graph.arcs))
|
||||
|
||||
@info "Building optimization model..."
|
||||
model = RELOG.build_model(instance, graph, milp_optimizer)
|
||||
|
||||
@info "Optimizing MILP..."
|
||||
JuMP.optimize!(model.mip)
|
||||
|
||||
if !has_values(model.mip)
|
||||
@warn "No solution available"
|
||||
return OrderedDict()
|
||||
end
|
||||
|
||||
if marginal_costs
|
||||
@info "Re-optimizing with integer variables fixed..."
|
||||
all_vars = JuMP.all_variables(model.mip)
|
||||
vals = OrderedDict(var => JuMP.value(var) for var in all_vars)
|
||||
JuMP.set_optimizer(model.mip, lp_optimizer)
|
||||
for var in all_vars
|
||||
if JuMP.is_binary(var)
|
||||
JuMP.unset_binary(var)
|
||||
JuMP.fix(var, vals[var])
|
||||
end
|
||||
end
|
||||
JuMP.optimize!(model.mip)
|
||||
end
|
||||
|
||||
@info "Extracting solution..."
|
||||
solution = get_solution(model, marginal_costs=marginal_costs)
|
||||
|
||||
if output != nothing
|
||||
write(solution, output)
|
||||
end
|
||||
|
||||
return solution
|
||||
end
|
||||
|
||||
function solve(filename::AbstractString;
|
||||
heuristic=false,
|
||||
kwargs...,
|
||||
)
|
||||
@info "Reading $filename..."
|
||||
instance = RELOG.parsefile(filename)
|
||||
if heuristic && instance.time > 1
|
||||
@info "Solving single-period version..."
|
||||
compressed = _compress(instance)
|
||||
csol = solve(compressed;
|
||||
output=nothing,
|
||||
marginal_costs=false,
|
||||
kwargs...)
|
||||
@info "Filtering candidate locations..."
|
||||
selected_pairs = []
|
||||
for (plant_name, plant_dict) in csol["Plants"]
|
||||
for (location_name, location_dict) in plant_dict
|
||||
push!(selected_pairs, (plant_name, location_name))
|
||||
end
|
||||
end
|
||||
filtered_plants = []
|
||||
for p in instance.plants
|
||||
if (p.plant_name, p.location_name) in selected_pairs
|
||||
push!(filtered_plants, p)
|
||||
end
|
||||
end
|
||||
instance.plants = filtered_plants
|
||||
@info "Solving original version..."
|
||||
end
|
||||
sol = solve(instance; kwargs...)
|
||||
return sol
|
||||
end
|
||||
|
||||
|
||||
function get_solution(model::ManufacturingModel;
|
||||
marginal_costs=true,
|
||||
)
|
||||
mip, vars, eqs, graph, instance = model.mip, model.vars, model.eqs, model.graph, model.instance
|
||||
T = instance.time
|
||||
|
||||
output = OrderedDict(
|
||||
"Plants" => OrderedDict(),
|
||||
"Products" => OrderedDict(),
|
||||
"Costs" => OrderedDict(
|
||||
"Fixed operating (\$)" => zeros(T),
|
||||
"Variable operating (\$)" => zeros(T),
|
||||
"Opening (\$)" => zeros(T),
|
||||
"Transportation (\$)" => zeros(T),
|
||||
"Disposal (\$)" => zeros(T),
|
||||
"Expansion (\$)" => zeros(T),
|
||||
"Storage (\$)" => zeros(T),
|
||||
"Total (\$)" => zeros(T),
|
||||
),
|
||||
"Energy" => OrderedDict(
|
||||
"Plants (GJ)" => zeros(T),
|
||||
"Transportation (GJ)" => zeros(T),
|
||||
),
|
||||
"Emissions" => OrderedDict(
|
||||
"Plants (tonne)" => OrderedDict(),
|
||||
"Transportation (tonne)" => OrderedDict(),
|
||||
),
|
||||
)
|
||||
|
||||
plant_to_process_node = OrderedDict(n.location => n for n in graph.process_nodes)
|
||||
plant_to_shipping_nodes = OrderedDict()
|
||||
for p in instance.plants
|
||||
plant_to_shipping_nodes[p] = []
|
||||
for a in plant_to_process_node[p].outgoing_arcs
|
||||
push!(plant_to_shipping_nodes[p], a.dest)
|
||||
end
|
||||
end
|
||||
|
||||
# Products
|
||||
if marginal_costs
|
||||
for n in graph.collection_shipping_nodes
|
||||
location_dict = OrderedDict{Any, Any}(
|
||||
"Marginal cost (\$/tonne)" => [round(abs(JuMP.shadow_price(eqs.balance[n, t])), digits=2)
|
||||
for t in 1:T]
|
||||
)
|
||||
if n.product.name ∉ keys(output["Products"])
|
||||
output["Products"][n.product.name] = OrderedDict()
|
||||
end
|
||||
output["Products"][n.product.name][n.location.name] = location_dict
|
||||
end
|
||||
end
|
||||
|
||||
# Plants
|
||||
for plant in instance.plants
|
||||
skip_plant = true
|
||||
process_node = plant_to_process_node[plant]
|
||||
plant_dict = OrderedDict{Any, Any}(
|
||||
"Input" => OrderedDict(),
|
||||
"Output" => OrderedDict(
|
||||
"Send" => OrderedDict(),
|
||||
"Dispose" => OrderedDict(),
|
||||
),
|
||||
"Input product" => plant.input.name,
|
||||
"Total input (tonne)" => [0.0 for t in 1:T],
|
||||
"Total output" => OrderedDict(),
|
||||
"Latitude (deg)" => plant.latitude,
|
||||
"Longitude (deg)" => plant.longitude,
|
||||
"Capacity (tonne)" => [JuMP.value(vars.capacity[process_node, t])
|
||||
for t in 1:T],
|
||||
"Opening cost (\$)" => [JuMP.value(vars.open_plant[process_node, t]) *
|
||||
plant.sizes[1].opening_cost[t]
|
||||
for t in 1:T],
|
||||
"Fixed operating cost (\$)" => [JuMP.value(vars.is_open[process_node, t]) *
|
||||
plant.sizes[1].fixed_operating_cost[t] +
|
||||
JuMP.value(vars.expansion[process_node, t]) *
|
||||
slope_fix_oper_cost(plant, t)
|
||||
for t in 1:T],
|
||||
"Expansion cost (\$)" => [(if t == 1
|
||||
slope_open(plant, t) * JuMP.value(vars.expansion[process_node, t])
|
||||
else
|
||||
slope_open(plant, t) * (
|
||||
JuMP.value(vars.expansion[process_node, t]) -
|
||||
JuMP.value(vars.expansion[process_node, t - 1])
|
||||
)
|
||||
end)
|
||||
for t in 1:T],
|
||||
"Process (tonne)" => [JuMP.value(vars.process[process_node, t])
|
||||
for t in 1:T],
|
||||
"Variable operating cost (\$)" => [JuMP.value(vars.process[process_node, t]) *
|
||||
plant.sizes[1].variable_operating_cost[t]
|
||||
for t in 1:T],
|
||||
"Storage (tonne)" => [JuMP.value(vars.store[process_node, t])
|
||||
for t in 1:T],
|
||||
"Storage cost (\$)" => [JuMP.value(vars.store[process_node, t]) *
|
||||
plant.storage_cost[t]
|
||||
for t in 1:T],
|
||||
)
|
||||
output["Costs"]["Fixed operating (\$)"] += plant_dict["Fixed operating cost (\$)"]
|
||||
output["Costs"]["Variable operating (\$)"] += plant_dict["Variable operating cost (\$)"]
|
||||
output["Costs"]["Opening (\$)"] += plant_dict["Opening cost (\$)"]
|
||||
output["Costs"]["Expansion (\$)"] += plant_dict["Expansion cost (\$)"]
|
||||
output["Costs"]["Storage (\$)"] += plant_dict["Storage cost (\$)"]
|
||||
|
||||
# Inputs
|
||||
for a in process_node.incoming_arcs
|
||||
vals = [JuMP.value(vars.flow[a, t]) for t in 1:T]
|
||||
if sum(vals) <= 1e-3
|
||||
continue
|
||||
end
|
||||
skip_plant = false
|
||||
dict = OrderedDict{Any, Any}(
|
||||
"Amount (tonne)" => vals,
|
||||
"Distance (km)" => a.values["distance"],
|
||||
"Latitude (deg)" => a.source.location.latitude,
|
||||
"Longitude (deg)" => a.source.location.longitude,
|
||||
"Transportation cost (\$)" => a.source.product.transportation_cost .*
|
||||
vals .*
|
||||
a.values["distance"],
|
||||
"Transportation energy (J)" => vals .*
|
||||
a.values["distance"] .*
|
||||
a.source.product.transportation_energy,
|
||||
"Emissions (tonne)" => OrderedDict(),
|
||||
)
|
||||
emissions_dict = output["Emissions"]["Transportation (tonne)"]
|
||||
for (em_name, em_values) in a.source.product.transportation_emissions
|
||||
dict["Emissions (tonne)"][em_name] = em_values .*
|
||||
dict["Amount (tonne)"] .*
|
||||
a.values["distance"]
|
||||
if em_name ∉ keys(emissions_dict)
|
||||
emissions_dict[em_name] = zeros(T)
|
||||
end
|
||||
emissions_dict[em_name] += dict["Emissions (tonne)"][em_name]
|
||||
end
|
||||
if a.source.location isa CollectionCenter
|
||||
plant_name = "Origin"
|
||||
location_name = a.source.location.name
|
||||
else
|
||||
plant_name = a.source.location.plant_name
|
||||
location_name = a.source.location.location_name
|
||||
end
|
||||
|
||||
if plant_name ∉ keys(plant_dict["Input"])
|
||||
plant_dict["Input"][plant_name] = OrderedDict()
|
||||
end
|
||||
plant_dict["Input"][plant_name][location_name] = dict
|
||||
plant_dict["Total input (tonne)"] += vals
|
||||
output["Costs"]["Transportation (\$)"] += dict["Transportation cost (\$)"]
|
||||
output["Energy"]["Transportation (GJ)"] += dict["Transportation energy (J)"] / 1e9
|
||||
end
|
||||
|
||||
plant_dict["Energy (GJ)"] = plant_dict["Total input (tonne)"] .* plant.energy
|
||||
output["Energy"]["Plants (GJ)"] += plant_dict["Energy (GJ)"]
|
||||
|
||||
plant_dict["Emissions (tonne)"] = OrderedDict()
|
||||
emissions_dict = output["Emissions"]["Plants (tonne)"]
|
||||
for (em_name, em_values) in plant.emissions
|
||||
plant_dict["Emissions (tonne)"][em_name] = em_values .* plant_dict["Total input (tonne)"]
|
||||
if em_name ∉ keys(emissions_dict)
|
||||
emissions_dict[em_name] = zeros(T)
|
||||
end
|
||||
emissions_dict[em_name] += plant_dict["Emissions (tonne)"][em_name]
|
||||
end
|
||||
|
||||
# Outputs
|
||||
for shipping_node in plant_to_shipping_nodes[plant]
|
||||
product_name = shipping_node.product.name
|
||||
plant_dict["Total output"][product_name] = zeros(T)
|
||||
plant_dict["Output"]["Send"][product_name] = product_dict = OrderedDict()
|
||||
|
||||
disposal_amount = [JuMP.value(vars.dispose[shipping_node, t]) for t in 1:T]
|
||||
if sum(disposal_amount) > 1e-5
|
||||
skip_plant = false
|
||||
plant_dict["Output"]["Dispose"][product_name] = disposal_dict = OrderedDict()
|
||||
disposal_dict["Amount (tonne)"] = [JuMP.value(model.vars.dispose[shipping_node, t])
|
||||
for t in 1:T]
|
||||
disposal_dict["Cost (\$)"] = [disposal_dict["Amount (tonne)"][t] *
|
||||
plant.disposal_cost[shipping_node.product][t]
|
||||
for t in 1:T]
|
||||
plant_dict["Total output"][product_name] += disposal_amount
|
||||
output["Costs"]["Disposal (\$)"] += disposal_dict["Cost (\$)"]
|
||||
end
|
||||
|
||||
for a in shipping_node.outgoing_arcs
|
||||
vals = [JuMP.value(vars.flow[a, t]) for t in 1:T]
|
||||
if sum(vals) <= 1e-3
|
||||
continue
|
||||
end
|
||||
skip_plant = false
|
||||
dict = OrderedDict(
|
||||
"Amount (tonne)" => vals,
|
||||
"Distance (km)" => a.values["distance"],
|
||||
"Latitude (deg)" => a.dest.location.latitude,
|
||||
"Longitude (deg)" => a.dest.location.longitude,
|
||||
)
|
||||
if a.dest.location.plant_name ∉ keys(product_dict)
|
||||
product_dict[a.dest.location.plant_name] = OrderedDict()
|
||||
end
|
||||
product_dict[a.dest.location.plant_name][a.dest.location.location_name] = dict
|
||||
plant_dict["Total output"][product_name] += vals
|
||||
end
|
||||
end
|
||||
|
||||
if !skip_plant
|
||||
if plant.plant_name ∉ keys(output["Plants"])
|
||||
output["Plants"][plant.plant_name] = OrderedDict()
|
||||
end
|
||||
output["Plants"][plant.plant_name][plant.location_name] = plant_dict
|
||||
end
|
||||
end
|
||||
|
||||
output["Costs"]["Total (\$)"] = sum(values(output["Costs"]))
|
||||
return output
|
||||
end
|
||||
364
src/model/build.jl
Normal file
@@ -0,0 +1,364 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using JuMP, LinearAlgebra, Geodesy, ProgressBars, Printf, DataStructures, StochasticPrograms
|
||||
|
||||
function build_model(
|
||||
instance::Instance,
|
||||
graph::Graph,
|
||||
optimizer,
|
||||
)
|
||||
return build_model(
|
||||
instance,
|
||||
[graph],
|
||||
[1.0],
|
||||
optimizer=optimizer,
|
||||
method=:ef,
|
||||
)
|
||||
end
|
||||
|
||||
function build_model(
|
||||
instance::Instance,
|
||||
graphs::Vector{Graph},
|
||||
probs::Vector{Float64};
|
||||
optimizer,
|
||||
method=:ef,
|
||||
tol=0.1,
|
||||
)
|
||||
T = instance.time
|
||||
|
||||
@stochastic_model model begin
|
||||
# Stage 1: Build plants
|
||||
# =====================================================================
|
||||
@stage 1 begin
|
||||
pn = graphs[1].process_nodes
|
||||
PN = length(pn)
|
||||
|
||||
# Var: open_plant
|
||||
@decision(
|
||||
model,
|
||||
open_plant[n in 1:PN, t in 1:T],
|
||||
binary = true,
|
||||
)
|
||||
|
||||
# Var: is_open
|
||||
@decision(
|
||||
model,
|
||||
is_open[n in 1:PN, t in 1:T],
|
||||
binary = true,
|
||||
)
|
||||
|
||||
# Objective function
|
||||
@objective(
|
||||
model,
|
||||
Min,
|
||||
|
||||
# Opening, fixed operating costs
|
||||
sum(
|
||||
pn[n].location.sizes[1].opening_cost[t] * open_plant[n, t] +
|
||||
pn[n].location.sizes[1].fixed_operating_cost[t] * is_open[n, t]
|
||||
for n in 1:PN
|
||||
for t in 1:T
|
||||
),
|
||||
)
|
||||
|
||||
for t = 1:T, n in 1:PN
|
||||
# Plant is currently open if it was already open in the previous time period or
|
||||
# if it was built just now
|
||||
if t > 1
|
||||
@constraint(
|
||||
model,
|
||||
is_open[n, t] == is_open[n, t-1] + open_plant[n, t]
|
||||
)
|
||||
else
|
||||
@constraint(model, is_open[n, t] == open_plant[n, t])
|
||||
end
|
||||
|
||||
# Plant can only be opened during building period
|
||||
if t ∉ instance.building_period
|
||||
@constraint(model, open_plant[n, t] == 0)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Stage 2: Flows, disposal, capacity & storage
|
||||
# =====================================================================
|
||||
@stage 2 begin
|
||||
@uncertain graph
|
||||
pn = graph.process_nodes
|
||||
psn = graph.plant_shipping_nodes
|
||||
csn = graph.collection_shipping_nodes
|
||||
arcs = graph.arcs
|
||||
|
||||
A = length(arcs)
|
||||
PN = length(pn)
|
||||
CSN = length(csn)
|
||||
PSN = length(psn)
|
||||
|
||||
# Var: flow
|
||||
@recourse(
|
||||
model,
|
||||
flow[a in 1:A, t in 1:T],
|
||||
lower_bound = 0,
|
||||
)
|
||||
|
||||
# Var: plant_dispose
|
||||
@recourse(
|
||||
model,
|
||||
plant_dispose[n in 1:PSN, t in 1:T],
|
||||
lower_bound = 0,
|
||||
upper_bound = psn[n].location.disposal_limit[psn[n].product][t],
|
||||
)
|
||||
|
||||
# Var: collection_dispose
|
||||
@recourse(
|
||||
model,
|
||||
collection_dispose[n in 1:CSN, t in 1:T],
|
||||
lower_bound = 0,
|
||||
upper_bound = graph.collection_shipping_nodes[n].location.amount[t],
|
||||
)
|
||||
|
||||
# Var: collection_shortfall
|
||||
@recourse(
|
||||
model,
|
||||
collection_shortfall[n in 1:CSN, t in 1:T],
|
||||
lower_bound = 0,
|
||||
)
|
||||
|
||||
# Var: store
|
||||
@recourse(
|
||||
model,
|
||||
store[
|
||||
n in 1:PN,
|
||||
t in 1:T,
|
||||
],
|
||||
lower_bound = 0,
|
||||
upper_bound = pn[n].location.storage_limit,
|
||||
)
|
||||
|
||||
# Var: process
|
||||
@recourse(
|
||||
model,
|
||||
process[
|
||||
n in 1:PN,
|
||||
t in 1:T,
|
||||
],
|
||||
lower_bound = 0,
|
||||
)
|
||||
|
||||
# Var: capacity
|
||||
@recourse(
|
||||
model,
|
||||
capacity[
|
||||
n in 1:PN,
|
||||
t in 1:T,
|
||||
],
|
||||
lower_bound = 0,
|
||||
upper_bound = pn[n].location.sizes[2].capacity,
|
||||
)
|
||||
|
||||
# Var: expansion
|
||||
@recourse(
|
||||
model,
|
||||
expansion[
|
||||
n in 1:PN,
|
||||
t in 1:T,
|
||||
],
|
||||
lower_bound = 0,
|
||||
upper_bound = (
|
||||
pn[n].location.sizes[2].capacity -
|
||||
pn[n].location.sizes[1].capacity
|
||||
),
|
||||
)
|
||||
|
||||
# Objective function
|
||||
@objective(
|
||||
model,
|
||||
Min,
|
||||
sum(
|
||||
# Transportation costs
|
||||
pn[n].location.input.transportation_cost[t] *
|
||||
a.values["distance"] *
|
||||
flow[a.index,t]
|
||||
|
||||
for n in 1:PN
|
||||
for a in pn[n].incoming_arcs
|
||||
for t in 1:T
|
||||
) + sum(
|
||||
# Fixed operating costs (expansion)
|
||||
slope_fix_oper_cost(pn[n].location, t) * expansion[n, t] +
|
||||
|
||||
# Processing costs
|
||||
pn[n].location.sizes[1].variable_operating_cost[t] * process[n, t] +
|
||||
|
||||
# Storage costs
|
||||
pn[n].location.storage_cost[t] * store[n, t] +
|
||||
|
||||
# Expansion costs
|
||||
(
|
||||
t < T ? (
|
||||
(
|
||||
slope_open(pn[n].location, t) -
|
||||
slope_open(pn[n].location, t + 1)
|
||||
) * expansion[n, t]
|
||||
) : slope_open(pn[n].location, t) * expansion[n, t]
|
||||
)
|
||||
|
||||
for n in 1:PN
|
||||
for t in 1:T
|
||||
) + sum(
|
||||
# Disposal costs (plants)
|
||||
psn[n].location.disposal_cost[psn[n].product][t] * plant_dispose[n, t]
|
||||
for n in 1:PSN
|
||||
for t in 1:T
|
||||
) + sum(
|
||||
# Disposal costs (collection centers)
|
||||
csn[n].location.product.disposal_cost[t] * collection_dispose[n, t]
|
||||
for n in 1:CSN
|
||||
for t in 1:T
|
||||
) + sum(
|
||||
# Collection shortfall
|
||||
1e4 * collection_shortfall[n, t]
|
||||
for n in 1:CSN
|
||||
for t in 1:T
|
||||
)
|
||||
)
|
||||
|
||||
# Process node constraints
|
||||
for t = 1:T, n in 1:PN
|
||||
node = pn[n]
|
||||
|
||||
# Output amount is implied by amount processed
|
||||
for arc in node.outgoing_arcs
|
||||
@constraint(
|
||||
model,
|
||||
flow[arc.index, t] == arc.values["weight"] * process[n, t]
|
||||
)
|
||||
end
|
||||
|
||||
# If plant is closed, capacity is zero
|
||||
@constraint(
|
||||
model,
|
||||
capacity[n, t] <= node.location.sizes[2].capacity * is_open[n, t]
|
||||
)
|
||||
|
||||
# If plant is open, capacity is greater than base
|
||||
@constraint(
|
||||
model,
|
||||
capacity[n, t] >= node.location.sizes[1].capacity * is_open[n, t]
|
||||
)
|
||||
|
||||
# Capacity is linked to expansion
|
||||
@constraint(
|
||||
model,
|
||||
capacity[n, t] <=
|
||||
node.location.sizes[1].capacity + expansion[n, t]
|
||||
)
|
||||
|
||||
# Can only process up to capacity
|
||||
@constraint(model, process[n, t] <= capacity[n, t])
|
||||
|
||||
if t > 1
|
||||
# Plant capacity can only increase over time
|
||||
@constraint(model, capacity[n, t] >= capacity[n, t-1])
|
||||
@constraint(model, expansion[n, t] >= expansion[n, t-1])
|
||||
end
|
||||
|
||||
# Amount received equals amount processed plus stored
|
||||
store_in = 0
|
||||
if t > 1
|
||||
store_in = store[n, t-1]
|
||||
end
|
||||
if t == T
|
||||
@constraint(model, store[n, t] == 0)
|
||||
end
|
||||
@constraint(
|
||||
model,
|
||||
sum(
|
||||
flow[arc.index, t]
|
||||
for arc in node.incoming_arcs
|
||||
) + store_in == store[n, t] + process[n, t]
|
||||
)
|
||||
|
||||
end
|
||||
|
||||
# Material flow at collection shipping nodes
|
||||
@constraint(
|
||||
model,
|
||||
eq_balance_centers[
|
||||
n in 1:CSN,
|
||||
t in 1:T,
|
||||
],
|
||||
sum(
|
||||
flow[arc.index, t]
|
||||
for arc in csn[n].outgoing_arcs
|
||||
) == csn[n].location.amount[t] - collection_dispose[n, t] - collection_shortfall[n, t]
|
||||
)
|
||||
|
||||
# Material flow at plant shipping nodes
|
||||
@constraint(
|
||||
model,
|
||||
eq_balance_plant[
|
||||
n in 1:PSN,
|
||||
t in 1:T,
|
||||
],
|
||||
sum(flow[a.index, t] for a in psn[n].incoming_arcs) ==
|
||||
sum(flow[a.index, t] for a in psn[n].outgoing_arcs) +
|
||||
plant_dispose[n, t]
|
||||
)
|
||||
|
||||
# Enforce product disposal limit at collection centers
|
||||
for t in 1:T, prod in instance.products
|
||||
if isempty(prod.collection_centers)
|
||||
continue
|
||||
end
|
||||
@constraint(
|
||||
model,
|
||||
sum(
|
||||
collection_dispose[n, t]
|
||||
for n in 1:CSN
|
||||
if csn[n].product.name == prod.name
|
||||
) <= prod.disposal_limit[t]
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
ξ = [
|
||||
@scenario graph = graphs[i] probability = probs[i]
|
||||
for i in 1:length(graphs)
|
||||
]
|
||||
|
||||
if method == :ef
|
||||
sp = instantiate(model, ξ; optimizer=optimizer)
|
||||
elseif method == :lshaped
|
||||
sp = instantiate(model, ξ; optimizer=LShaped.Optimizer)
|
||||
set_optimizer_attribute(sp, MasterOptimizer(), optimizer)
|
||||
set_optimizer_attribute(sp, SubProblemOptimizer(), optimizer)
|
||||
set_optimizer_attribute(sp, RelativeTolerance(), tol)
|
||||
else
|
||||
error("unknown method: $method")
|
||||
end
|
||||
|
||||
return sp
|
||||
end

function slope_open(plant, t)
    if plant.sizes[2].capacity <= plant.sizes[1].capacity
        0.0
    else
        (plant.sizes[2].opening_cost[t] - plant.sizes[1].opening_cost[t]) /
        (plant.sizes[2].capacity - plant.sizes[1].capacity)
    end
end

function slope_fix_oper_cost(plant, t)
    if plant.sizes[2].capacity <= plant.sizes[1].capacity
        0.0
    else
        (plant.sizes[2].fixed_operating_cost[t] - plant.sizes[1].fixed_operating_cost[t]) /
        (plant.sizes[2].capacity - plant.sizes[1].capacity)
    end
end
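As a quick numeric illustration of the piecewise-linear expansion cost that `slope_open` computes (the plant data below is invented for the example, not taken from this diff): a plant whose base size holds 100 tonnes and costs 1,000 to open, and whose maximum size holds 500 tonnes and costs 3,000, is charged (3000 - 1000) / (500 - 100) = 5 per extra tonne of capacity.

```julia
# Hypothetical two-size plant, mimicking only the fields slope_open reads
plant = (sizes = (
    (capacity = 100.0, opening_cost = [1000.0]),
    (capacity = 500.0, opening_cost = [3000.0]),
),)

slope_open(plant, 1)  # returns 5.0: each extra tonne of capacity costs $5 to open
```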
265
src/model/getsol.jl
Normal file
@@ -0,0 +1,265 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using JuMP, LinearAlgebra, Geodesy, ProgressBars, Printf, DataStructures
|
||||
|
||||
function get_solution(
|
||||
instance,
|
||||
graph,
|
||||
model,
|
||||
scenario_index::Int=1;
|
||||
marginal_costs=false,
|
||||
)
|
||||
value(x) = StochasticPrograms.value(x, scenario_index)
|
||||
ivalue(x) = StochasticPrograms.value(x)
|
||||
shadow_price(x) = StochasticPrograms.shadow_price(x, scenario_index)
|
||||
|
||||
T = instance.time
|
||||
|
||||
pn = graph.process_nodes
|
||||
psn = graph.plant_shipping_nodes
|
||||
csn = graph.collection_shipping_nodes
|
||||
arcs = graph.arcs
|
||||
|
||||
A = length(arcs)
|
||||
PN = length(pn)
|
||||
CSN = length(csn)
|
||||
PSN = length(psn)
|
||||
|
||||
flow = model[2, :flow]
|
||||
|
||||
output = OrderedDict(
|
||||
"Plants" => OrderedDict(),
|
||||
"Products" => OrderedDict(),
|
||||
"Costs" => OrderedDict(
|
||||
"Fixed operating (\$)" => zeros(T),
|
||||
"Variable operating (\$)" => zeros(T),
|
||||
"Opening (\$)" => zeros(T),
|
||||
"Transportation (\$)" => zeros(T),
|
||||
"Disposal (\$)" => zeros(T),
|
||||
"Expansion (\$)" => zeros(T),
|
||||
"Storage (\$)" => zeros(T),
|
||||
"Total (\$)" => zeros(T),
|
||||
),
|
||||
"Energy" =>
|
||||
OrderedDict("Plants (GJ)" => zeros(T), "Transportation (GJ)" => zeros(T)),
|
||||
"Emissions" => OrderedDict(
|
||||
"Plants (tonne)" => OrderedDict(),
|
||||
"Transportation (tonne)" => OrderedDict(),
|
||||
),
|
||||
)
|
||||
|
||||
pn = graph.process_nodes
|
||||
psn = graph.plant_shipping_nodes
|
||||
|
||||
plant_to_process_node_index = OrderedDict(
|
||||
pn[n].location => n
|
||||
for n in 1:length(pn)
|
||||
)
|
||||
|
||||
plant_to_shipping_node_indices = OrderedDict(p => [] for p in instance.plants)
|
||||
for n in 1:length(psn)
|
||||
push!(plant_to_shipping_node_indices[psn[n].location], n)
|
||||
end
|
||||
|
||||
# Products
|
||||
for n in 1:CSN
|
||||
node = csn[n]
|
||||
location_dict = OrderedDict{Any,Any}(
|
||||
"Latitude (deg)" => node.location.latitude,
|
||||
"Longitude (deg)" => node.location.longitude,
|
||||
"Amount (tonne)" => node.location.amount,
|
||||
"Dispose (tonne)" => [
|
||||
value(model[2, :collection_dispose][n, t])
|
||||
for t = 1:T
|
||||
],
|
||||
"Disposal cost (\$)" => [
|
||||
value(model[2, :collection_dispose][n, t]) *
|
||||
node.location.product.disposal_cost[t]
|
||||
for t = 1:T
|
||||
]
|
||||
)
|
||||
if marginal_costs
|
||||
location_dict["Marginal cost (\$/tonne)"] = [
|
||||
round(abs(shadow_price(model[2, :eq_balance_centers][n, t])), digits=2) for t = 1:T
|
||||
]
|
||||
end
|
||||
if node.product.name ∉ keys(output["Products"])
|
||||
output["Products"][node.product.name] = OrderedDict()
|
||||
end
|
||||
output["Products"][node.product.name][node.location.name] = location_dict
|
||||
end
|
||||
|
||||
# Plants
|
||||
for plant in instance.plants
|
||||
skip_plant = true
|
||||
n = plant_to_process_node_index[plant]
|
||||
process_node = pn[n]
|
||||
plant_dict = OrderedDict{Any,Any}(
|
||||
"Input" => OrderedDict(),
|
||||
"Output" =>
|
||||
OrderedDict("Send" => OrderedDict(), "Dispose" => OrderedDict()),
|
||||
"Input product" => plant.input.name,
|
||||
"Total input (tonne)" => [0.0 for t = 1:T],
|
||||
"Total output" => OrderedDict(),
|
||||
"Latitude (deg)" => plant.latitude,
|
||||
"Longitude (deg)" => plant.longitude,
|
||||
"Capacity (tonne)" =>
|
||||
[value(model[2, :capacity][n, t]) for t = 1:T],
|
||||
"Opening cost (\$)" => [
|
||||
ivalue(model[1, :open_plant][n, t]) *
|
||||
plant.sizes[1].opening_cost[t] for t = 1:T
|
||||
],
|
||||
"Fixed operating cost (\$)" => [
|
||||
ivalue(model[1, :is_open][n, t]) *
|
||||
plant.sizes[1].fixed_operating_cost[t] +
|
||||
value(model[2, :expansion][n, t]) *
|
||||
slope_fix_oper_cost(plant, t) for t = 1:T
|
||||
],
|
||||
"Expansion cost (\$)" => [
|
||||
(
|
||||
if t == 1
|
||||
slope_open(plant, t) * value(model[2, :expansion][n, t])
|
||||
else
|
||||
slope_open(plant, t) * (
|
||||
value(model[2, :expansion][n, t]) -
|
||||
value(model[2, :expansion][n, t-1])
|
||||
)
|
||||
end
|
||||
) for t = 1:T
|
||||
],
|
||||
"Process (tonne)" =>
|
||||
[value(model[2, :process][n, t]) for t = 1:T],
|
||||
"Variable operating cost (\$)" => [
|
||||
value(model[2, :process][n, t]) *
|
||||
plant.sizes[1].variable_operating_cost[t] for t = 1:T
|
||||
],
|
||||
"Storage (tonne)" =>
|
||||
[value(model[2, :store][n, t]) for t = 1:T],
|
||||
"Storage cost (\$)" => [
|
||||
value(model[2, :store][n, t]) * plant.storage_cost[t]
|
||||
for t = 1:T
|
||||
],
|
||||
)
|
||||
output["Costs"]["Fixed operating (\$)"] += plant_dict["Fixed operating cost (\$)"]
|
||||
output["Costs"]["Variable operating (\$)"] +=
|
||||
plant_dict["Variable operating cost (\$)"]
|
||||
output["Costs"]["Opening (\$)"] += plant_dict["Opening cost (\$)"]
|
||||
output["Costs"]["Expansion (\$)"] += plant_dict["Expansion cost (\$)"]
|
||||
output["Costs"]["Storage (\$)"] += plant_dict["Storage cost (\$)"]
|
||||
|
||||
# Inputs
|
||||
for a in process_node.incoming_arcs
|
||||
vals = [value(flow[a.index, t]) for t = 1:T]
|
||||
if sum(vals) <= 1e-3
|
||||
continue
|
||||
end
|
||||
skip_plant = false
|
||||
dict = OrderedDict{Any,Any}(
|
||||
"Amount (tonne)" => vals,
|
||||
"Distance (km)" => a.values["distance"],
|
||||
"Latitude (deg)" => a.source.location.latitude,
|
||||
"Longitude (deg)" => a.source.location.longitude,
|
||||
"Transportation cost (\$)" =>
|
||||
a.source.product.transportation_cost .* vals .* a.values["distance"],
|
||||
"Transportation energy (J)" =>
|
||||
vals .* a.values["distance"] .* a.source.product.transportation_energy,
|
||||
"Emissions (tonne)" => OrderedDict(),
|
||||
)
|
||||
emissions_dict = output["Emissions"]["Transportation (tonne)"]
|
||||
for (em_name, em_values) in a.source.product.transportation_emissions
|
||||
dict["Emissions (tonne)"][em_name] =
|
||||
em_values .* dict["Amount (tonne)"] .* a.values["distance"]
|
||||
if em_name ∉ keys(emissions_dict)
|
||||
emissions_dict[em_name] = zeros(T)
|
||||
end
|
||||
emissions_dict[em_name] += dict["Emissions (tonne)"][em_name]
|
||||
end
|
||||
if a.source.location isa CollectionCenter
|
||||
plant_name = "Origin"
|
||||
location_name = a.source.location.name
|
||||
else
|
||||
plant_name = a.source.location.plant_name
|
||||
location_name = a.source.location.location_name
|
||||
end
|
||||
|
||||
if plant_name ∉ keys(plant_dict["Input"])
|
||||
plant_dict["Input"][plant_name] = OrderedDict()
|
||||
end
|
||||
plant_dict["Input"][plant_name][location_name] = dict
|
||||
plant_dict["Total input (tonne)"] += vals
|
||||
output["Costs"]["Transportation (\$)"] += dict["Transportation cost (\$)"]
|
||||
output["Energy"]["Transportation (GJ)"] +=
|
||||
dict["Transportation energy (J)"] / 1e9
|
||||
end
|
||||
|
||||
plant_dict["Energy (GJ)"] = plant_dict["Total input (tonne)"] .* plant.energy
|
||||
output["Energy"]["Plants (GJ)"] += plant_dict["Energy (GJ)"]
|
||||
|
||||
plant_dict["Emissions (tonne)"] = OrderedDict()
|
||||
emissions_dict = output["Emissions"]["Plants (tonne)"]
|
||||
for (em_name, em_values) in plant.emissions
|
||||
plant_dict["Emissions (tonne)"][em_name] =
|
||||
em_values .* plant_dict["Total input (tonne)"]
|
||||
if em_name ∉ keys(emissions_dict)
|
||||
emissions_dict[em_name] = zeros(T)
|
||||
end
|
||||
emissions_dict[em_name] += plant_dict["Emissions (tonne)"][em_name]
|
||||
end
|
||||
|
||||
# Outputs
|
||||
for n2 in plant_to_shipping_node_indices[plant]
|
||||
shipping_node = psn[n2]
|
||||
product_name = shipping_node.product.name
|
||||
plant_dict["Total output"][product_name] = zeros(T)
|
||||
plant_dict["Output"]["Send"][product_name] = product_dict = OrderedDict()
|
||||
|
||||
disposal_amount =
|
||||
[value(model[2, :plant_dispose][n2, t]) for t = 1:T]
|
||||
if sum(disposal_amount) > 1e-5
|
||||
skip_plant = false
|
||||
plant_dict["Output"]["Dispose"][product_name] =
|
||||
disposal_dict = OrderedDict()
|
||||
disposal_dict["Amount (tonne)"] =
|
||||
[value(model[2, :plant_dispose][n2, t]) for t = 1:T]
|
||||
disposal_dict["Cost (\$)"] = [
|
||||
disposal_dict["Amount (tonne)"][t] *
|
||||
plant.disposal_cost[shipping_node.product][t] for t = 1:T
|
||||
]
|
||||
plant_dict["Total output"][product_name] += disposal_amount
|
||||
output["Costs"]["Disposal (\$)"] += disposal_dict["Cost (\$)"]
|
||||
end
|
||||
|
||||
for a in shipping_node.outgoing_arcs
|
||||
vals = [value(flow[a.index, t]) for t = 1:T]
|
||||
if sum(vals) <= 1e-3
|
||||
continue
|
||||
end
|
||||
skip_plant = false
|
||||
dict = OrderedDict(
|
||||
"Amount (tonne)" => vals,
|
||||
"Distance (km)" => a.values["distance"],
|
||||
"Latitude (deg)" => a.dest.location.latitude,
|
||||
"Longitude (deg)" => a.dest.location.longitude,
|
||||
)
|
||||
if a.dest.location.plant_name ∉ keys(product_dict)
|
||||
product_dict[a.dest.location.plant_name] = OrderedDict()
|
||||
end
|
||||
product_dict[a.dest.location.plant_name][a.dest.location.location_name] =
|
||||
dict
|
||||
plant_dict["Total output"][product_name] += vals
|
||||
end
|
||||
end
|
||||
|
||||
if !skip_plant
|
||||
if plant.plant_name ∉ keys(output["Plants"])
|
||||
output["Plants"][plant.plant_name] = OrderedDict()
|
||||
end
|
||||
output["Plants"][plant.plant_name][plant.location_name] = plant_dict
|
||||
end
|
||||
end
|
||||
|
||||
output["Costs"]["Total (\$)"] = sum(values(output["Costs"]))
|
||||
return output
|
||||
end
|
||||
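For illustration, a small sketch of how the nested dictionary returned by `get_solution` might be inspected after the stochastic program has been optimized; the variable names are assumptions made for this example:

```julia
sol = RELOG.get_solution(instance, graph, sp, 1)   # solution for scenario 1
total_cost = sum(sol["Costs"]["Total (\$)"])       # total cost across all time periods
transport_gj = sol["Energy"]["Transportation (GJ)"]
```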
137
src/model/solve.jl
Normal file
@@ -0,0 +1,137 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

using JuMP, LinearAlgebra, Geodesy, HiGHS, ProgressBars, Printf, DataStructures

function _get_default_milp_optimizer()
    return optimizer_with_attributes(HiGHS.Optimizer)
end

function _get_default_lp_optimizer()
    return optimizer_with_attributes(HiGHS.Optimizer)
end


function _print_graph_stats(instance::Instance, graph::Graph)::Nothing
    @info @sprintf(" %12d time periods", instance.time)
    @info @sprintf(" %12d process nodes", length(graph.process_nodes))
    @info @sprintf(" %12d shipping nodes (plant)", length(graph.plant_shipping_nodes))
    @info @sprintf(
        " %12d shipping nodes (collection)",
        length(graph.collection_shipping_nodes)
    )
    @info @sprintf(" %12d arcs", length(graph.arcs))
    return
end

function solve_stochastic(;
    scenarios::Vector{String},
    probs::Vector{Float64},
    optimizer,
    method=:ef,
    tol=0.1,
)
    @info "Reading instance files..."
    instances = [parsefile(sc) for sc in scenarios]

    @info "Building graphs..."
    graphs = [build_graph(inst) for inst in instances]

    @info "Building stochastic model..."
    sp = RELOG.build_model(instances[1], graphs, probs; optimizer, method, tol)

    @info "Optimizing stochastic model..."
    optimize!(sp)

    @info "Extracting solution..."
    solutions = [
        get_solution(instances[i], graphs[i], sp, i)
        for i in 1:length(instances)
    ]

    return solutions
end

function solve(
    instance::Instance;
    optimizer=HiGHS.Optimizer,
    marginal_costs=true,
    return_model=false
)
    @info "Building graph..."
    graph = RELOG.build_graph(instance)
    _print_graph_stats(instance, graph)

    @info "Building model..."
    model = RELOG.build_model(instance, [graph], [1.0]; optimizer)

    @info "Optimizing model..."
    optimize!(model)
    if !has_values(model)
        error("No solution available")
    end

    @info "Extracting solution..."
    solution = get_solution(instance, graph, model, 1)

    if marginal_costs
        @info "Re-optimizing with integer variables fixed..."
        open_plant_vals = value.(model[1, :open_plant])
        is_open_vals = value.(model[1, :is_open])

        for n in 1:length(graph.process_nodes), t in 1:instance.time
            unset_binary(model[1, :open_plant][n, t])
            unset_binary(model[1, :is_open][n, t])
            fix(model[1, :open_plant][n, t], open_plant_vals[n, t])
            fix(model[1, :is_open][n, t], is_open_vals[n, t])
        end
        optimize!(model)
        if has_values(model)
            @info "Extracting solution..."
            solution = get_solution(instance, graph, model, 1, marginal_costs=true)
        else
            @warn "Error computing marginal costs. Ignoring."
        end
    end

    if return_model
        return solution, model
    else
        return solution
    end
end

function solve(filename::AbstractString; heuristic=false, kwargs...)
    @info "Reading $filename..."
    instance = RELOG.parsefile(filename)
    if heuristic && instance.time > 1
        @info "Solving single-period version..."
        compressed = _compress(instance)
        csol, model = solve(compressed; marginal_costs=false, return_model=true, kwargs...)
        @info "Filtering candidate locations..."
        selected_pairs = []
        for (plant_name, plant_dict) in csol["Plants"]
            for (location_name, location_dict) in plant_dict
                push!(selected_pairs, (plant_name, location_name))
            end
        end
        filtered_plants = []
        for p in instance.plants
            if (p.plant_name, p.location_name) in selected_pairs
                push!(filtered_plants, p)
            end
        end
        instance.plants = filtered_plants
        @info "Solving original version..."
    end
    sol = solve(instance; kwargs...)
    return sol
end
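For illustration, the two entry points defined above might be driven as follows; the JSON file names are placeholders invented for this sketch, not files shipped with the repository:

```julia
using RELOG, HiGHS

# Deterministic solve, with the heuristic pre-filtering of candidate locations
solution = RELOG.solve("instance.json", heuristic=true)

# Two-scenario stochastic solve (extensive form), equal probabilities
solutions = RELOG.solve_stochastic(
    scenarios=["scenario_a.json", "scenario_b.json"],
    probs=[0.5, 0.5],
    optimizer=HiGHS.Optimizer,
)
```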
278
src/reports.jl
@@ -1,278 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataFrames
|
||||
using CSV
|
||||
|
||||
function plants_report(solution)::DataFrame
|
||||
df = DataFrame()
|
||||
df."plant type" = String[]
|
||||
df."location name" = String[]
|
||||
df."year" = Int[]
|
||||
df."latitude (deg)" = Float64[]
|
||||
df."longitude (deg)" = Float64[]
|
||||
df."capacity (tonne)" = Float64[]
|
||||
df."amount processed (tonne)" = Float64[]
|
||||
df."amount received (tonne)" = Float64[]
|
||||
df."amount in storage (tonne)" = Float64[]
|
||||
df."utilization factor (%)" = Float64[]
|
||||
df."energy (GJ)" = Float64[]
|
||||
df."opening cost (\$)" = Float64[]
|
||||
df."expansion cost (\$)" = Float64[]
|
||||
df."fixed operating cost (\$)" = Float64[]
|
||||
df."variable operating cost (\$)" = Float64[]
|
||||
df."storage cost (\$)" = Float64[]
|
||||
df."total cost (\$)" = Float64[]
|
||||
T = length(solution["Energy"]["Plants (GJ)"])
|
||||
for (plant_name, plant_dict) in solution["Plants"]
|
||||
for (location_name, location_dict) in plant_dict
|
||||
for year in 1:T
|
||||
capacity = round(location_dict["Capacity (tonne)"][year], digits=2)
|
||||
received = round(location_dict["Total input (tonne)"][year], digits=2)
|
||||
processed = round(location_dict["Process (tonne)"][year], digits=2)
|
||||
in_storage = round(location_dict["Storage (tonne)"][year], digits=2)
|
||||
utilization_factor = round(processed / capacity * 100.0, digits=2)
|
||||
energy = round(location_dict["Energy (GJ)"][year], digits=2)
|
||||
latitude = round(location_dict["Latitude (deg)"], digits=6)
|
||||
longitude = round(location_dict["Longitude (deg)"], digits=6)
|
||||
opening_cost = round(location_dict["Opening cost (\$)"][year], digits=2)
|
||||
expansion_cost = round(location_dict["Expansion cost (\$)"][year], digits=2)
|
||||
fixed_cost = round(location_dict["Fixed operating cost (\$)"][year], digits=2)
|
||||
var_cost = round(location_dict["Variable operating cost (\$)"][year], digits=2)
|
||||
storage_cost = round(location_dict["Storage cost (\$)"][year], digits=2)
|
||||
total_cost = round(opening_cost + expansion_cost + fixed_cost +
|
||||
var_cost + storage_cost, digits=2)
|
||||
push!(df, [
|
||||
plant_name,
|
||||
location_name,
|
||||
year,
|
||||
latitude,
|
||||
longitude,
|
||||
capacity,
|
||||
processed,
|
||||
received,
|
||||
in_storage,
|
||||
utilization_factor,
|
||||
energy,
|
||||
opening_cost,
|
||||
expansion_cost,
|
||||
fixed_cost,
|
||||
var_cost,
|
||||
storage_cost,
|
||||
total_cost,
|
||||
])
|
||||
end
|
||||
end
|
||||
end
|
||||
return df
|
||||
end
|
||||
|
||||
function plant_outputs_report(solution)::DataFrame
|
||||
df = DataFrame()
|
||||
df."plant type" = String[]
|
||||
df."location name" = String[]
|
||||
df."year" = Int[]
|
||||
df."product name" = String[]
|
||||
df."amount produced (tonne)" = Float64[]
|
||||
df."amount sent (tonne)" = Float64[]
|
||||
df."amount disposed (tonne)" = Float64[]
|
||||
df."disposal cost (\$)" = Float64[]
|
||||
T = length(solution["Energy"]["Plants (GJ)"])
|
||||
for (plant_name, plant_dict) in solution["Plants"]
|
||||
for (location_name, location_dict) in plant_dict
|
||||
for (product_name, amount_produced) in location_dict["Total output"]
|
||||
send_dict = location_dict["Output"]["Send"]
|
||||
disposal_dict = location_dict["Output"]["Dispose"]
|
||||
|
||||
sent = zeros(T)
|
||||
if product_name in keys(send_dict)
|
||||
for (dst_plant_name, dst_plant_dict) in send_dict[product_name]
|
||||
for (dst_location_name, dst_location_dict) in dst_plant_dict
|
||||
sent += dst_location_dict["Amount (tonne)"]
|
||||
end
|
||||
end
|
||||
end
|
||||
sent = round.(sent, digits=2)
|
||||
|
||||
disposal_amount = zeros(T)
|
||||
disposal_cost = zeros(T)
|
||||
if product_name in keys(disposal_dict)
|
||||
disposal_amount += disposal_dict[product_name]["Amount (tonne)"]
|
||||
disposal_cost += disposal_dict[product_name]["Cost (\$)"]
|
||||
end
|
||||
disposal_amount = round.(disposal_amount, digits=2)
|
||||
disposal_cost = round.(disposal_cost, digits=2)
|
||||
|
||||
for year in 1:T
|
||||
push!(df, [
|
||||
plant_name,
|
||||
location_name,
|
||||
year,
|
||||
product_name,
|
||||
round(amount_produced[year], digits=2),
|
||||
sent[year],
|
||||
disposal_amount[year],
|
||||
disposal_cost[year],
|
||||
])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
return df
|
||||
end
|
||||
|
||||
|
||||
function plant_emissions_report(solution)::DataFrame
|
||||
df = DataFrame()
|
||||
df."plant type" = String[]
|
||||
df."location name" = String[]
|
||||
df."year" = Int[]
|
||||
df."emission type" = String[]
|
||||
df."emission amount (tonne)" = Float64[]
|
||||
T = length(solution["Energy"]["Plants (GJ)"])
|
||||
for (plant_name, plant_dict) in solution["Plants"]
|
||||
for (location_name, location_dict) in plant_dict
|
||||
for (emission_name, emission_amount) in location_dict["Emissions (tonne)"]
|
||||
for year in 1:T
|
||||
push!(df, [
|
||||
plant_name,
|
||||
location_name,
|
||||
year,
|
||||
emission_name,
|
||||
round(emission_amount[year], digits=2),
|
||||
])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
return df
|
||||
end
|
||||
|
||||
|
||||
function transportation_report(solution)::DataFrame
|
||||
df = DataFrame()
|
||||
df."source type" = String[]
|
||||
df."source location name" = String[]
|
||||
df."source latitude (deg)" = Float64[]
|
||||
df."source longitude (deg)" = Float64[]
|
||||
df."destination type" = String[]
|
||||
df."destination location name" = String[]
|
||||
df."destination latitude (deg)" = Float64[]
|
||||
df."destination longitude (deg)" = Float64[]
|
||||
df."product" = String[]
|
||||
df."year" = Int[]
|
||||
df."distance (km)" = Float64[]
|
||||
df."amount (tonne)" = Float64[]
|
||||
df."amount-distance (tonne-km)" = Float64[]
|
||||
df."transportation cost (\$)" = Float64[]
|
||||
df."transportation energy (GJ)" = Float64[]
|
||||
|
||||
T = length(solution["Energy"]["Plants (GJ)"])
|
||||
for (dst_plant_name, dst_plant_dict) in solution["Plants"]
|
||||
for (dst_location_name, dst_location_dict) in dst_plant_dict
|
||||
for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
|
||||
for (src_location_name, src_location_dict) in src_plant_dict
|
||||
for year in 1:T
|
||||
push!(df, [
|
||||
src_plant_name,
|
||||
src_location_name,
|
||||
round(src_location_dict["Latitude (deg)"], digits=6),
|
||||
round(src_location_dict["Longitude (deg)"], digits=6),
|
||||
dst_plant_name,
|
||||
dst_location_name,
|
||||
round(dst_location_dict["Latitude (deg)"], digits=6),
|
||||
round(dst_location_dict["Longitude (deg)"], digits=6),
|
||||
dst_location_dict["Input product"],
|
||||
year,
|
||||
round(src_location_dict["Distance (km)"], digits=2),
|
||||
round(src_location_dict["Amount (tonne)"][year], digits=2),
|
||||
round(src_location_dict["Amount (tonne)"][year] *
|
||||
src_location_dict["Distance (km)"],
|
||||
digits=2),
|
||||
round(src_location_dict["Transportation cost (\$)"][year], digits=2),
|
||||
round(src_location_dict["Transportation energy (J)"][year] / 1e9, digits=2),
|
||||
])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
return df
|
||||
end
|
||||
|
||||
|
||||
function transportation_emissions_report(solution)::DataFrame
|
||||
df = DataFrame()
|
||||
df."source type" = String[]
|
||||
df."source location name" = String[]
|
||||
df."source latitude (deg)" = Float64[]
|
||||
df."source longitude (deg)" = Float64[]
|
||||
df."destination type" = String[]
|
||||
df."destination location name" = String[]
|
||||
df."destination latitude (deg)" = Float64[]
|
||||
df."destination longitude (deg)" = Float64[]
|
||||
df."product" = String[]
|
||||
df."year" = Int[]
|
||||
df."distance (km)" = Float64[]
|
||||
df."shipped amount (tonne)" = Float64[]
|
||||
df."shipped amount-distance (tonne-km)" = Float64[]
|
||||
df."emission type" = String[]
|
||||
df."emission amount (tonne)" = Float64[]
|
||||
|
||||
T = length(solution["Energy"]["Plants (GJ)"])
|
||||
for (dst_plant_name, dst_plant_dict) in solution["Plants"]
|
||||
for (dst_location_name, dst_location_dict) in dst_plant_dict
|
||||
for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
|
||||
for (src_location_name, src_location_dict) in src_plant_dict
|
||||
for (emission_name, emission_amount) in src_location_dict["Emissions (tonne)"]
|
||||
for year in 1:T
|
||||
push!(df, [
|
||||
src_plant_name,
|
||||
src_location_name,
|
||||
round(src_location_dict["Latitude (deg)"], digits=6),
|
||||
round(src_location_dict["Longitude (deg)"], digits=6),
|
||||
dst_plant_name,
|
||||
dst_location_name,
|
||||
round(dst_location_dict["Latitude (deg)"], digits=6),
|
||||
round(dst_location_dict["Longitude (deg)"], digits=6),
|
||||
dst_location_dict["Input product"],
|
||||
year,
|
||||
round(src_location_dict["Distance (km)"], digits=2),
|
||||
round(src_location_dict["Amount (tonne)"][year], digits=2),
|
||||
round(src_location_dict["Amount (tonne)"][year] *
|
||||
src_location_dict["Distance (km)"],
|
||||
digits=2),
|
||||
emission_name,
|
||||
round(emission_amount[year], digits=2),
|
||||
])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
return df
|
||||
end
|
||||
|
||||
function write(solution::AbstractDict, filename::AbstractString)
|
||||
@info "Writing solution: $filename"
|
||||
open(filename, "w") do file
|
||||
JSON.print(file, solution, 2)
|
||||
end
|
||||
end
|
||||
|
||||
write_plants_report(solution, filename) =
|
||||
CSV.write(filename, plants_report(solution))
|
||||
|
||||
write_plant_outputs_report(solution, filename) =
|
||||
CSV.write(filename, plant_outputs_report(solution))
|
||||
|
||||
write_plant_emissions_report(solution, filename) =
|
||||
CSV.write(filename, plant_emissions_report(solution))
|
||||
|
||||
write_transportation_report(solution, filename) =
|
||||
CSV.write(filename, transportation_report(solution))
|
||||
|
||||
write_transportation_emissions_report(solution, filename) =
|
||||
CSV.write(filename, transportation_emissions_report(solution))
|
||||
38
src/reports/plant_emissions.jl
Normal file
@@ -0,0 +1,38 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

using DataFrames
using CSV

function plant_emissions_report(solution)::DataFrame
    df = DataFrame()
    df."plant type" = String[]
    df."location name" = String[]
    df."year" = Int[]
    df."emission type" = String[]
    df."emission amount (tonne)" = Float64[]
    T = length(solution["Energy"]["Plants (GJ)"])
    for (plant_name, plant_dict) in solution["Plants"]
        for (location_name, location_dict) in plant_dict
            for (emission_name, emission_amount) in location_dict["Emissions (tonne)"]
                for year = 1:T
                    push!(
                        df,
                        [
                            plant_name,
                            location_name,
                            year,
                            emission_name,
                            round(emission_amount[year], digits = 2),
                        ],
                    )
                end
            end
        end
    end
    return df
end

write_plant_emissions_report(solution, filename) =
    CSV.write(filename, plant_emissions_report(solution))
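A short, hypothetical usage sketch for this report; it assumes `solution` is the dictionary returned by RELOG.solve, and the CSV file name is arbitrary:

```julia
# Write the per-plant emissions table to disk
RELOG.write_plant_emissions_report(solution, "plant_emissions.csv")

# Or inspect the in-memory DataFrame directly
df = RELOG.plant_emissions_report(solution)
first(df, 5)
```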
66
src/reports/plant_outputs.jl
Normal file
@@ -0,0 +1,66 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataFrames
|
||||
using CSV
|
||||
|
||||
function plant_outputs_report(solution)::DataFrame
|
||||
df = DataFrame()
|
||||
df."plant type" = String[]
|
||||
df."location name" = String[]
|
||||
df."year" = Int[]
|
||||
df."product name" = String[]
|
||||
df."amount produced (tonne)" = Float64[]
|
||||
df."amount sent (tonne)" = Float64[]
|
||||
df."amount disposed (tonne)" = Float64[]
|
||||
df."disposal cost (\$)" = Float64[]
|
||||
T = length(solution["Energy"]["Plants (GJ)"])
|
||||
for (plant_name, plant_dict) in solution["Plants"]
|
||||
for (location_name, location_dict) in plant_dict
|
||||
for (product_name, amount_produced) in location_dict["Total output"]
|
||||
send_dict = location_dict["Output"]["Send"]
|
||||
disposal_dict = location_dict["Output"]["Dispose"]
|
||||
|
||||
sent = zeros(T)
|
||||
if product_name in keys(send_dict)
|
||||
for (dst_plant_name, dst_plant_dict) in send_dict[product_name]
|
||||
for (dst_location_name, dst_location_dict) in dst_plant_dict
|
||||
sent += dst_location_dict["Amount (tonne)"]
|
||||
end
|
||||
end
|
||||
end
|
||||
sent = round.(sent, digits = 2)
|
||||
|
||||
disposal_amount = zeros(T)
|
||||
disposal_cost = zeros(T)
|
||||
if product_name in keys(disposal_dict)
|
||||
disposal_amount += disposal_dict[product_name]["Amount (tonne)"]
|
||||
disposal_cost += disposal_dict[product_name]["Cost (\$)"]
|
||||
end
|
||||
disposal_amount = round.(disposal_amount, digits = 2)
|
||||
disposal_cost = round.(disposal_cost, digits = 2)
|
||||
|
||||
for year = 1:T
|
||||
push!(
|
||||
df,
|
||||
[
|
||||
plant_name,
|
||||
location_name,
|
||||
year,
|
||||
product_name,
|
||||
round(amount_produced[year], digits = 2),
|
||||
sent[year],
|
||||
disposal_amount[year],
|
||||
disposal_cost[year],
|
||||
],
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
return df
|
||||
end
|
||||
|
||||
write_plant_outputs_report(solution, filename) =
|
||||
CSV.write(filename, plant_outputs_report(solution))
|
||||
79
src/reports/plants.jl
Normal file
@@ -0,0 +1,79 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

using DataFrames
using CSV

function plants_report(solution)::DataFrame
    df = DataFrame()
    df."plant type" = String[]
    df."location name" = String[]
    df."year" = Int[]
    df."latitude (deg)" = Float64[]
    df."longitude (deg)" = Float64[]
    df."capacity (tonne)" = Float64[]
    df."amount processed (tonne)" = Float64[]
    df."amount received (tonne)" = Float64[]
    df."amount in storage (tonne)" = Float64[]
    df."utilization factor (%)" = Float64[]
    df."energy (GJ)" = Float64[]
    df."opening cost (\$)" = Float64[]
    df."expansion cost (\$)" = Float64[]
    df."fixed operating cost (\$)" = Float64[]
    df."variable operating cost (\$)" = Float64[]
    df."storage cost (\$)" = Float64[]
    df."total cost (\$)" = Float64[]
    T = length(solution["Energy"]["Plants (GJ)"])
    for (plant_name, plant_dict) in solution["Plants"]
        for (location_name, location_dict) in plant_dict
            for year = 1:T
                capacity = round(location_dict["Capacity (tonne)"][year], digits = 2)
                received = round(location_dict["Total input (tonne)"][year], digits = 2)
                processed = round(location_dict["Process (tonne)"][year], digits = 2)
                in_storage = round(location_dict["Storage (tonne)"][year], digits = 2)
                utilization_factor = round(processed / capacity * 100.0, digits = 2)
                energy = round(location_dict["Energy (GJ)"][year], digits = 2)
                latitude = round(location_dict["Latitude (deg)"], digits = 6)
                longitude = round(location_dict["Longitude (deg)"], digits = 6)
                opening_cost = round(location_dict["Opening cost (\$)"][year], digits = 2)
                expansion_cost =
                    round(location_dict["Expansion cost (\$)"][year], digits = 2)
                fixed_cost =
                    round(location_dict["Fixed operating cost (\$)"][year], digits = 2)
                var_cost =
                    round(location_dict["Variable operating cost (\$)"][year], digits = 2)
                storage_cost = round(location_dict["Storage cost (\$)"][year], digits = 2)
                total_cost = round(
                    opening_cost + expansion_cost + fixed_cost + var_cost + storage_cost,
                    digits = 2,
                )
                push!(
                    df,
                    [
                        plant_name,
                        location_name,
                        year,
                        latitude,
                        longitude,
                        capacity,
                        processed,
                        received,
                        in_storage,
                        utilization_factor,
                        energy,
                        opening_cost,
                        expansion_cost,
                        fixed_cost,
                        var_cost,
                        storage_cost,
                        total_cost,
                    ],
                )
            end
        end
    end
    return df
end

write_plants_report(solution, filename) = CSV.write(filename, plants_report(solution))
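A minimal usage sketch for the report helpers above (the input path and CSV file names below are illustrative, not taken from this changeset):

using RELOG

# Solve an instance, then export plant-level results. plants_report returns a
# DataFrame with one row per plant type, location and year; write_plants_report
# writes the same table to CSV.
solution = RELOG.solve("instance.json")            # hypothetical input file
df = RELOG.plants_report(solution)
RELOG.write_plants_report(solution, "plants.csv")
RELOG.write_plant_outputs_report(solution, "plant_outputs.csv")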
52
src/reports/products.jl
Normal file
@@ -0,0 +1,52 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

using DataFrames
using CSV

function products_report(solution)::DataFrame
    df = DataFrame()
    df."product name" = String[]
    df."location name" = String[]
    df."latitude (deg)" = Float64[]
    df."longitude (deg)" = Float64[]
    df."year" = Int[]
    df."amount (tonne)" = Float64[]
    df."marginal cost (\$/tonne)" = Float64[]
    df."amount disposed (tonne)" = Float64[]
    df."disposal cost (\$)" = Float64[]
    T = length(solution["Energy"]["Plants (GJ)"])
    for (prod_name, prod_dict) in solution["Products"]
        for (location_name, location_dict) in prod_dict
            for year = 1:T
                marginal_cost = NaN
                if "Marginal cost (\$/tonne)" in keys(location_dict)
                    marginal_cost = location_dict["Marginal cost (\$/tonne)"][year]
                end
                latitude = round(location_dict["Latitude (deg)"], digits = 6)
                longitude = round(location_dict["Longitude (deg)"], digits = 6)
                amount = location_dict["Amount (tonne)"][year]
                amount_disposed = location_dict["Dispose (tonne)"][year]
                disposal_cost = location_dict["Disposal cost (\$)"][year]
                push!(
                    df,
                    [
                        prod_name,
                        location_name,
                        latitude,
                        longitude,
                        year,
                        amount,
                        marginal_cost,
                        amount_disposed,
                        disposal_cost,
                    ],
                )
            end
        end
    end
    return df
end

write_products_report(solution, filename) = CSV.write(filename, products_report(solution))
75
src/reports/tr.jl
Normal file
@@ -0,0 +1,75 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

using DataFrames
using CSV

function transportation_report(solution)::DataFrame
    df = DataFrame()
    df."source type" = String[]
    df."source location name" = String[]
    df."source latitude (deg)" = Float64[]
    df."source longitude (deg)" = Float64[]
    df."destination type" = String[]
    df."destination location name" = String[]
    df."destination latitude (deg)" = Float64[]
    df."destination longitude (deg)" = Float64[]
    df."product" = String[]
    df."year" = Int[]
    df."distance (km)" = Float64[]
    df."amount (tonne)" = Float64[]
    df."amount-distance (tonne-km)" = Float64[]
    df."transportation cost (\$)" = Float64[]
    df."transportation energy (GJ)" = Float64[]

    T = length(solution["Energy"]["Plants (GJ)"])
    for (dst_plant_name, dst_plant_dict) in solution["Plants"]
        for (dst_location_name, dst_location_dict) in dst_plant_dict
            for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
                for (src_location_name, src_location_dict) in src_plant_dict
                    for year = 1:T
                        push!(
                            df,
                            [
                                src_plant_name,
                                src_location_name,
                                round(src_location_dict["Latitude (deg)"], digits = 6),
                                round(src_location_dict["Longitude (deg)"], digits = 6),
                                dst_plant_name,
                                dst_location_name,
                                round(dst_location_dict["Latitude (deg)"], digits = 6),
                                round(dst_location_dict["Longitude (deg)"], digits = 6),
                                dst_location_dict["Input product"],
                                year,
                                round(src_location_dict["Distance (km)"], digits = 2),
                                round(
                                    src_location_dict["Amount (tonne)"][year],
                                    digits = 2,
                                ),
                                round(
                                    src_location_dict["Amount (tonne)"][year] *
                                    src_location_dict["Distance (km)"],
                                    digits = 2,
                                ),
                                round(
                                    src_location_dict["Transportation cost (\$)"][year],
                                    digits = 2,
                                ),
                                round(
                                    src_location_dict["Transportation energy (J)"][year] /
                                    1e9,
                                    digits = 2,
                                ),
                            ],
                        )
                    end
                end
            end
        end
    end
    return df
end

write_transportation_report(solution, filename) =
    CSV.write(filename, transportation_report(solution))
71
src/reports/tr_emissions.jl
Normal file
@@ -0,0 +1,71 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

using DataFrames
using CSV

function transportation_emissions_report(solution)::DataFrame
    df = DataFrame()
    df."source type" = String[]
    df."source location name" = String[]
    df."source latitude (deg)" = Float64[]
    df."source longitude (deg)" = Float64[]
    df."destination type" = String[]
    df."destination location name" = String[]
    df."destination latitude (deg)" = Float64[]
    df."destination longitude (deg)" = Float64[]
    df."product" = String[]
    df."year" = Int[]
    df."distance (km)" = Float64[]
    df."shipped amount (tonne)" = Float64[]
    df."shipped amount-distance (tonne-km)" = Float64[]
    df."emission type" = String[]
    df."emission amount (tonne)" = Float64[]

    T = length(solution["Energy"]["Plants (GJ)"])
    for (dst_plant_name, dst_plant_dict) in solution["Plants"]
        for (dst_location_name, dst_location_dict) in dst_plant_dict
            for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
                for (src_location_name, src_location_dict) in src_plant_dict
                    for (emission_name, emission_amount) in
                        src_location_dict["Emissions (tonne)"]
                        for year = 1:T
                            push!(
                                df,
                                [
                                    src_plant_name,
                                    src_location_name,
                                    round(src_location_dict["Latitude (deg)"], digits = 6),
                                    round(src_location_dict["Longitude (deg)"], digits = 6),
                                    dst_plant_name,
                                    dst_location_name,
                                    round(dst_location_dict["Latitude (deg)"], digits = 6),
                                    round(dst_location_dict["Longitude (deg)"], digits = 6),
                                    dst_location_dict["Input product"],
                                    year,
                                    round(src_location_dict["Distance (km)"], digits = 2),
                                    round(
                                        src_location_dict["Amount (tonne)"][year],
                                        digits = 2,
                                    ),
                                    round(
                                        src_location_dict["Amount (tonne)"][year] *
                                        src_location_dict["Distance (km)"],
                                        digits = 2,
                                    ),
                                    emission_name,
                                    round(emission_amount[year], digits = 2),
                                ],
                            )
                        end
                    end
                end
            end
        end
    end
    return df
end

write_transportation_emissions_report(solution, filename) =
    CSV.write(filename, transportation_emissions_report(solution))
14
src/reports/write.jl
Normal file
@@ -0,0 +1,14 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

using DataFrames
using CSV
import Base: write

function write(solution::AbstractDict, filename::AbstractString)
    @info "Writing solution: $filename"
    open(filename, "w") do file
        JSON.print(file, solution, 2)
    end
end
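A short sketch of how this write method pairs with JSON.jl when a saved solution needs to be read back (file names illustrative):

using RELOG, JSON

solution = RELOG.solve("instance.json")        # hypothetical input file
RELOG.write(solution, "solution.json")         # pretty-printed JSON, 2-space indent
reloaded = JSON.parsefile("solution.json")     # plain Dict with the same keys as `solution`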
@@ -12,7 +12,9 @@
"Parameters": {
"type": "object",
"properties": {
"time horizon (years)": { "type": "number" }
"time horizon (years)": {
"type": "number"
}
},
"required": [
"time horizon (years)"
@@ -23,17 +25,27 @@
"additionalProperties": {
"type": "object",
"properties": {
"input": { "type": "string" },
"input": {
"type": "string"
},
"outputs (tonne/tonne)": {
"type": "object",
"additionalProperties": { "type": "number" }
"additionalProperties": {
"type": "number"
}
},
"energy (GJ/tonne)": {
"$ref": "#/definitions/TimeSeries"
},
"energy (GJ/tonne)": { "$ref": "#/definitions/TimeSeries" },
"emissions (tonne/tonne)": {
"type": "object",
"additionalProperties": { "$ref": "#/definitions/TimeSeries" }
"additionalProperties": {
"$ref": "#/definitions/TimeSeries"
}
},
"locations": { "$ref": "#/definitions/PlantLocation" }
"locations": {
"$ref": "#/definitions/PlantLocation"
}
},
"required": [
"input",
@@ -46,15 +58,26 @@
"additionalProperties": {
"type": "object",
"properties": {
"latitude (deg)": { "type": "number" },
"longitude (deg)": { "type": "number" },
"location": {
"type": "string"
},
"latitude (deg)": {
"type": "number"
},
"longitude (deg)": {
"type": "number"
},
"disposal": {
"type": "object",
"additionalProperties": {
"type": "object",
"properties": {
"cost ($/tonne)": { "$ref": "#/definitions/TimeSeries" },
"limit (tonne)": { "$ref": "#/definitions/TimeSeries" }
"cost ($/tonne)": {
"$ref": "#/definitions/TimeSeries"
},
"limit (tonne)": {
"$ref": "#/definitions/TimeSeries"
}
},
"required": [
"cost ($/tonne)"
@@ -64,8 +87,12 @@
"storage": {
"type": "object",
"properties": {
"cost ($/tonne)": { "$ref": "#/definitions/TimeSeries" },
"limit (tonne)": { "type": "number" }
"cost ($/tonne)": {
"$ref": "#/definitions/TimeSeries"
},
"limit (tonne)": {
"type": "number"
}
},
"required": [
"cost ($/tonne)",
@@ -77,9 +104,15 @@
"additionalProperties": {
"type": "object",
"properties": {
"variable operating cost ($/tonne)": { "$ref": "#/definitions/TimeSeries" },
"fixed operating cost ($)": { "$ref": "#/definitions/TimeSeries" },
"opening cost ($)": { "$ref": "#/definitions/TimeSeries" }
"variable operating cost ($/tonne)": {
"$ref": "#/definitions/TimeSeries"
},
"fixed operating cost ($)": {
"$ref": "#/definitions/TimeSeries"
},
"opening cost ($)": {
"$ref": "#/definitions/TimeSeries"
}
},
"required": [
"variable operating cost ($/tonne)",
@@ -90,8 +123,6 @@
}
},
"required": [
"latitude (deg)",
"longitude (deg)",
"capacities (tonne)"
]
}
@@ -101,13 +132,20 @@
"additionalProperties": {
"type": "object",
"properties": {
"latitude (deg)": { "type": "number" },
"longitude (deg)": { "type": "number" },
"amount (tonne)": { "$ref": "#/definitions/TimeSeries" }
"location": {
"type": "string"
},
"latitude (deg)": {
"type": "number"
},
"longitude (deg)": {
"type": "number"
},
"amount (tonne)": {
"$ref": "#/definitions/TimeSeries"
}
},
"required": [
"latitude (deg)",
"longitude (deg)",
"amount (tonne)"
]
}
@@ -117,13 +155,27 @@
"additionalProperties": {
"type": "object",
"properties": {
"transportation cost ($/km/tonne)": { "$ref": "#/definitions/TimeSeries" },
"transportation energy (J/km/tonne)": { "$ref": "#/definitions/TimeSeries" },
"transportation cost ($/km/tonne)": {
"$ref": "#/definitions/TimeSeries"
},
"transportation energy (J/km/tonne)": {
"$ref": "#/definitions/TimeSeries"
},
"transportation emissions (tonne/km/tonne)": {
"type": "object",
"additionalProperties": { "$ref": "#/definitions/TimeSeries" }
"additionalProperties": {
"$ref": "#/definitions/TimeSeries"
}
},
"initial amounts": { "$ref": "#/definitions/InitialAmount" }
"initial amounts": {
"$ref": "#/definitions/InitialAmount"
},
"disposal limit (tonne)": {
"$ref": "#/definitions/TimeSeries"
},
"disposal cost ($/tonne)": {
"$ref": "#/definitions/TimeSeries"
}
},
"required": [
"transportation cost ($/km/tonne)"
@@ -133,9 +185,15 @@
},
"type": "object",
"properties": {
"parameters": { "$ref": "#/definitions/Parameters" },
"plants": { "$ref": "#/definitions/Plant" },
"products": { "$ref": "#/definitions/Product" }
"parameters": {
"$ref": "#/definitions/Parameters"
},
"plants": {
"$ref": "#/definitions/Plant"
},
"products": {
"$ref": "#/definitions/Product"
}
},
"required": [
"parameters",
@@ -1,22 +1,30 @@
using PackageCompiler
using TOML
using Logging

using Cbc
using Clp
using Geodesy
using JSON
using JSONSchema
using JuMP
using MathOptInterface
using ProgressBars
Logging.disable_logging(Logging.Info)

pkg = [:Cbc,
       :Clp,
       :Geodesy,
       :JSON,
       :JSONSchema,
       :JuMP,
       :MathOptInterface,
       :ProgressBars]
mkpath("build")

@info "Building system image..."
create_sysimage(pkg, sysimage_path="build/sysimage.so")
printstyled("Generating precompilation statements...\n", color = :light_green)
run(`julia --project=. --trace-compile=build/precompile.jl $ARGS`)

printstyled("Finding dependencies...\n", color = :light_green)
project = TOML.parsefile("Project.toml")
manifest = TOML.parsefile("Manifest.toml")
deps = Symbol[]
for dep in keys(project["deps"])
    if "path" in keys(manifest[dep][1])
        printstyled(" skip $(dep)\n", color = :light_black)
    else
        println(" add $(dep)")
        push!(deps, Symbol(dep))
    end
end

printstyled("Building system image...\n", color = :light_green)
create_sysimage(
    deps,
    precompile_statements_file = "build/precompile.jl",
    sysimage_path = "build/sysimage.so",
)
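A condensed sketch of the PackageCompiler workflow used by the script above (the package list here is an illustrative subset; the real script derives it from Project.toml, and the resulting image is loaded with `julia -Jbuild/sysimage.so`):

using PackageCompiler

# Build a custom system image that reuses precompile statements traced from an
# earlier run, as done above.
create_sysimage(
    [:JSON, :JuMP],                                    # illustrative subset of the deps
    precompile_statements_file = "build/precompile.jl",
    sysimage_path = "build/sysimage.so",
)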
4406
test/fixtures/instances/case3_p010_s1.00.json
vendored
Normal file
4406
test/fixtures/instances/case3_p010_s1.25.json
vendored
Normal file
357
test/fixtures/instances/s1.json
vendored
Normal file
@@ -0,0 +1,357 @@
{
    "parameters": {
        "time horizon (years)": 2
    },
    "products": {
        "P1": {
            "transportation cost ($/km/tonne)": [0.015, 0.015],
            "transportation energy (J/km/tonne)": [0.12, 0.11],
            "transportation emissions (tonne/km/tonne)": {
                "CO2": [0.052, 0.050],
                "CH4": [0.003, 0.002]
            },
            "initial amounts": {
                "C1": {"latitude (deg)": 7.0, "longitude (deg)": 7.0, "amount (tonne)": [934.56, 934.56]},
                "C2": {"latitude (deg)": 7.0, "longitude (deg)": 19.0, "amount (tonne)": [198.95, 198.95]},
                "C3": {"latitude (deg)": 84.0, "longitude (deg)": 76.0, "amount (tonne)": [212.97, 212.97]},
                "C4": {"latitude (deg)": 21.0, "longitude (deg)": 16.0, "amount (tonne)": [352.19, 352.19]},
                "C5": {"latitude (deg)": 32.0, "longitude (deg)": 92.0, "amount (tonne)": [510.33, 510.33]},
                "C6": {"latitude (deg)": 14.0, "longitude (deg)": 62.0, "amount (tonne)": [471.66, 471.66]},
                "C7": {"latitude (deg)": 30.0, "longitude (deg)": 83.0, "amount (tonne)": [785.21, 785.21]},
                "C8": {"latitude (deg)": 35.0, "longitude (deg)": 40.0, "amount (tonne)": [706.17, 706.17]},
                "C9": {"latitude (deg)": 74.0, "longitude (deg)": 52.0, "amount (tonne)": [30.08, 30.08]},
                "C10": {"latitude (deg)": 22.0, "longitude (deg)": 54.0, "amount (tonne)": [536.52, 536.52]}
            },
            "disposal limit (tonne)": [1.0, 1.0],
            "disposal cost ($/tonne)": [-1000, -1000]
        },
        "P2": {"transportation cost ($/km/tonne)": [0.02, 0.02]},
        "P3": {"transportation cost ($/km/tonne)": [0.0125, 0.0125]},
        "P4": {"transportation cost ($/km/tonne)": [0.0175, 0.0175]}
    },
    "plants": {
        "F1": {
            "input": "P1",
            "outputs (tonne/tonne)": {"P2": 0.2, "P3": 0.5},
            "energy (GJ/tonne)": [0.12, 0.11],
            "emissions (tonne/tonne)": {"CO2": [0.052, 0.050], "CH4": [0.003, 0.002]},
            "locations": {
                "L1": {
                    "latitude (deg)": 0.0,
                    "longitude (deg)": 0.0,
                    "disposal": {
                        "P2": {"cost ($/tonne)": [-10.0, -10.0], "limit (tonne)": [1.0, 1.0]},
                        "P3": {"cost ($/tonne)": [-10.0, -10.0], "limit (tonne)": [1.0, 1.0]}
                    },
                    "capacities (tonne)": {
                        "250.0": {
                            "opening cost ($)": [500.0, 500.0],
                            "fixed operating cost ($)": [30.0, 30.0],
                            "variable operating cost ($/tonne)": [30.0, 30.0]
                        },
                        "1000.0": {
                            "opening cost ($)": [1250.0, 1250.0],
                            "fixed operating cost ($)": [30.0, 30.0],
                            "variable operating cost ($/tonne)": [30.0, 30.0]
                        }
                    }
                },
                "L2": {
                    "latitude (deg)": 0.5,
                    "longitude (deg)": 0.5,
                    "capacities (tonne)": {
                        "0.0": {
                            "opening cost ($)": [1000, 1000],
                            "fixed operating cost ($)": [50.0, 50.0],
                            "variable operating cost ($/tonne)": [50.0, 50.0]
                        },
                        "10000.0": {
                            "opening cost ($)": [10000, 10000],
                            "fixed operating cost ($)": [50.0, 50.0],
                            "variable operating cost ($/tonne)": [50.0, 50.0]
                        }
                    }
                }
            }
        },
        "F2": {
            "input": "P2",
            "outputs (tonne/tonne)": {"P3": 0.05, "P4": 0.80},
            "locations": {
                "L3": {
                    "latitude (deg)": 25.0,
                    "longitude (deg)": 65.0,
                    "disposal": {"P3": {"cost ($/tonne)": [100.0, 100.0]}},
                    "capacities (tonne)": {
                        "1000.0": {
                            "opening cost ($)": [3000, 3000],
                            "fixed operating cost ($)": [50.0, 50.0],
                            "variable operating cost ($/tonne)": [50.0, 50.0]
                        }
                    }
                },
                "L4": {
                    "latitude (deg)": 0.75,
                    "longitude (deg)": 0.20,
                    "capacities (tonne)": {
                        "10000": {
                            "opening cost ($)": [3000, 3000],
                            "fixed operating cost ($)": [50.0, 50.0],
                            "variable operating cost ($/tonne)": [50.0, 50.0]
                        }
                    }
                }
            }
        },
        "F3": {
            "input": "P4",
            "locations": {
                "L5": {
                    "latitude (deg)": 100.0,
                    "longitude (deg)": 100.0,
                    "capacities (tonne)": {
                        "15000": {
                            "opening cost ($)": [0.0, 0.0],
                            "fixed operating cost ($)": [0.0, 0.0],
                            "variable operating cost ($/tonne)": [-15.0, -15.0]
                        }
                    }
                }
            }
        },
        "F4": {
            "input": "P3",
            "locations": {
                "L6": {
                    "latitude (deg)": 50.0,
                    "longitude (deg)": 50.0,
                    "capacities (tonne)": {
                        "10000": {
                            "opening cost ($)": [0.0, 0.0],
                            "fixed operating cost ($)": [0.0, 0.0],
                            "variable operating cost ($/tonne)": [-15.0, -15.0]
                        }
                    }
                }
            }
        }
    }
}
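A sketch of how this fixture is consumed by the tests further below (paths assume the repository root as the working directory):

using RELOG

solution = RELOG.solve("test/fixtures/instances/s1.json")
solution["Costs"]["Total (\$)"]       # one entry per year (T = 2 in this fixture)
solution["Plants"]["F1"]              # per-location results for plant type F1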
347
test/fixtures/instances/s2.json
vendored
Normal file
@@ -0,0 +1,347 @@
{
    "parameters": {
        "time horizon (years)": 2
    },
    "products": {
        "P1": {
            "transportation cost ($/km/tonne)": [0.015, 0.015],
            "transportation energy (J/km/tonne)": [0.12, 0.11],
            "transportation emissions (tonne/km/tonne)": {
                "CO2": [0.052, 0.050],
                "CH4": [0.003, 0.002]
            },
            "initial amounts": {
                "C1": {"location": "2018-us-county:17043", "amount (tonne)": [934.56, 934.56]},
                "C2": {"latitude (deg)": 7.0, "longitude (deg)": 19.0, "amount (tonne)": [198.95, 198.95]},
                "C3": {"latitude (deg)": 84.0, "longitude (deg)": 76.0, "amount (tonne)": [212.97, 212.97]},
                "C4": {"latitude (deg)": 21.0, "longitude (deg)": 16.0, "amount (tonne)": [352.19, 352.19]},
                "C5": {"latitude (deg)": 32.0, "longitude (deg)": 92.0, "amount (tonne)": [510.33, 510.33]},
                "C6": {"latitude (deg)": 14.0, "longitude (deg)": 62.0, "amount (tonne)": [471.66, 471.66]},
                "C7": {"latitude (deg)": 30.0, "longitude (deg)": 83.0, "amount (tonne)": [785.21, 785.21]},
                "C8": {"latitude (deg)": 35.0, "longitude (deg)": 40.0, "amount (tonne)": [706.17, 706.17]},
                "C9": {"latitude (deg)": 74.0, "longitude (deg)": 52.0, "amount (tonne)": [30.08, 30.08]},
                "C10": {"latitude (deg)": 22.0, "longitude (deg)": 54.0, "amount (tonne)": [536.52, 536.52]}
            }
        },
        "P2": {"transportation cost ($/km/tonne)": [0.02, 0.02]},
        "P3": {"transportation cost ($/km/tonne)": [0.0125, 0.0125]},
        "P4": {"transportation cost ($/km/tonne)": [0.0175, 0.0175]}
    },
    "plants": {
        "F1": {
            "input": "P1",
            "outputs (tonne/tonne)": {"P2": 0.2, "P3": 0.5},
            "energy (GJ/tonne)": [0.12, 0.11],
            "emissions (tonne/tonne)": {"CO2": [0.052, 0.050], "CH4": [0.003, 0.002]},
            "locations": {
                "L1": {
                    "latitude (deg)": 0.0,
                    "longitude (deg)": 0.0,
                    "disposal": {
                        "P2": {"cost ($/tonne)": [-10.0, -10.0], "limit (tonne)": [1.0, 1.0]},
                        "P3": {"cost ($/tonne)": [-10.0, -10.0], "limit (tonne)": [1.0, 1.0]}
                    },
                    "capacities (tonne)": {
                        "250.0": {
                            "opening cost ($)": [500.0, 500.0],
                            "fixed operating cost ($)": [30.0, 30.0],
                            "variable operating cost ($/tonne)": [30.0, 30.0]
                        },
                        "1000.0": {
                            "opening cost ($)": [1250.0, 1250.0],
                            "fixed operating cost ($)": [30.0, 30.0],
                            "variable operating cost ($/tonne)": [30.0, 30.0]
                        }
                    }
                },
                "L2": {
                    "location": "2018-us-county:17043",
                    "capacities (tonne)": {
                        "0.0": {
                            "opening cost ($)": [1000, 1000],
                            "fixed operating cost ($)": [50.0, 50.0],
                            "variable operating cost ($/tonne)": [50.0, 50.0]
                        },
                        "10000.0": {
                            "opening cost ($)": [10000, 10000],
                            "fixed operating cost ($)": [50.0, 50.0],
                            "variable operating cost ($/tonne)": [50.0, 50.0]
                        }
                    }
                }
            }
        },
        "F2": {
            "input": "P2",
            "outputs (tonne/tonne)": {"P3": 0.05, "P4": 0.80},
            "locations": {
                "L3": {
                    "latitude (deg)": 25.0,
                    "longitude (deg)": 65.0,
                    "disposal": {"P3": {"cost ($/tonne)": [100.0, 100.0]}},
                    "capacities (tonne)": {
                        "1000.0": {
                            "opening cost ($)": [3000, 3000],
                            "fixed operating cost ($)": [50.0, 50.0],
                            "variable operating cost ($/tonne)": [50.0, 50.0]
                        }
                    }
                },
                "L4": {
                    "latitude (deg)": 0.75,
                    "longitude (deg)": 0.20,
                    "capacities (tonne)": {
                        "10000": {
                            "opening cost ($)": [3000, 3000],
                            "fixed operating cost ($)": [50.0, 50.0],
                            "variable operating cost ($/tonne)": [50.0, 50.0]
                        }
                    }
                }
            }
        },
        "F3": {
            "input": "P4",
            "locations": {
                "L5": {
                    "latitude (deg)": 100.0,
                    "longitude (deg)": 100.0,
                    "capacities (tonne)": {
                        "15000": {
                            "opening cost ($)": [0.0, 0.0],
                            "fixed operating cost ($)": [0.0, 0.0],
                            "variable operating cost ($/tonne)": [-15.0, -15.0]
                        }
                    }
                }
            }
        },
        "F4": {
            "input": "P3",
            "locations": {
                "L6": {
                    "latitude (deg)": 50.0,
                    "longitude (deg)": 50.0,
                    "capacities (tonne)": {
                        "10000": {
                            "opening cost ($)": [0.0, 0.0],
                            "fixed operating cost ($)": [0.0, 0.0],
                            "variable operating cost ($/tonne)": [-15.0, -15.0]
                        }
                    }
                }
            }
        }
    }
}
7
test/fixtures/nimh_plant_emissions.csv
vendored
@@ -1,7 +0,0 @@
plant type,location name,year,emission type,emission amount (tonne)
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,CO2,40711.3
Rare Earth Recycling Plant,"Stanly, North Carolina",1,CO2,23336.47
Rare Earth Recycling Plant,"Lynn, Texas",1,CO2,52927.44
Mega Plant,"Sebastian, Arkansas",1,CO2,110818.84
Mega Plant,"District of Columbia, District of Columbia",1,CO2,63523.43
Mega Plant,"Maricopa, Arizona",1,CO2,144072.0
37
test/fixtures/nimh_plant_outputs.csv
vendored
@@ -1,37 +0,0 @@
plant type,location name,year,product name,amount produced (tonne),amount sent (tonne),amount disposed (tonne),disposal cost ($)
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,rare earth cela,18045.12,0.0,18045.12,0.0
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,rare earth diddy,7624.02,0.0,7624.02,0.0
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,salt,6434.8,0.0,6434.8,324314.03
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,rare earth misch,2188.78,0.0,2188.78,0.0
Rare Earth Recycling Plant,"Stanly, North Carolina",1,rare earth cela,10343.8,0.0,10343.8,0.0
Rare Earth Recycling Plant,"Stanly, North Carolina",1,rare earth diddy,4370.23,0.0,4370.23,0.0
Rare Earth Recycling Plant,"Stanly, North Carolina",1,salt,3688.55,0.0,3688.55,185902.85
Rare Earth Recycling Plant,"Stanly, North Carolina",1,rare earth misch,1254.65,0.0,1254.65,0.0
Rare Earth Recycling Plant,"Lynn, Texas",1,rare earth cela,23459.88,0.0,23459.88,0.0
Rare Earth Recycling Plant,"Lynn, Texas",1,rare earth diddy,9911.74,0.0,9911.74,0.0
Rare Earth Recycling Plant,"Lynn, Texas",1,salt,8365.68,0.0,8365.68,421630.2
Rare Earth Recycling Plant,"Lynn, Texas",1,rare earth misch,2845.56,0.0,2845.56,0.0
Mega Plant,"Sebastian, Arkansas",1,iron-nickel scrap,35656.28,0.0,35656.28,0.0
Mega Plant,"Sebastian, Arkansas",1,mixed-hydroxides,73141.86,0.0,73141.86,0.0
Mega Plant,"Sebastian, Arkansas",1,leach residue,31470.35,0.0,31470.35,6.38848022e6
Mega Plant,"Sebastian, Arkansas",1,plastic pack,96503.37,0.0,96503.37,4.86376966e6
Mega Plant,"Sebastian, Arkansas",1,salt,285304.6,0.0,285304.6,1.437935192e7
Mega Plant,"Sebastian, Arkansas",1,rare earth mix,44145.84,44145.84,0.0,0.0
Mega Plant,"Sebastian, Arkansas",1,nickel-iron scrap,178655.26,0.0,178655.26,0.0
Mega Plant,"Sebastian, Arkansas",1,nickel,37931.36,0.0,37931.36,0.0
Mega Plant,"District of Columbia, District of Columbia",1,iron-nickel scrap,20438.84,0.0,20438.84,0.0
Mega Plant,"District of Columbia, District of Columbia",1,mixed-hydroxides,41926.28,0.0,41926.28,0.0
Mega Plant,"District of Columbia, District of Columbia",1,leach residue,18039.39,0.0,18039.39,3.66199595e6
Mega Plant,"District of Columbia, District of Columbia",1,plastic pack,55317.53,0.0,55317.53,2.78800343e6
Mega Plant,"District of Columbia, District of Columbia",1,salt,163541.92,0.0,163541.92,8.24251257e6
Mega Plant,"District of Columbia, District of Columbia",1,rare earth mix,25305.22,25305.22,0.0,0.0
Mega Plant,"District of Columbia, District of Columbia",1,nickel-iron scrap,102408.52,0.0,102408.52,0.0
Mega Plant,"District of Columbia, District of Columbia",1,nickel,21742.96,0.0,21742.96,0.0
Mega Plant,"Maricopa, Arizona",1,iron-nickel scrap,46355.57,0.0,46355.57,0.0
Mega Plant,"Maricopa, Arizona",1,mixed-hydroxides,95089.37,0.0,95089.37,0.0
Mega Plant,"Maricopa, Arizona",1,leach residue,40913.58,0.0,40913.58,8.30545698e6
Mega Plant,"Maricopa, Arizona",1,plastic pack,125460.91,0.0,125460.91,6.32322998e6
Mega Plant,"Maricopa, Arizona",1,salt,370915.31,0.0,370915.31,1.869413141e7
Mega Plant,"Maricopa, Arizona",1,rare earth mix,57392.58,57392.58,0.0,0.0
Mega Plant,"Maricopa, Arizona",1,nickel-iron scrap,232263.93,0.0,232263.93,0.0
Mega Plant,"Maricopa, Arizona",1,nickel,49313.34,0.0,49313.34,0.0
7
test/fixtures/nimh_plants.csv
vendored
@@ -1,7 +0,0 @@
plant type,location name,year,latitude (deg),longitude (deg),capacity (tonne),amount processed (tonne),utilization factor (%),energy (GJ),opening cost ($),expansion cost ($),fixed operating cost ($),variable operating cost ($),total cost ($)
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,35.23416,-94.212943,44145.84,44145.84,100.0,1.13360359e6,6.9926855e6,1.793555439e7,1.677420707e7,1.0080261442e8,1.4250506138e8
Rare Earth Recycling Plant,"Stanly, North Carolina",1,35.334445,-80.223231,25305.22,25305.22,100.0,649802.71,7.1653444e6,9.98954108e6,1.126536154e7,5.778193764e7,8.620218466e7
Rare Earth Recycling Plant,"Lynn, Texas",1,33.166444,-101.793455,57392.58,57392.58,100.0,1.47376145e6,7.4243328e6,2.5154042e7,2.06474474e7,1.310502261e8,1.842760483e8
Mega Plant,"Sebastian, Arkansas",1,35.23416,-94.212943,553817.3,553817.3,100.0,3.08574408e6,1.6858178e7,4.012879371e7,3.834652057e7,4.2898688058e8,5.2432037286e8
Mega Plant,"District of Columbia, District of Columbia",1,38.930028,-76.974164,317458.4,317458.4,100.0,1.76880602e6,2.12288167e7,2.746685387e7,2.602109584e7,2.4590327664e8,3.2062004305e8
Mega Plant,"Maricopa, Arizona",1,33.647365,-111.893669,720000.0,720000.0,100.0,4.0116763e6,2.10206911e7,6.60955172e7,4.70124619e7,5.57712e8,6.918406702e8
BIN
test/fixtures/nimh_solution.json.gz
vendored
3618
test/fixtures/nimh_transportation.csv
vendored
3618
test/fixtures/nimh_transportation_emissions.csv
vendored
@@ -3,13 +3,12 @@

using RELOG

@testset "Graph" begin
function graph_build_test()
    @testset "build_graph" begin
        basedir = dirname(@__FILE__)
        instance = RELOG.parsefile("$basedir/../instances/s1.json")
        instance = RELOG.parsefile(fixture("instances/s1.json"))
        graph = RELOG.build_graph(instance)
        process_node_by_location_name = Dict(n.location.location_name => n
                                             for n in graph.process_nodes)
        process_node_by_location_name =
            Dict(n.location.location_name => n for n in graph.process_nodes)

        @test length(graph.plant_shipping_nodes) == 8
        @test length(graph.collection_shipping_nodes) == 10
@@ -39,4 +38,3 @@ using RELOG
        @test length(graph.arcs) == 38
    end
end
54
test/instance/compress_test.jl
Normal file
@@ -0,0 +1,54 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>

using RELOG

function compress_test()
    @testset "compress" begin
        instance = RELOG.parsefile(fixture("instances/s1.json"))
        compressed = RELOG._compress(instance)

        product_name_to_product = Dict(p.name => p for p in compressed.products)
        location_name_to_facility = Dict()
        for p in compressed.plants
            location_name_to_facility[p.location_name] = p
        end
        for c in compressed.collection_centers
            location_name_to_facility[c.name] = c
        end

        p1 = product_name_to_product["P1"]
        p2 = product_name_to_product["P2"]
        p3 = product_name_to_product["P3"]
        c1 = location_name_to_facility["C1"]
        l1 = location_name_to_facility["L1"]

        @test compressed.time == 1
        @test compressed.building_period == [1]

        @test p1.name == "P1"
        @test p1.transportation_cost ≈ [0.015]
        @test p1.transportation_energy ≈ [0.115]
        @test p1.transportation_emissions["CO2"] ≈ [0.051]
        @test p1.transportation_emissions["CH4"] ≈ [0.0025]

        @test c1.name == "C1"
        @test c1.amount ≈ [1869.12]

        @test l1.plant_name == "F1"
        @test l1.location_name == "L1"
        @test l1.energy ≈ [0.115]
        @test l1.emissions["CO2"] ≈ [0.051]
        @test l1.emissions["CH4"] ≈ [0.0025]
        @test l1.sizes[1].opening_cost ≈ [500]
        @test l1.sizes[2].opening_cost ≈ [1250]
        @test l1.sizes[1].fixed_operating_cost ≈ [60]
        @test l1.sizes[2].fixed_operating_cost ≈ [60]
        @test l1.sizes[1].variable_operating_cost ≈ [30]
        @test l1.sizes[2].variable_operating_cost ≈ [30]
        @test l1.disposal_limit[p2] ≈ [2.0]
        @test l1.disposal_limit[p3] ≈ [2.0]
        @test l1.disposal_cost[p2] ≈ [-10.0]
        @test l1.disposal_cost[p3] ≈ [-10.0]
    end
end
27
test/instance/geodb_test.jl
Normal file
@@ -0,0 +1,27 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

using RELOG

function geodb_test()
    @testset "geodb_query (2018-us-county)" begin
        region = RELOG.geodb_query("2018-us-county:17043")
        @test region.centroid.lat == 41.83956
        @test region.centroid.lon == -88.08857
        @test region.population == 922_921
    end

    # @testset "geodb_query (2018-us-zcta)" begin
    #     region = RELOG.geodb_query("2018-us-zcta:60439")
    #     @test region.centroid.lat == 41.68241
    #     @test region.centroid.lon == -87.98954
    # end

    @testset "geodb_query (us-state)" begin
        region = RELOG.geodb_query("us-state:IL")
        @test region.centroid.lat == 39.73939
        @test region.centroid.lon == -89.50414
        @test region.population == 12_671_821
    end
end
@@ -3,10 +3,9 @@

using RELOG

@testset "Instance" begin
    @testset "load" begin
        basedir = dirname(@__FILE__)
        instance = RELOG.parsefile("$basedir/../instances/s1.json")
function parse_test()
    @testset "parse" begin
        instance = RELOG.parsefile(fixture("instances/s1.json"))

        centers = instance.collection_centers
        plants = instance.plants
@@ -41,7 +40,14 @@ using RELOG
        @test plant.sizes[2].fixed_operating_cost == [30, 30]
        @test plant.sizes[2].variable_operating_cost == [30, 30]

        p1 = product_name_to_product["P1"]
        @test p1.disposal_limit == [1.0, 1.0]
        @test p1.disposal_cost == [-1000.0, -1000.0]

        p2 = product_name_to_product["P2"]
        @test p2.disposal_limit == [0.0, 0.0]
        @test p2.disposal_cost == [0.0, 0.0]

        p3 = product_name_to_product["P3"]
        @test length(plant.output) == 2
        @test plant.output[p2] == 0.2
@@ -71,57 +77,16 @@ using RELOG
        @test plant.disposal_limit[p4] == [0, 0]
    end

    @testset "validate timeseries" begin
        @test_throws String RELOG.parsefile("fixtures/s1-wrong-length.json")
    @testset "parse (geodb)" begin
        instance = RELOG.parsefile(fixture("instances/s2.json"))

        centers = instance.collection_centers
        @test centers[1].name == "C1"
        @test centers[1].latitude == 41.83956
        @test centers[1].longitude == -88.08857
    end

    @testset "compress" begin
        basedir = dirname(@__FILE__)
        instance = RELOG.parsefile("$basedir/../instances/s1.json")
        compressed = RELOG._compress(instance)

        product_name_to_product = Dict(p.name => p for p in compressed.products)
        location_name_to_facility = Dict()
        for p in compressed.plants
            location_name_to_facility[p.location_name] = p
        end
        for c in compressed.collection_centers
            location_name_to_facility[c.name] = c
        end

        p1 = product_name_to_product["P1"]
        p2 = product_name_to_product["P2"]
        p3 = product_name_to_product["P3"]
        c1 = location_name_to_facility["C1"]
        l1 = location_name_to_facility["L1"]

        @test compressed.time == 1
        @test compressed.building_period == [1]

        @test p1.name == "P1"
        @test p1.transportation_cost ≈ [0.015]
        @test p1.transportation_energy ≈ [0.115]
        @test p1.transportation_emissions["CO2"] ≈ [0.051]
        @test p1.transportation_emissions["CH4"] ≈ [0.0025]

        @test c1.name == "C1"
        @test c1.amount ≈ [1869.12]

        @test l1.plant_name == "F1"
        @test l1.location_name == "L1"
        @test l1.energy ≈ [0.115]
        @test l1.emissions["CO2"] ≈ [0.051]
        @test l1.emissions["CH4"] ≈ [0.0025]
        @test l1.sizes[1].opening_cost ≈ [500]
        @test l1.sizes[2].opening_cost ≈ [1250]
        @test l1.sizes[1].fixed_operating_cost ≈ [60]
        @test l1.sizes[2].fixed_operating_cost ≈ [60]
        @test l1.sizes[1].variable_operating_cost ≈ [30]
        @test l1.sizes[2].variable_operating_cost ≈ [30]
        @test l1.disposal_limit[p2] ≈ [2.0]
        @test l1.disposal_limit[p3] ≈ [2.0]
        @test l1.disposal_cost[p2] ≈ [-10.0]
        @test l1.disposal_cost[p3] ≈ [-10.0]
    end
    # @testset "parse (invalid)" begin
    #     @test_throws ErrorException RELOG.parsefile(fixture("s1-wrong-length.json"))
    # end
end
38
test/model/build_test.jl
Normal file
@@ -0,0 +1,38 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>

using RELOG, HiGHS, JuMP, Printf, JSON, MathOptInterface.FileFormats

function model_build_test()
    @testset "build" begin
        instance = RELOG.parsefile(fixture("instances/s1.json"))
        graph = RELOG.build_graph(instance)
        model = RELOG.build_model(instance, graph, HiGHS.Optimizer)

        process_node_by_location_name =
            Dict(n.location.location_name => n for n in graph.process_nodes)

        shipping_node_by_loc_and_prod_names = Dict(
            (n.location.location_name, n.product.name) => n for n in graph.plant_shipping_nodes
        )

        @test length(model[1, :open_plant]) == 12
        @test length(model[2, :flow]) == 76
        @test length(model[2, :plant_dispose]) == 16
        @test length(model[2, :capacity]) == 12
        @test length(model[2, :expansion]) == 12

        # l1 = process_node_by_location_name["L1"]
        # v = model[2, :capacity][l1.index, 1]
        # @test lower_bound(v) == 0.0
        # @test upper_bound(v) == 1000.0

        # v = model[2, :expansion][l1.index, 1]
        # @test lower_bound(v) == 0.0
        # @test upper_bound(v) == 750.0

        # v = model[2, :plant_dispose][shipping_node_by_loc_and_prod_names["L1", "P2"].index, 1]
        # @test lower_bound(v) == 0.0
        # @test upper_bound(v) == 1.0
    end
end
85
test/model/solve_test.jl
Normal file
@@ -0,0 +1,85 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>

using RELOG, JuMP, Printf, JSON, MathOptInterface.FileFormats

basedir = dirname(@__FILE__)

function model_solve_test()
    @testset "solve (exact)" begin
        solution = RELOG.solve(fixture("instances/s1.json"))

        solution_filename = tempname()
        RELOG.write(solution, solution_filename)
        @test isfile(solution_filename)

        @test "Costs" in keys(solution)
        @test "Fixed operating (\$)" in keys(solution["Costs"])
        @test "Transportation (\$)" in keys(solution["Costs"])
        @test "Variable operating (\$)" in keys(solution["Costs"])
        @test "Total (\$)" in keys(solution["Costs"])

        @test "Plants" in keys(solution)
        @test "F1" in keys(solution["Plants"])
        @test "F2" in keys(solution["Plants"])
        @test "F3" in keys(solution["Plants"])
        @test "F4" in keys(solution["Plants"])

        @test "Products" in keys(solution)
        @test "P1" in keys(solution["Products"])
        @test "C1" in keys(solution["Products"]["P1"])
        @test "Dispose (tonne)" in keys(solution["Products"]["P1"]["C1"])

        total_disposal =
            sum([loc["Dispose (tonne)"] for loc in values(solution["Products"]["P1"])])
        @test total_disposal == [1.0, 1.0]
    end

    @testset "solve (heuristic)" begin
        # Should not crash
        solution = RELOG.solve(fixture("instances/s1.json"), heuristic = true)
    end

    # @testset "solve (infeasible)" begin
    #     json = JSON.parsefile(fixture("instances/s1.json"))
    #     for (location_name, location_dict) in json["products"]["P1"]["initial amounts"]
    #         location_dict["amount (tonne)"] *= 1000
    #     end
    #     @test_throws ErrorException("No solution available") RELOG.solve(RELOG.parse(json))
    # end

    @testset "solve (with storage)" begin
        basedir = dirname(@__FILE__)
        filename = "$basedir/../fixtures/storage.json"
        instance = RELOG.parsefile(filename)
        @test instance.plants[1].storage_limit == 50.0
        @test instance.plants[1].storage_cost == [2.0, 1.5, 1.0]

        solution = RELOG.solve(filename)
        plant_dict = solution["Plants"]["mega plant"]["Chicago"]
        @test plant_dict["Variable operating cost (\$)"] == [500.0, 0.0, 100.0]
        @test plant_dict["Process (tonne)"] == [50.0, 0.0, 50.0]
        @test plant_dict["Storage (tonne)"] == [50.0, 50.0, 0.0]
        @test plant_dict["Storage cost (\$)"] == [100.0, 75.0, 0.0]

        @test solution["Costs"]["Variable operating (\$)"] == [500.0, 0.0, 100.0]
        @test solution["Costs"]["Storage (\$)"] == [100.0, 75.0, 0.0]
        @test solution["Costs"]["Total (\$)"] == [600.0, 75.0, 100.0]
    end

    @testset "solve (stochastic)" begin
        # Should not crash
        solutions = RELOG.solve_stochastic(
            scenarios=[
                fixture("instances/case3_p010_s1.00.json"),
                fixture("instances/case3_p010_s1.25.json"),
            ],
            probs=[0.5, 0.5],
            optimizer=optimizer_with_attributes(
                HiGHS.Optimizer,
                "log_to_console" => false,
            ),
            method=:lshaped,
        )
    end
end
@@ -1,100 +0,0 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>

using RELOG, Cbc, JuMP, Printf, JSON, MathOptInterface.FileFormats

@testset "Model" begin
    @testset "build" begin
        basedir = dirname(@__FILE__)
        instance = RELOG.parsefile("$basedir/../instances/s1.json")
        graph = RELOG.build_graph(instance)
        model = RELOG.build_model(instance, graph, Cbc.Optimizer)
        set_optimizer_attribute(model.mip, "logLevel", 0)

        process_node_by_location_name = Dict(n.location.location_name => n
                                             for n in graph.process_nodes)

        shipping_node_by_location_and_product_names = Dict((n.location.location_name, n.product.name) => n
                                                           for n in graph.plant_shipping_nodes)

        @test length(model.vars.flow) == 76
        @test length(model.vars.dispose) == 16
        @test length(model.vars.open_plant) == 12
        @test length(model.vars.capacity) == 12
        @test length(model.vars.expansion) == 12

        l1 = process_node_by_location_name["L1"]
        v = model.vars.capacity[l1, 1]
        @test lower_bound(v) == 0.0
        @test upper_bound(v) == 1000.0

        v = model.vars.expansion[l1, 1]
        @test lower_bound(v) == 0.0
        @test upper_bound(v) == 750.0

        v = model.vars.dispose[shipping_node_by_location_and_product_names["L1", "P2"], 1]
        @test lower_bound(v) == 0.0
        @test upper_bound(v) == 1.0

        # dest = FileFormats.Model(format = FileFormats.FORMAT_LP)
        # MOI.copy_to(dest, model.mip)
        # MOI.write_to_file(dest, "model.lp")
    end

    @testset "solve (exact)" begin
        solution_filename_a = tempname()
        solution_filename_b = tempname()
        solution = RELOG.solve("$(pwd())/../instances/s1.json",
                               output=solution_filename_a)

        @test isfile(solution_filename_a)

        RELOG.write(solution, solution_filename_b)
        @test isfile(solution_filename_b)

        @test "Costs" in keys(solution)
        @test "Fixed operating (\$)" in keys(solution["Costs"])
        @test "Transportation (\$)" in keys(solution["Costs"])
        @test "Variable operating (\$)" in keys(solution["Costs"])
        @test "Total (\$)" in keys(solution["Costs"])

        @test "Plants" in keys(solution)
        @test "F1" in keys(solution["Plants"])
        @test "F2" in keys(solution["Plants"])
        @test "F3" in keys(solution["Plants"])
        @test "F4" in keys(solution["Plants"])
    end


    @testset "solve (heuristic)" begin
        # Should not crash
        solution = RELOG.solve("$(pwd())/../instances/s1.json", heuristic=true)
    end

    @testset "infeasible solve" begin
        json = JSON.parsefile("$(pwd())/../instances/s1.json")
        for (location_name, location_dict) in json["products"]["P1"]["initial amounts"]
            location_dict["amount (tonne)"] *= 1000
        end
        RELOG.solve(RELOG.parse(json))
    end

    @testset "storage" begin
        basedir = dirname(@__FILE__)
        filename = "$basedir/fixtures/storage.json"
        instance = RELOG.parsefile(filename)
        @test instance.plants[1].storage_limit == 50.0
        @test instance.plants[1].storage_cost == [2.0, 1.5, 1.0]

        solution = RELOG.solve(filename)
        plant_dict = solution["Plants"]["mega plant"]["Chicago"]
        @test plant_dict["Variable operating cost (\$)"] == [500.0, 0.0, 100.0]
        @test plant_dict["Process (tonne)"] == [50.0, 0.0, 50.0]
        @test plant_dict["Storage (tonne)"] == [50.0, 50.0, 0.0]
        @test plant_dict["Storage cost (\$)"] == [100.0, 75.0, 0.0]

        @test solution["Costs"]["Variable operating (\$)"] == [500.0, 0.0, 100.0]
        @test solution["Costs"]["Storage (\$)"] == [100.0, 75.0, 0.0]
        @test solution["Costs"]["Total (\$)"] == [600.0, 75.0, 100.0]
    end
end
@@ -4,38 +4,20 @@

using RELOG, JSON, GZip

load_json_gz(filename) = JSON.parse(GZip.gzopen(filename))

# function check(func, expected_csv_filename::String)
#     solution = load_json_gz("fixtures/nimh_solution.json.gz")
#     actual_csv_filename = tempname()
#     func(solution, actual_csv_filename)
#     @test isfile(actual_csv_filename)
#     if readlines(actual_csv_filename) != readlines(expected_csv_filename)
#         out_filename = replace(expected_csv_filename, ".csv" => "_actual.csv")
#         @error "$func: Unexpected CSV contents: $out_filename"
#         write(out_filename, read(actual_csv_filename))
#         @test false
#     end
# end

@testset "Reports" begin
    # @testset "from fixture" begin
    #     check(RELOG.write_plants_report, "fixtures/nimh_plants.csv")
    #     check(RELOG.write_plant_outputs_report, "fixtures/nimh_plant_outputs.csv")
    #     check(RELOG.write_plant_emissions_report, "fixtures/nimh_plant_emissions.csv")
    #     check(RELOG.write_transportation_report, "fixtures/nimh_transportation.csv")
    #     check(RELOG.write_transportation_emissions_report, "fixtures/nimh_transportation_emissions.csv")
    # end
basedir = @__DIR__

function reports_test()
    @testset "Reports" begin
        @testset "from solve" begin
            solution = RELOG.solve("$(pwd())/../instances/s1.json")
            solution = RELOG.solve(fixture("instances/s1.json"))
            tmp_filename = tempname()
            # The following should not crash
            RELOG.write_plants_report(solution, tmp_filename)
            RELOG.write_plant_outputs_report(solution, tmp_filename)
            RELOG.write_plant_emissions_report(solution, tmp_filename)
            RELOG.write_transportation_report(solution, tmp_filename)
            RELOG.write_plant_outputs_report(solution, tmp_filename)
            RELOG.write_plants_report(solution, tmp_filename)
            RELOG.write_products_report(solution, tmp_filename)
            RELOG.write_transportation_emissions_report(solution, tmp_filename)
            RELOG.write_transportation_report(solution, tmp_filename)
        end
    end
end
@@ -2,10 +2,46 @@
# Written by Alinson Santos Xavier <axavier@anl.gov>

using Test
using RELOG
using Revise

@testset "RELOG" begin
    include("instance_test.jl")
    include("graph_test.jl")
    include("model_test.jl")
    include("reports_test.jl")
includet("instance/compress_test.jl")
includet("instance/geodb_test.jl")
includet("instance/parse_test.jl")
includet("graph/build_test.jl")
includet("model/build_test.jl")
includet("model/solve_test.jl")
includet("reports_test.jl")

function fixture(path)
    for candidate in [
        "fixtures/$path",
        "test/fixtures/$path"
    ]
        if isfile(candidate)
            return candidate
        end
    end
    error("Fixture not found: $path")
end

function runtests()
    @testset "RELOG" begin
        @testset "Instance" begin
            compress_test()
            geodb_test()
            parse_test()
        end
        @testset "Graph" begin
            graph_build_test()
        end
        @testset "Model" begin
            model_build_test()
            model_solve_test()
        end
        reports_test()
    end
    return
end

runtests()
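With this layout, one way the suite can be run (a sketch; it assumes Revise and the test dependencies are installed in the active environment, since runtests.jl calls includet):

# From the repository root:
#   julia --project=. -e 'include("test/runtests.jl")'
# runtests.jl defines fixture() and runtests(), includes each *_test.jl via includet,
# and then calls runtests().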