12 Commits

43 changed files with 1622 additions and 9272 deletions

.github/workflows/lint.yml (vendored, new file, +27 lines)

@@ -0,0 +1,27 @@
name: Lint
on:
push:
pull_request:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@latest
with:
version: '1'
- uses: actions/checkout@v1
- name: Format check
shell: julia --color=yes {0}
run: |
using Pkg
Pkg.add(PackageSpec(name="JuliaFormatter", version="0.14.4"))
using JuliaFormatter
format("src", verbose=true)
format("test", verbose=true)
out = String(read(Cmd(`git diff`)))
if isempty(out)
exit(0)
end
@error "Some files have not been formatted !!!"
write(stdout, out)
exit(1)
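
For reference, the same check can be reproduced locally before pushing; a minimal sketch, assuming Julia 1.x and the same JuliaFormatter pin used by the workflow:

using Pkg
Pkg.add(PackageSpec(name = "JuliaFormatter", version = "0.14.4"))
using JuliaFormatter
format("src", verbose = true)    # rewrites files in place
format("test", verbose = true)
# A clean `git diff` afterwards means the CI check above would pass.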


@@ -1,7 +1,9 @@
name: Build & Test
on:
- push
- pull_request
push:
pull_request:
schedule:
- cron: '45 10 * * *'
jobs:
test:
name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }}

.gitignore (vendored, +1 line)

@@ -8,3 +8,4 @@ instances/*.py
notebooks
.idea
*.lp
Manifest.toml


@@ -17,6 +17,9 @@ clean:
docs:
mkdocs build -d ../docs/$(VERSION)/
format:
julia -e 'using JuliaFormatter; format(["src", "test"], verbose=true);'
test: build/test.log
test-watch:


@@ -1,441 +0,0 @@
# This file is machine-generated - editing it directly is not advised
[[Base64]]
uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
[[BenchmarkTools]]
deps = ["JSON", "Logging", "Printf", "Statistics", "UUIDs"]
git-tree-sha1 = "9e62e66db34540a0c919d72172cc2f642ac71260"
uuid = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
version = "0.5.0"
[[BinaryProvider]]
deps = ["Libdl", "Logging", "SHA"]
git-tree-sha1 = "ecdec412a9abc8db54c0efc5548c64dfce072058"
uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232"
version = "0.5.10"
[[Bzip2_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "03a44490020826950c68005cafb336e5ba08b7e8"
uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0"
version = "1.0.6+4"
[[CEnum]]
git-tree-sha1 = "215a9aa4a1f23fbd05b92769fdd62559488d70e9"
uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82"
version = "0.4.1"
[[CSV]]
deps = ["CategoricalArrays", "DataFrames", "Dates", "Mmap", "Parsers", "PooledArrays", "SentinelArrays", "Tables", "Unicode"]
git-tree-sha1 = "a390152e6850405a48ca51bd7ca33d11a21d6230"
uuid = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
version = "0.7.7"
[[Calculus]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "f641eb0a4f00c343bbc32346e1217b86f3ce9dad"
uuid = "49dc2e85-a5d0-5ad3-a950-438e2897f1b9"
version = "0.5.1"
[[CategoricalArrays]]
deps = ["DataAPI", "Future", "JSON", "Missings", "Printf", "Statistics", "StructTypes", "Unicode"]
git-tree-sha1 = "2ac27f59196a68070e132b25713f9a5bbc5fa0d2"
uuid = "324d7699-5711-5eae-9e2f-1d82baa6b597"
version = "0.8.3"
[[Cbc]]
deps = ["BinaryProvider", "Libdl", "MathOptInterface", "MathProgBase", "SparseArrays", "Test"]
git-tree-sha1 = "62d80f448b5d77b3f0a59cecf6197aad2a3aa280"
uuid = "9961bab8-2fa3-5c5a-9d89-47fab24efd76"
version = "0.6.7"
[[Clp]]
deps = ["BinaryProvider", "CEnum", "Clp_jll", "Libdl", "MathOptInterface", "SparseArrays"]
git-tree-sha1 = "581763750759c1e38df2a35a0b3bdee496a062c7"
uuid = "e2554f3b-3117-50c0-817c-e040a3ddf72d"
version = "0.8.1"
[[Clp_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Osi_jll", "Pkg"]
git-tree-sha1 = "79263d9383ca89b35f31c33ab5b880536a8413a4"
uuid = "06985876-5285-5a41-9fcb-8948a742cc53"
version = "1.17.6+6"
[[CodecBzip2]]
deps = ["Bzip2_jll", "Libdl", "TranscodingStreams"]
git-tree-sha1 = "2e62a725210ce3c3c2e1a3080190e7ca491f18d7"
uuid = "523fee87-0ab8-5b00-afb7-3ecf72e48cfd"
version = "0.7.2"
[[CodecZlib]]
deps = ["TranscodingStreams", "Zlib_jll"]
git-tree-sha1 = "ded953804d019afa9a3f98981d99b33e3db7b6da"
uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
version = "0.7.0"
[[CoinUtils_jll]]
deps = ["CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "ee1f06ab89337b7f194c29377ab174e752cdf60d"
uuid = "be027038-0da8-5614-b30d-e42594cb92df"
version = "2.11.3+3"
[[CommonSubexpressions]]
deps = ["MacroTools", "Test"]
git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7"
uuid = "bbf7d656-a473-5ed7-a52c-81e309532950"
version = "0.3.0"
[[Compat]]
deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"]
git-tree-sha1 = "7c7f4cda0d58ec999189d70f5ee500348c4b4df1"
uuid = "34da2185-b29b-5c13-b0c7-acf172513d20"
version = "3.16.0"
[[CompilerSupportLibraries_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "7c4f882c41faa72118841185afc58a2eb00ef612"
uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
version = "0.3.3+0"
[[CoordinateTransformations]]
deps = ["LinearAlgebra", "StaticArrays"]
git-tree-sha1 = "c230b1d94db9fdd073168830437e64b9db627fcb"
uuid = "150eb455-5306-5404-9cee-2592286d6298"
version = "0.6.0"
[[DataAPI]]
git-tree-sha1 = "176e23402d80e7743fc26c19c681bfb11246af32"
uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a"
version = "1.3.0"
[[DataFrames]]
deps = ["CategoricalArrays", "Compat", "DataAPI", "Future", "InvertedIndices", "IteratorInterfaceExtensions", "Missings", "PooledArrays", "Printf", "REPL", "Reexport", "SortingAlgorithms", "Statistics", "TableTraits", "Tables", "Unicode"]
git-tree-sha1 = "a7c1c9a6e47a92321bbc9d500dab9b04cc4a6a39"
uuid = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
version = "0.21.7"
[[DataStructures]]
deps = ["InteractiveUtils", "OrderedCollections"]
git-tree-sha1 = "88d48e133e6d3dd68183309877eac74393daa7eb"
uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
version = "0.17.20"
[[DataValueInterfaces]]
git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6"
uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464"
version = "1.0.0"
[[Dates]]
deps = ["Printf"]
uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
[[DelimitedFiles]]
deps = ["Mmap"]
uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab"
[[DiffResults]]
deps = ["StaticArrays"]
git-tree-sha1 = "da24935df8e0c6cf28de340b958f6aac88eaa0cc"
uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5"
version = "1.0.2"
[[DiffRules]]
deps = ["NaNMath", "Random", "SpecialFunctions"]
git-tree-sha1 = "eb0c34204c8410888844ada5359ac8b96292cfd1"
uuid = "b552c78f-8df3-52c6-915a-8e097449b14b"
version = "1.0.1"
[[Distributed]]
deps = ["Random", "Serialization", "Sockets"]
uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
[[ForwardDiff]]
deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "NaNMath", "Random", "SpecialFunctions", "StaticArrays"]
git-tree-sha1 = "1d090099fb82223abc48f7ce176d3f7696ede36d"
uuid = "f6369f11-7733-5829-9624-2563aa707210"
version = "0.10.12"
[[Future]]
deps = ["Random"]
uuid = "9fa8497b-333b-5362-9e8d-4d0656e87820"
[[GZip]]
deps = ["Libdl"]
git-tree-sha1 = "039be665faf0b8ae36e089cd694233f5dee3f7d6"
uuid = "92fee26a-97fe-5a0c-ad85-20a5f3185b63"
version = "0.5.1"
[[Geodesy]]
deps = ["CoordinateTransformations", "Dates", "LinearAlgebra", "StaticArrays", "Test"]
git-tree-sha1 = "f80ea86cb88db337a1906e245e495592f0b5cc25"
uuid = "0ef565a4-170c-5f04-8de2-149903a85f3d"
version = "0.5.0"
[[HTTP]]
deps = ["Base64", "Dates", "IniFile", "MbedTLS", "Sockets"]
git-tree-sha1 = "c7ec02c4c6a039a98a15f955462cd7aea5df4508"
uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3"
version = "0.8.19"
[[IniFile]]
deps = ["Test"]
git-tree-sha1 = "098e4d2c533924c921f9f9847274f2ad89e018b8"
uuid = "83e8ac13-25f8-5344-8a64-a9f2b223428f"
version = "0.5.0"
[[InteractiveUtils]]
deps = ["Markdown"]
uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
[[InvertedIndices]]
deps = ["Test"]
git-tree-sha1 = "15732c475062348b0165684ffe28e85ea8396afc"
uuid = "41ab1584-1d38-5bbf-9106-f11c6c58b48f"
version = "1.0.0"
[[IteratorInterfaceExtensions]]
git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856"
uuid = "82899510-4779-5014-852e-03e436cf321d"
version = "1.0.0"
[[JSON]]
deps = ["Dates", "Mmap", "Parsers", "Unicode"]
git-tree-sha1 = "81690084b6198a2e1da36fcfda16eeca9f9f24e4"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.21.1"
[[JSONSchema]]
deps = ["BinaryProvider", "HTTP", "JSON"]
git-tree-sha1 = "b0a7f9328967df5213691d318a03cf70ea8c76b1"
uuid = "7d188eb4-7ad8-530c-ae41-71a32a6d4692"
version = "0.2.0"
[[JuMP]]
deps = ["Calculus", "DataStructures", "ForwardDiff", "LinearAlgebra", "MathOptInterface", "MutableArithmetics", "NaNMath", "Random", "SparseArrays", "Statistics"]
git-tree-sha1 = "cbab42e2e912109d27046aa88f02a283a9abac7c"
uuid = "4076af6c-e467-56ae-b986-b466b2749572"
version = "0.21.3"
[[LibGit2]]
deps = ["Printf"]
uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
[[Libdl]]
uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
[[LinearAlgebra]]
deps = ["Libdl"]
uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
[[Logging]]
uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
[[MacroTools]]
deps = ["Markdown", "Random"]
git-tree-sha1 = "f7d2e3f654af75f01ec49be82c231c382214223a"
uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
version = "0.5.5"
[[Markdown]]
deps = ["Base64"]
uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
[[MathOptInterface]]
deps = ["BenchmarkTools", "CodecBzip2", "CodecZlib", "JSON", "JSONSchema", "LinearAlgebra", "MutableArithmetics", "OrderedCollections", "SparseArrays", "Test", "Unicode"]
git-tree-sha1 = "27f2ef85879b8f1d144266ab44f076ba0dfbd8a1"
uuid = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
version = "0.9.13"
[[MathProgBase]]
deps = ["LinearAlgebra", "SparseArrays"]
git-tree-sha1 = "9abbe463a1e9fc507f12a69e7f29346c2cdc472c"
uuid = "fdba3010-5040-5b88-9595-932c9decdf73"
version = "0.7.8"
[[MbedTLS]]
deps = ["Dates", "MbedTLS_jll", "Random", "Sockets"]
git-tree-sha1 = "426a6978b03a97ceb7ead77775a1da066343ec6e"
uuid = "739be429-bea8-5141-9913-cc70e7f3736d"
version = "1.0.2"
[[MbedTLS_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "c0b1286883cac4e2b617539de41111e0776d02e8"
uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1"
version = "2.16.8+0"
[[Missings]]
deps = ["DataAPI"]
git-tree-sha1 = "ed61674a0864832495ffe0a7e889c0da76b0f4c8"
uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28"
version = "0.4.4"
[[Mmap]]
uuid = "a63ad114-7e13-5084-954f-fe012c677804"
[[MutableArithmetics]]
deps = ["LinearAlgebra", "SparseArrays", "Test"]
git-tree-sha1 = "6cf09794783b9de2e662c4e8b60d743021e338d0"
uuid = "d8a4904e-b15c-11e9-3269-09a3773c0cb0"
version = "0.2.10"
[[NaNMath]]
git-tree-sha1 = "c84c576296d0e2fbb3fc134d3e09086b3ea617cd"
uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
version = "0.3.4"
[[OpenBLAS32_jll]]
deps = ["CompilerSupportLibraries_jll", "Libdl", "Pkg"]
git-tree-sha1 = "793b33911239d2651c356c823492b58d6490d36a"
uuid = "656ef2d0-ae68-5445-9ca0-591084a874a2"
version = "0.3.9+4"
[[OpenSpecFun_jll]]
deps = ["CompilerSupportLibraries_jll", "Libdl", "Pkg"]
git-tree-sha1 = "d51c416559217d974a1113522d5919235ae67a87"
uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e"
version = "0.5.3+3"
[[OrderedCollections]]
git-tree-sha1 = "16c08bf5dba06609fe45e30860092d6fa41fde7b"
uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
version = "1.3.1"
[[Osi_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "bd436a97280df40938e66ae8d18e57aceb072856"
uuid = "7da25872-d9ce-5375-a4d3-7a845f58efdd"
version = "0.108.5+3"
[[PackageCompiler]]
deps = ["Libdl", "Pkg", "UUIDs"]
git-tree-sha1 = "98aa9c653e1dc3473bb5050caf8501293db9eee1"
uuid = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
version = "1.2.1"
[[Parsers]]
deps = ["Dates", "Test"]
git-tree-sha1 = "8077624b3c450b15c087944363606a6ba12f925e"
uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
version = "1.0.10"
[[Pkg]]
deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"]
uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
[[PooledArrays]]
deps = ["DataAPI"]
git-tree-sha1 = "b1333d4eced1826e15adbdf01a4ecaccca9d353c"
uuid = "2dfb63ee-cc39-5dd5-95bd-886bf059d720"
version = "0.5.3"
[[Printf]]
deps = ["Unicode"]
uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
[[ProgressBars]]
deps = ["Printf"]
git-tree-sha1 = "e66732bbdaad368cfc642cef1f639df5812dc818"
uuid = "49802e3a-d2f1-5c88-81d8-b72133a6f568"
version = "0.6.0"
[[REPL]]
deps = ["InteractiveUtils", "Markdown", "Sockets"]
uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
[[Random]]
deps = ["Serialization"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[Reexport]]
deps = ["Pkg"]
git-tree-sha1 = "7b1d07f411bc8ddb7977ec7f377b97b158514fe0"
uuid = "189a3867-3050-52da-a836-e630ba90ab69"
version = "0.2.0"
[[SHA]]
uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
[[SentinelArrays]]
deps = ["Dates", "Random"]
git-tree-sha1 = "7a74946ace3b34fbb6c10e61b6e250b33d7e758c"
uuid = "91c51154-3ec4-41a3-a24f-3f23e20d615c"
version = "1.2.15"
[[Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
[[SharedArrays]]
deps = ["Distributed", "Mmap", "Random", "Serialization"]
uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383"
[[Sockets]]
uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
[[SortingAlgorithms]]
deps = ["DataStructures", "Random", "Test"]
git-tree-sha1 = "03f5898c9959f8115e30bc7226ada7d0df554ddd"
uuid = "a2af1166-a08f-5f64-846c-94a0d3cef48c"
version = "0.3.1"
[[SparseArrays]]
deps = ["LinearAlgebra", "Random"]
uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
[[SpecialFunctions]]
deps = ["OpenSpecFun_jll"]
git-tree-sha1 = "d8d8b8a9f4119829410ecd706da4cc8594a1e020"
uuid = "276daf66-3868-5448-9aa4-cd146d93841b"
version = "0.10.3"
[[StaticArrays]]
deps = ["LinearAlgebra", "Random", "Statistics"]
git-tree-sha1 = "016d1e1a00fabc556473b07161da3d39726ded35"
uuid = "90137ffa-7385-5640-81b9-e52037218182"
version = "0.12.4"
[[Statistics]]
deps = ["LinearAlgebra", "SparseArrays"]
uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
[[StructTypes]]
deps = ["Dates", "UUIDs"]
git-tree-sha1 = "1ed04f622a39d2e5a6747c3a70be040c00333933"
uuid = "856f2bd8-1eba-4b0a-8007-ebc267875bd4"
version = "1.1.0"
[[TableTraits]]
deps = ["IteratorInterfaceExtensions"]
git-tree-sha1 = "b1ad568ba658d8cbb3b892ed5380a6f3e781a81e"
uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c"
version = "1.0.0"
[[Tables]]
deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "TableTraits", "Test"]
git-tree-sha1 = "b7f762e9820b7fab47544c36f26f54ac59cf8abf"
uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c"
version = "1.0.5"
[[Test]]
deps = ["Distributed", "InteractiveUtils", "Logging", "Random"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[TranscodingStreams]]
deps = ["Random", "Test"]
git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c"
uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
version = "0.9.5"
[[UUIDs]]
deps = ["Random", "SHA"]
uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[[Unicode]]
uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
[[Zlib_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "fdd89e5ab270ea0f2a0174bd9093e557d06d4bfa"
uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
version = "1.2.11+16"


@@ -3,9 +3,23 @@
# Released under the modified BSD license. See COPYING.md for more details.
module RELOG
include("dotdict.jl")
include("instance.jl")
include("graph.jl")
include("model.jl")
include("reports.jl")
include("instance/structs.jl")
include("graph/structs.jl")
include("graph/build.jl")
include("graph/csv.jl")
include("instance/compress.jl")
include("instance/parse.jl")
include("instance/validate.jl")
include("model/build.jl")
include("model/getsol.jl")
include("model/solve.jl")
include("reports/plant_emissions.jl")
include("reports/plant_outputs.jl")
include("reports/plants.jl")
include("reports/tr_emissions.jl")
include("reports/tr.jl")
include("reports/write.jl")
end


@@ -1,68 +0,0 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
struct DotDict
inner::Dict
end
DotDict() = DotDict(Dict())
function Base.setproperty!(d::DotDict, key::Symbol, value)
setindex!(getfield(d, :inner), value, key)
end
function Base.getproperty(d::DotDict, key::Symbol)
(key == :inner ? getfield(d, :inner) : d.inner[key])
end
function Base.getindex(d::DotDict, key::Int64)
d.inner[Symbol(key)]
end
function Base.getindex(d::DotDict, key::Symbol)
d.inner[key]
end
function Base.keys(d::DotDict)
keys(d.inner)
end
function Base.values(d::DotDict)
values(d.inner)
end
function Base.iterate(d::DotDict)
iterate(values(d.inner))
end
function Base.iterate(d::DotDict, v::Int64)
iterate(values(d.inner), v)
end
function Base.length(d::DotDict)
length(values(d.inner))
end
function Base.show(io::IO, d::DotDict)
print(io, "DotDict with $(length(keys(d.inner))) entries:\n")
count = 0
for k in keys(d.inner)
count += 1
if count > 10
print(io, " ...\n")
break
end
print(io, " :$(k) => $(d.inner[k])\n")
end
end
function recursive_to_dot_dict(el)
if typeof(el) == Dict{String, Any}
return DotDict(Dict(Symbol(k) => recursive_to_dot_dict(el[k]) for k in keys(el)))
else
return el
end
end
export recursive_to_dot_dict
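
For context on this removal: DotDict let dictionary entries be read and written as properties. A minimal sketch of its former usage (hypothetical values):

d = DotDict()
d.flow = 1.5       # stored in the inner Dict via setproperty!
d.capacity = 250.0
d.flow             # 1.5, resolved through getproperty
length(d)          # 2
# Elsewhere in this changeset the wrapper is replaced by JuMP's object
# dictionary, e.g. model[:flow] instead of model.vars.flow.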


@@ -4,43 +4,12 @@
using Geodesy
abstract type Node end
function calculate_distance(source_lat, source_lon, dest_lat, dest_lon)::Float64
x = LLA(source_lat, source_lon, 0.0)
y = LLA(dest_lat, dest_lon, 0.0)
return round(distance(x, y) / 1000.0, digits = 2)
end
mutable struct Arc
source::Node
dest::Node
values::Dict{String, Float64}
end
mutable struct ProcessNode <: Node
index::Int
location::Plant
incoming_arcs::Array{Arc}
outgoing_arcs::Array{Arc}
end
mutable struct ShippingNode <: Node
index::Int
location::Union{Plant, CollectionCenter}
product::Product
incoming_arcs::Array{Arc}
outgoing_arcs::Array{Arc}
end
mutable struct Graph
process_nodes::Array{ProcessNode}
plant_shipping_nodes::Array{ShippingNode}
collection_shipping_nodes::Array{ShippingNode}
arcs::Array{Arc}
end
function build_graph(instance::Instance)::Graph
arcs = []
next_index = 0
@@ -48,10 +17,9 @@ function build_graph(instance::Instance)::Graph
plant_shipping_nodes = ShippingNode[]
collection_shipping_nodes = ShippingNode[]
process_nodes_by_input_product = Dict(product => ProcessNode[]
for product in instance.products)
shipping_nodes_by_plant = Dict(plant => []
for plant in instance.plants)
process_nodes_by_input_product =
Dict(product => ProcessNode[] for product in instance.products)
shipping_nodes_by_plant = Dict(plant => [] for plant in instance.plants)
# Build collection center shipping nodes
for center in instance.collection_centers
@@ -78,10 +46,12 @@ function build_graph(instance::Instance)::Graph
# Build arcs from collection centers to plants, and from one plant to another
for source in [collection_shipping_nodes; plant_shipping_nodes]
for dest in process_nodes_by_input_product[source.product]
distance = calculate_distance(source.location.latitude,
distance = calculate_distance(
source.location.latitude,
source.location.longitude,
dest.location.latitude,
dest.location.longitude)
dest.location.longitude,
)
values = Dict("distance" => distance)
arc = Arc(source, dest, values)
push!(source.outgoing_arcs, arc)
@@ -103,24 +73,5 @@ function build_graph(instance::Instance)::Graph
end
end
return Graph(process_nodes,
plant_shipping_nodes,
collection_shipping_nodes,
arcs)
end
function to_csv(graph::Graph)
result = ""
for a in graph.arcs
result *= "$(a.source.index),$(a.dest.index)\n"
end
return result
end
function calculate_distance(source_lat, source_lon, dest_lat, dest_lon)::Float64
x = LLA(source_lat, source_lon, 0.0)
y = LLA(dest_lat, dest_lon, 0.0)
return round(distance(x, y) / 1000.0, digits=2)
return Graph(process_nodes, plant_shipping_nodes, collection_shipping_nodes, arcs)
end
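
As a usage note for the calculate_distance helper kept above, it returns the straight-line distance between two coordinates in kilometres, rounded to two decimals (hypothetical coordinates):

calculate_distance(41.88, -87.63, 41.70, -87.98)   # roughly 35 km, between two points near Chicago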

src/graph/csv.jl (new file, +11 lines)

@@ -0,0 +1,11 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
function to_csv(graph::Graph)
result = ""
for a in graph.arcs
result *= "$(a.source.index),$(a.dest.index)\n"
end
return result
end
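
The serialisation is one "source,dest" index pair per arc. For a hypothetical graph with arcs 1→3 and 2→3:

to_csv(graph)   # returns "1,3\n2,3\n"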

src/graph/structs.jl (new file, +35 lines)

@@ -0,0 +1,35 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Geodesy
abstract type Node end
mutable struct Arc
source::Node
dest::Node
values::Dict{String,Float64}
end
mutable struct ProcessNode <: Node
index::Int
location::Plant
incoming_arcs::Vector{Arc}
outgoing_arcs::Vector{Arc}
end
mutable struct ShippingNode <: Node
index::Int
location::Union{Plant,CollectionCenter}
product::Product
incoming_arcs::Vector{Arc}
outgoing_arcs::Vector{Arc}
end
mutable struct Graph
process_nodes::Vector{ProcessNode}
plant_shipping_nodes::Vector{ShippingNode}
collection_shipping_nodes::Vector{ShippingNode}
arcs::Vector{Arc}
end


@@ -1,281 +0,0 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataStructures
using JSON
using JSONSchema
using Printf
using Statistics
mutable struct Product
name::String
transportation_cost::Array{Float64}
transportation_energy::Array{Float64}
transportation_emissions::Dict{String, Array{Float64}}
end
mutable struct CollectionCenter
index::Int64
name::String
latitude::Float64
longitude::Float64
product::Product
amount::Array{Float64}
end
mutable struct PlantSize
capacity::Float64
variable_operating_cost::Array{Float64}
fixed_operating_cost::Array{Float64}
opening_cost::Array{Float64}
end
mutable struct Plant
index::Int64
plant_name::String
location_name::String
input::Product
output::Dict{Product, Float64}
latitude::Float64
longitude::Float64
disposal_limit::Dict{Product, Array{Float64}}
disposal_cost::Dict{Product, Array{Float64}}
sizes::Array{PlantSize}
energy::Array{Float64}
emissions::Dict{String, Array{Float64}}
storage_limit::Float64
storage_cost::Array{Float64}
end
mutable struct Instance
time::Int64
products::Array{Product, 1}
collection_centers::Array{CollectionCenter, 1}
plants::Array{Plant, 1}
building_period::Array{Int64}
end
function validate(json, schema)
result = JSONSchema.validate(json, schema)
if result !== nothing
if result isa JSONSchema.SingleIssue
path = join(result.path, "")
if length(path) == 0
path = "root"
end
msg = "$(result.msg) in $(path)"
else
msg = convert(String, result)
end
throw(msg)
end
end
function parsefile(path::String)::Instance
return RELOG.parse(JSON.parsefile(path))
end
function parse(json)::Instance
basedir = dirname(@__FILE__)
json_schema = JSON.parsefile("$basedir/schemas/input.json")
validate(json, Schema(json_schema))
T = json["parameters"]["time horizon (years)"]
json_schema["definitions"]["TimeSeries"]["minItems"] = T
json_schema["definitions"]["TimeSeries"]["maxItems"] = T
validate(json, Schema(json_schema))
building_period = [1]
if "building period (years)" in keys(json)
building_period = json["building period (years)"]
end
plants = Plant[]
products = Product[]
collection_centers = CollectionCenter[]
prod_name_to_product = Dict{String, Product}()
# Create products
for (product_name, product_dict) in json["products"]
cost = product_dict["transportation cost (\$/km/tonne)"]
energy = zeros(T)
emissions = Dict()
if "transportation energy (J/km/tonne)" in keys(product_dict)
energy = product_dict["transportation energy (J/km/tonne)"]
end
if "transportation emissions (tonne/km/tonne)" in keys(product_dict)
emissions = product_dict["transportation emissions (tonne/km/tonne)"]
end
product = Product(product_name, cost, energy, emissions)
push!(products, product)
prod_name_to_product[product_name] = product
# Create collection centers
if "initial amounts" in keys(product_dict)
for (center_name, center_dict) in product_dict["initial amounts"]
center = CollectionCenter(length(collection_centers) + 1,
center_name,
center_dict["latitude (deg)"],
center_dict["longitude (deg)"],
product,
center_dict["amount (tonne)"])
push!(collection_centers, center)
end
end
end
# Create plants
for (plant_name, plant_dict) in json["plants"]
input = prod_name_to_product[plant_dict["input"]]
output = Dict()
# Plant outputs
if "outputs (tonne/tonne)" in keys(plant_dict)
output = Dict(prod_name_to_product[key] => value
for (key, value) in plant_dict["outputs (tonne/tonne)"]
if value > 0)
end
energy = zeros(T)
emissions = Dict()
if "energy (GJ/tonne)" in keys(plant_dict)
energy = plant_dict["energy (GJ/tonne)"]
end
if "emissions (tonne/tonne)" in keys(plant_dict)
emissions = plant_dict["emissions (tonne/tonne)"]
end
for (location_name, location_dict) in plant_dict["locations"]
sizes = PlantSize[]
disposal_limit = Dict(p => [0.0 for t in 1:T] for p in keys(output))
disposal_cost = Dict(p => [0.0 for t in 1:T] for p in keys(output))
# Disposal
if "disposal" in keys(location_dict)
for (product_name, disposal_dict) in location_dict["disposal"]
limit = [1e8 for t in 1:T]
if "limit (tonne)" in keys(disposal_dict)
limit = disposal_dict["limit (tonne)"]
end
disposal_limit[prod_name_to_product[product_name]] = limit
disposal_cost[prod_name_to_product[product_name]] = disposal_dict["cost (\$/tonne)"]
end
end
# Capacities
for (capacity_name, capacity_dict) in location_dict["capacities (tonne)"]
push!(sizes, PlantSize(Base.parse(Float64, capacity_name),
capacity_dict["variable operating cost (\$/tonne)"],
capacity_dict["fixed operating cost (\$)"],
capacity_dict["opening cost (\$)"]))
end
length(sizes) > 1 || push!(sizes, sizes[1])
sort!(sizes, by = x -> x.capacity)
# Storage
storage_limit = 0
storage_cost = zeros(T)
if "storage" in keys(location_dict)
storage_dict = location_dict["storage"]
storage_limit = storage_dict["limit (tonne)"]
storage_cost = storage_dict["cost (\$/tonne)"]
end
# Validation: Capacities
if length(sizes) != 2
throw("At most two capacities are supported")
end
if sizes[1].variable_operating_cost != sizes[2].variable_operating_cost
throw("Variable operating costs must be the same for all capacities")
end
plant = Plant(length(plants) + 1,
plant_name,
location_name,
input,
output,
location_dict["latitude (deg)"],
location_dict["longitude (deg)"],
disposal_limit,
disposal_cost,
sizes,
energy,
emissions,
storage_limit,
storage_cost)
push!(plants, plant)
end
end
@info @sprintf("%12d collection centers", length(collection_centers))
@info @sprintf("%12d candidate plant locations", length(plants))
return Instance(T, products, collection_centers, plants, building_period)
end
"""
_compress(instance::Instance)
Create a single-period instance from a multi-period one. Specifically,
replaces every time-dependent attribute, such as initial_amounts,
by a list with a single element, which is either a sum, an average,
or something else that makes sense to that specific attribute.
"""
function _compress(instance::Instance)::Instance
T = instance.time
compressed = deepcopy(instance)
compressed.time = 1
compressed.building_period = [1]
# Compress products
for p in compressed.products
p.transportation_cost = [mean(p.transportation_cost)]
p.transportation_energy = [mean(p.transportation_energy)]
for (emission_name, emission_value) in p.transportation_emissions
p.transportation_emissions[emission_name] = [mean(emission_value)]
end
end
# Compress collection centers
for c in compressed.collection_centers
c.amount = [maximum(c.amount) * T]
end
# Compress plants
for plant in compressed.plants
plant.energy = [mean(plant.energy)]
for (emission_name, emission_value) in plant.emissions
plant.emissions[emission_name] = [mean(emission_value)]
end
for s in plant.sizes
s.capacity *= T
s.variable_operating_cost = [mean(s.variable_operating_cost)]
s.opening_cost = [s.opening_cost[1]]
s.fixed_operating_cost = [sum(s.fixed_operating_cost)]
end
for (prod_name, disp_limit) in plant.disposal_limit
plant.disposal_limit[prod_name] = [sum(disp_limit)]
end
for (prod_name, disp_cost) in plant.disposal_cost
plant.disposal_cost[prod_name] = [mean(disp_cost)]
end
end
return compressed
end

src/instance/compress.jl (new file, +60 lines)

@@ -0,0 +1,60 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataStructures
using JSON
using JSONSchema
using Printf
using Statistics
"""
_compress(instance::Instance)
Create a single-period instance from a multi-period one. Specifically,
replaces every time-dependent attribute, such as initial_amounts,
by a list with a single element, which is either a sum, an average,
or something else that makes sense to that specific attribute.
"""
function _compress(instance::Instance)::Instance
T = instance.time
compressed = deepcopy(instance)
compressed.time = 1
compressed.building_period = [1]
# Compress products
for p in compressed.products
p.transportation_cost = [mean(p.transportation_cost)]
p.transportation_energy = [mean(p.transportation_energy)]
for (emission_name, emission_value) in p.transportation_emissions
p.transportation_emissions[emission_name] = [mean(emission_value)]
end
end
# Compress collection centers
for c in compressed.collection_centers
c.amount = [maximum(c.amount) * T]
end
# Compress plants
for plant in compressed.plants
plant.energy = [mean(plant.energy)]
for (emission_name, emission_value) in plant.emissions
plant.emissions[emission_name] = [mean(emission_value)]
end
for s in plant.sizes
s.capacity *= T
s.variable_operating_cost = [mean(s.variable_operating_cost)]
s.opening_cost = [s.opening_cost[1]]
s.fixed_operating_cost = [sum(s.fixed_operating_cost)]
end
for (prod_name, disp_limit) in plant.disposal_limit
plant.disposal_limit[prod_name] = [sum(disp_limit)]
end
for (prod_name, disp_cost) in plant.disposal_cost
plant.disposal_cost[prod_name] = [mean(disp_cost)]
end
end
return compressed
end
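
To make the compression rules concrete, hypothetical three-period values (T = 3) are mapped as follows:

# transportation_cost   = [1.0, 2.0, 3.0]   ->  [2.0]    (mean)
# amount                = [10.0, 20.0, 5.0] ->  [60.0]   (maximum * T)
# fixed_operating_cost  = [5.0, 5.0, 5.0]   ->  [15.0]   (sum over periods)
# opening_cost          = [9.0, 8.0, 7.0]   ->  [9.0]    (first period only)
# capacity              = 100.0             ->  300.0    (scaled by T)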

src/instance/parse.jl (new file, +168 lines)

@@ -0,0 +1,168 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataStructures
using JSON
using JSONSchema
using Printf
using Statistics
function parsefile(path::String)::Instance
return RELOG.parse(JSON.parsefile(path))
end
function parse(json)::Instance
basedir = dirname(@__FILE__)
json_schema = JSON.parsefile("$basedir/../schemas/input.json")
validate(json, Schema(json_schema))
T = json["parameters"]["time horizon (years)"]
json_schema["definitions"]["TimeSeries"]["minItems"] = T
json_schema["definitions"]["TimeSeries"]["maxItems"] = T
validate(json, Schema(json_schema))
building_period = [1]
if "building period (years)" in keys(json)
building_period = json["building period (years)"]
end
plants = Plant[]
products = Product[]
collection_centers = CollectionCenter[]
prod_name_to_product = Dict{String,Product}()
# Create products
for (product_name, product_dict) in json["products"]
cost = product_dict["transportation cost (\$/km/tonne)"]
energy = zeros(T)
emissions = Dict()
if "transportation energy (J/km/tonne)" in keys(product_dict)
energy = product_dict["transportation energy (J/km/tonne)"]
end
if "transportation emissions (tonne/km/tonne)" in keys(product_dict)
emissions = product_dict["transportation emissions (tonne/km/tonne)"]
end
product = Product(product_name, cost, energy, emissions)
push!(products, product)
prod_name_to_product[product_name] = product
# Create collection centers
if "initial amounts" in keys(product_dict)
for (center_name, center_dict) in product_dict["initial amounts"]
center = CollectionCenter(
length(collection_centers) + 1,
center_name,
center_dict["latitude (deg)"],
center_dict["longitude (deg)"],
product,
center_dict["amount (tonne)"],
)
push!(collection_centers, center)
end
end
end
# Create plants
for (plant_name, plant_dict) in json["plants"]
input = prod_name_to_product[plant_dict["input"]]
output = Dict()
# Plant outputs
if "outputs (tonne/tonne)" in keys(plant_dict)
output = Dict(
prod_name_to_product[key] => value for
(key, value) in plant_dict["outputs (tonne/tonne)"] if value > 0
)
end
energy = zeros(T)
emissions = Dict()
if "energy (GJ/tonne)" in keys(plant_dict)
energy = plant_dict["energy (GJ/tonne)"]
end
if "emissions (tonne/tonne)" in keys(plant_dict)
emissions = plant_dict["emissions (tonne/tonne)"]
end
for (location_name, location_dict) in plant_dict["locations"]
sizes = PlantSize[]
disposal_limit = Dict(p => [0.0 for t = 1:T] for p in keys(output))
disposal_cost = Dict(p => [0.0 for t = 1:T] for p in keys(output))
# Disposal
if "disposal" in keys(location_dict)
for (product_name, disposal_dict) in location_dict["disposal"]
limit = [1e8 for t = 1:T]
if "limit (tonne)" in keys(disposal_dict)
limit = disposal_dict["limit (tonne)"]
end
disposal_limit[prod_name_to_product[product_name]] = limit
disposal_cost[prod_name_to_product[product_name]] =
disposal_dict["cost (\$/tonne)"]
end
end
# Capacities
for (capacity_name, capacity_dict) in location_dict["capacities (tonne)"]
push!(
sizes,
PlantSize(
Base.parse(Float64, capacity_name),
capacity_dict["variable operating cost (\$/tonne)"],
capacity_dict["fixed operating cost (\$)"],
capacity_dict["opening cost (\$)"],
),
)
end
length(sizes) > 1 || push!(sizes, sizes[1])
sort!(sizes, by = x -> x.capacity)
# Storage
storage_limit = 0
storage_cost = zeros(T)
if "storage" in keys(location_dict)
storage_dict = location_dict["storage"]
storage_limit = storage_dict["limit (tonne)"]
storage_cost = storage_dict["cost (\$/tonne)"]
end
# Validation: Capacities
if length(sizes) != 2
throw("At most two capacities are supported")
end
if sizes[1].variable_operating_cost != sizes[2].variable_operating_cost
throw("Variable operating costs must be the same for all capacities")
end
plant = Plant(
length(plants) + 1,
plant_name,
location_name,
input,
output,
location_dict["latitude (deg)"],
location_dict["longitude (deg)"],
disposal_limit,
disposal_cost,
sizes,
energy,
emissions,
storage_limit,
storage_cost,
)
push!(plants, plant)
end
end
@info @sprintf("%12d collection centers", length(collection_centers))
@info @sprintf("%12d candidate plant locations", length(plants))
return Instance(T, products, collection_centers, plants, building_period)
end
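
As a usage sketch (hypothetical file path): parsefile reads the JSON, validates it twice against the schema (generically, then with the TimeSeries length pinned to the time horizon), and returns an Instance:

instance = RELOG.parsefile("my_instance.json")
instance.time              # time horizon in years
length(instance.plants)    # number of candidate plant locations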

src/instance/structs.jl (new file, +57 lines)

@@ -0,0 +1,57 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataStructures
using JSON
using JSONSchema
using Printf
using Statistics
mutable struct Product
name::String
transportation_cost::Vector{Float64}
transportation_energy::Vector{Float64}
transportation_emissions::Dict{String,Vector{Float64}}
end
mutable struct CollectionCenter
index::Int64
name::String
latitude::Float64
longitude::Float64
product::Product
amount::Vector{Float64}
end
mutable struct PlantSize
capacity::Float64
variable_operating_cost::Vector{Float64}
fixed_operating_cost::Vector{Float64}
opening_cost::Vector{Float64}
end
mutable struct Plant
index::Int64
plant_name::String
location_name::String
input::Product
output::Dict{Product,Float64}
latitude::Float64
longitude::Float64
disposal_limit::Dict{Product,Vector{Float64}}
disposal_cost::Dict{Product,Vector{Float64}}
sizes::Vector{PlantSize}
energy::Vector{Float64}
emissions::Dict{String,Vector{Float64}}
storage_limit::Float64
storage_cost::Vector{Float64}
end
mutable struct Instance
time::Int64
products::Vector{Product}
collection_centers::Vector{CollectionCenter}
plants::Vector{Plant}
building_period::Vector{Int64}
end
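
A minimal, hypothetical illustration of the core structs for a single-period instance (all field values made up):

battery = Product("Battery", [0.25], [0.0], Dict("CO2 (tonne)" => [0.0]))
center  = CollectionCenter(1, "Chicago", 41.88, -87.63, battery, [100.0])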

src/instance/validate.jl (new file, +25 lines)

@@ -0,0 +1,25 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataStructures
using JSON
using JSONSchema
using Printf
using Statistics
function validate(json, schema)
result = JSONSchema.validate(json, schema)
if result !== nothing
if result isa JSONSchema.SingleIssue
path = join(result.path, "")
if length(path) == 0
path = "root"
end
msg = "$(result.msg) in $(path)"
else
msg = convert(String, result)
end
throw(msg)
end
end
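
A brief usage sketch (hypothetical instance file, run from the repository root): validate either returns nothing or throws a message of the form "&lt;issue&gt; in &lt;path&gt;":

schema = Schema(JSON.parsefile("src/schemas/input.json"))
validate(JSON.parsefile("my_instance.json"), schema)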


@@ -1,535 +0,0 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP, LinearAlgebra, Geodesy, Cbc, Clp, ProgressBars, Printf, DataStructures
mutable struct ManufacturingModel
mip::JuMP.Model
vars::DotDict
eqs::DotDict
instance::Instance
graph::Graph
end
function build_model(instance::Instance, graph::Graph, optimizer)::ManufacturingModel
model = ManufacturingModel(Model(optimizer), DotDict(), DotDict(), instance, graph)
create_vars!(model)
create_objective_function!(model)
create_shipping_node_constraints!(model)
create_process_node_constraints!(model)
return model
end
function create_vars!(model::ManufacturingModel)
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
vars.flow = Dict((a, t) => @variable(mip, lower_bound=0)
for a in graph.arcs, t in 1:T)
vars.dispose = Dict((n, t) => @variable(mip,
lower_bound=0,
upper_bound=n.location.disposal_limit[n.product][t])
for n in values(graph.plant_shipping_nodes), t in 1:T)
vars.store = Dict((n, t) => @variable(mip,
lower_bound=0,
upper_bound=n.location.storage_limit)
for n in values(graph.process_nodes), t in 1:T)
vars.process = Dict((n, t) => @variable(mip,
lower_bound = 0)
for n in values(graph.process_nodes), t in 1:T)
vars.open_plant = Dict((n, t) => @variable(mip, binary=true)
for n in values(graph.process_nodes), t in 1:T)
vars.is_open = Dict((n, t) => @variable(mip, binary=true)
for n in values(graph.process_nodes), t in 1:T)
vars.capacity = Dict((n, t) => @variable(mip,
lower_bound = 0,
upper_bound = n.location.sizes[2].capacity)
for n in values(graph.process_nodes), t in 1:T)
vars.expansion = Dict((n, t) => @variable(mip,
lower_bound = 0,
upper_bound = n.location.sizes[2].capacity -
n.location.sizes[1].capacity)
for n in values(graph.process_nodes), t in 1:T)
end
function slope_open(plant, t)
if plant.sizes[2].capacity <= plant.sizes[1].capacity
0.0
else
(plant.sizes[2].opening_cost[t] - plant.sizes[1].opening_cost[t]) /
(plant.sizes[2].capacity - plant.sizes[1].capacity)
end
end
function slope_fix_oper_cost(plant, t)
if plant.sizes[2].capacity <= plant.sizes[1].capacity
0.0
else
(plant.sizes[2].fixed_operating_cost[t] - plant.sizes[1].fixed_operating_cost[t]) /
(plant.sizes[2].capacity - plant.sizes[1].capacity)
end
end
function create_objective_function!(model::ManufacturingModel)
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
obj = AffExpr(0.0)
# Process node costs
for n in values(graph.process_nodes), t in 1:T
# Transportation and variable operating costs
for a in n.incoming_arcs
c = n.location.input.transportation_cost[t] * a.values["distance"]
add_to_expression!(obj, c, vars.flow[a, t])
end
# Opening costs
add_to_expression!(obj,
n.location.sizes[1].opening_cost[t],
vars.open_plant[n, t])
# Fixed operating costs (base)
add_to_expression!(obj,
n.location.sizes[1].fixed_operating_cost[t],
vars.is_open[n, t])
# Fixed operating costs (expansion)
add_to_expression!(obj,
slope_fix_oper_cost(n.location, t),
vars.expansion[n, t])
# Processing costs
add_to_expression!(obj,
n.location.sizes[1].variable_operating_cost[t],
vars.process[n, t])
# Storage costs
add_to_expression!(obj,
n.location.storage_cost[t],
vars.store[n, t])
# Expansion costs
if t < T
add_to_expression!(obj,
slope_open(n.location, t) - slope_open(n.location, t + 1),
vars.expansion[n, t])
else
add_to_expression!(obj,
slope_open(n.location, t),
vars.expansion[n, t])
end
end
# Shipping node costs
for n in values(graph.plant_shipping_nodes), t in 1:T
# Disposal costs
add_to_expression!(obj,
n.location.disposal_cost[n.product][t],
vars.dispose[n, t])
end
@objective(mip, Min, obj)
end
function create_shipping_node_constraints!(model::ManufacturingModel)
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
eqs = model.eqs
eqs.balance = OrderedDict()
for t in 1:T
# Collection centers
for n in graph.collection_shipping_nodes
eqs.balance[n, t] = @constraint(mip,
sum(vars.flow[a, t] for a in n.outgoing_arcs)
== n.location.amount[t])
end
# Plants
for n in graph.plant_shipping_nodes
@constraint(mip,
sum(vars.flow[a, t] for a in n.incoming_arcs) ==
sum(vars.flow[a, t] for a in n.outgoing_arcs) + vars.dispose[n, t])
end
end
end
function create_process_node_constraints!(model::ManufacturingModel)
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
for t in 1:T, n in graph.process_nodes
input_sum = AffExpr(0.0)
for a in n.incoming_arcs
add_to_expression!(input_sum, 1.0, vars.flow[a, t])
end
# Output amount is implied by amount processed
for a in n.outgoing_arcs
@constraint(mip, vars.flow[a, t] == a.values["weight"] * vars.process[n, t])
end
# If plant is closed, capacity is zero
@constraint(mip, vars.capacity[n, t] <= n.location.sizes[2].capacity * vars.is_open[n, t])
# If plant is open, capacity is greater than base
@constraint(mip, vars.capacity[n, t] >= n.location.sizes[1].capacity * vars.is_open[n, t])
# Capacity is linked to expansion
@constraint(mip, vars.capacity[n, t] <= n.location.sizes[1].capacity + vars.expansion[n, t])
# Can only process up to capacity
@constraint(mip, vars.process[n, t] <= vars.capacity[n, t])
if t > 1
# Plant capacity can only increase over time
@constraint(mip, vars.capacity[n, t] >= vars.capacity[n, t-1])
@constraint(mip, vars.expansion[n, t] >= vars.expansion[n, t-1])
end
# Amount received equals amount processed plus stored
store_in = 0
if t > 1
store_in = vars.store[n, t-1]
end
if t == T
@constraint(mip, vars.store[n, t] == 0)
end
@constraint(mip,
input_sum + store_in == vars.store[n, t] + vars.process[n, t])
# Plant is currently open if it was already open in the previous time period or
# if it was built just now
if t > 1
@constraint(mip, vars.is_open[n, t] == vars.is_open[n, t-1] + vars.open_plant[n, t])
else
@constraint(mip, vars.is_open[n, t] == vars.open_plant[n, t])
end
# Plant can only be opened during building period
if t model.instance.building_period
@constraint(mip, vars.open_plant[n, t] == 0)
end
end
end
default_milp_optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
default_lp_optimizer = optimizer_with_attributes(Clp.Optimizer, "LogLevel" => 0)
function solve(instance::Instance;
optimizer=nothing,
output=nothing,
marginal_costs=true,
)
milp_optimizer = lp_optimizer = optimizer
if optimizer == nothing
milp_optimizer = default_milp_optimizer
lp_optimizer = default_lp_optimizer
end
@info "Building graph..."
graph = RELOG.build_graph(instance)
@info @sprintf(" %12d time periods", instance.time)
@info @sprintf(" %12d process nodes", length(graph.process_nodes))
@info @sprintf(" %12d shipping nodes (plant)", length(graph.plant_shipping_nodes))
@info @sprintf(" %12d shipping nodes (collection)", length(graph.collection_shipping_nodes))
@info @sprintf(" %12d arcs", length(graph.arcs))
@info "Building optimization model..."
model = RELOG.build_model(instance, graph, milp_optimizer)
@info "Optimizing MILP..."
JuMP.optimize!(model.mip)
if !has_values(model.mip)
@warn "No solution available"
return OrderedDict()
end
if marginal_costs
@info "Re-optimizing with integer variables fixed..."
all_vars = JuMP.all_variables(model.mip)
vals = OrderedDict(var => JuMP.value(var) for var in all_vars)
JuMP.set_optimizer(model.mip, lp_optimizer)
for var in all_vars
if JuMP.is_binary(var)
JuMP.unset_binary(var)
JuMP.fix(var, vals[var])
end
end
JuMP.optimize!(model.mip)
end
@info "Extracting solution..."
solution = get_solution(model, marginal_costs=marginal_costs)
if output != nothing
write(solution, output)
end
return solution
end
function solve(filename::AbstractString;
heuristic=false,
kwargs...,
)
@info "Reading $filename..."
instance = RELOG.parsefile(filename)
if heuristic && instance.time > 1
@info "Solving single-period version..."
compressed = _compress(instance)
csol = solve(compressed;
output=nothing,
marginal_costs=false,
kwargs...)
@info "Filtering candidate locations..."
selected_pairs = []
for (plant_name, plant_dict) in csol["Plants"]
for (location_name, location_dict) in plant_dict
push!(selected_pairs, (plant_name, location_name))
end
end
filtered_plants = []
for p in instance.plants
if (p.plant_name, p.location_name) in selected_pairs
push!(filtered_plants, p)
end
end
instance.plants = filtered_plants
@info "Solving original version..."
end
sol = solve(instance; kwargs...)
return sol
end
function get_solution(model::ManufacturingModel;
marginal_costs=true,
)
mip, vars, eqs, graph, instance = model.mip, model.vars, model.eqs, model.graph, model.instance
T = instance.time
output = OrderedDict(
"Plants" => OrderedDict(),
"Products" => OrderedDict(),
"Costs" => OrderedDict(
"Fixed operating (\$)" => zeros(T),
"Variable operating (\$)" => zeros(T),
"Opening (\$)" => zeros(T),
"Transportation (\$)" => zeros(T),
"Disposal (\$)" => zeros(T),
"Expansion (\$)" => zeros(T),
"Storage (\$)" => zeros(T),
"Total (\$)" => zeros(T),
),
"Energy" => OrderedDict(
"Plants (GJ)" => zeros(T),
"Transportation (GJ)" => zeros(T),
),
"Emissions" => OrderedDict(
"Plants (tonne)" => OrderedDict(),
"Transportation (tonne)" => OrderedDict(),
),
)
plant_to_process_node = OrderedDict(n.location => n for n in graph.process_nodes)
plant_to_shipping_nodes = OrderedDict()
for p in instance.plants
plant_to_shipping_nodes[p] = []
for a in plant_to_process_node[p].outgoing_arcs
push!(plant_to_shipping_nodes[p], a.dest)
end
end
# Products
if marginal_costs
for n in graph.collection_shipping_nodes
location_dict = OrderedDict{Any, Any}(
"Marginal cost (\$/tonne)" => [round(abs(JuMP.shadow_price(eqs.balance[n, t])), digits=2)
for t in 1:T]
)
if n.product.name ∉ keys(output["Products"])
output["Products"][n.product.name] = OrderedDict()
end
output["Products"][n.product.name][n.location.name] = location_dict
end
end
# Plants
for plant in instance.plants
skip_plant = true
process_node = plant_to_process_node[plant]
plant_dict = OrderedDict{Any, Any}(
"Input" => OrderedDict(),
"Output" => OrderedDict(
"Send" => OrderedDict(),
"Dispose" => OrderedDict(),
),
"Input product" => plant.input.name,
"Total input (tonne)" => [0.0 for t in 1:T],
"Total output" => OrderedDict(),
"Latitude (deg)" => plant.latitude,
"Longitude (deg)" => plant.longitude,
"Capacity (tonne)" => [JuMP.value(vars.capacity[process_node, t])
for t in 1:T],
"Opening cost (\$)" => [JuMP.value(vars.open_plant[process_node, t]) *
plant.sizes[1].opening_cost[t]
for t in 1:T],
"Fixed operating cost (\$)" => [JuMP.value(vars.is_open[process_node, t]) *
plant.sizes[1].fixed_operating_cost[t] +
JuMP.value(vars.expansion[process_node, t]) *
slope_fix_oper_cost(plant, t)
for t in 1:T],
"Expansion cost (\$)" => [(if t == 1
slope_open(plant, t) * JuMP.value(vars.expansion[process_node, t])
else
slope_open(plant, t) * (
JuMP.value(vars.expansion[process_node, t]) -
JuMP.value(vars.expansion[process_node, t - 1])
)
end)
for t in 1:T],
"Process (tonne)" => [JuMP.value(vars.process[process_node, t])
for t in 1:T],
"Variable operating cost (\$)" => [JuMP.value(vars.process[process_node, t]) *
plant.sizes[1].variable_operating_cost[t]
for t in 1:T],
"Storage (tonne)" => [JuMP.value(vars.store[process_node, t])
for t in 1:T],
"Storage cost (\$)" => [JuMP.value(vars.store[process_node, t]) *
plant.storage_cost[t]
for t in 1:T],
)
output["Costs"]["Fixed operating (\$)"] += plant_dict["Fixed operating cost (\$)"]
output["Costs"]["Variable operating (\$)"] += plant_dict["Variable operating cost (\$)"]
output["Costs"]["Opening (\$)"] += plant_dict["Opening cost (\$)"]
output["Costs"]["Expansion (\$)"] += plant_dict["Expansion cost (\$)"]
output["Costs"]["Storage (\$)"] += plant_dict["Storage cost (\$)"]
# Inputs
for a in process_node.incoming_arcs
vals = [JuMP.value(vars.flow[a, t]) for t in 1:T]
if sum(vals) <= 1e-3
continue
end
skip_plant = false
dict = OrderedDict{Any, Any}(
"Amount (tonne)" => vals,
"Distance (km)" => a.values["distance"],
"Latitude (deg)" => a.source.location.latitude,
"Longitude (deg)" => a.source.location.longitude,
"Transportation cost (\$)" => a.source.product.transportation_cost .*
vals .*
a.values["distance"],
"Transportation energy (J)" => vals .*
a.values["distance"] .*
a.source.product.transportation_energy,
"Emissions (tonne)" => OrderedDict(),
)
emissions_dict = output["Emissions"]["Transportation (tonne)"]
for (em_name, em_values) in a.source.product.transportation_emissions
dict["Emissions (tonne)"][em_name] = em_values .*
dict["Amount (tonne)"] .*
a.values["distance"]
if em_name ∉ keys(emissions_dict)
emissions_dict[em_name] = zeros(T)
end
emissions_dict[em_name] += dict["Emissions (tonne)"][em_name]
end
if a.source.location isa CollectionCenter
plant_name = "Origin"
location_name = a.source.location.name
else
plant_name = a.source.location.plant_name
location_name = a.source.location.location_name
end
if plant_name ∉ keys(plant_dict["Input"])
plant_dict["Input"][plant_name] = OrderedDict()
end
plant_dict["Input"][plant_name][location_name] = dict
plant_dict["Total input (tonne)"] += vals
output["Costs"]["Transportation (\$)"] += dict["Transportation cost (\$)"]
output["Energy"]["Transportation (GJ)"] += dict["Transportation energy (J)"] / 1e9
end
plant_dict["Energy (GJ)"] = plant_dict["Total input (tonne)"] .* plant.energy
output["Energy"]["Plants (GJ)"] += plant_dict["Energy (GJ)"]
plant_dict["Emissions (tonne)"] = OrderedDict()
emissions_dict = output["Emissions"]["Plants (tonne)"]
for (em_name, em_values) in plant.emissions
plant_dict["Emissions (tonne)"][em_name] = em_values .* plant_dict["Total input (tonne)"]
if em_name ∉ keys(emissions_dict)
emissions_dict[em_name] = zeros(T)
end
emissions_dict[em_name] += plant_dict["Emissions (tonne)"][em_name]
end
# Outputs
for shipping_node in plant_to_shipping_nodes[plant]
product_name = shipping_node.product.name
plant_dict["Total output"][product_name] = zeros(T)
plant_dict["Output"]["Send"][product_name] = product_dict = OrderedDict()
disposal_amount = [JuMP.value(vars.dispose[shipping_node, t]) for t in 1:T]
if sum(disposal_amount) > 1e-5
skip_plant = false
plant_dict["Output"]["Dispose"][product_name] = disposal_dict = OrderedDict()
disposal_dict["Amount (tonne)"] = [JuMP.value(model.vars.dispose[shipping_node, t])
for t in 1:T]
disposal_dict["Cost (\$)"] = [disposal_dict["Amount (tonne)"][t] *
plant.disposal_cost[shipping_node.product][t]
for t in 1:T]
plant_dict["Total output"][product_name] += disposal_amount
output["Costs"]["Disposal (\$)"] += disposal_dict["Cost (\$)"]
end
for a in shipping_node.outgoing_arcs
vals = [JuMP.value(vars.flow[a, t]) for t in 1:T]
if sum(vals) <= 1e-3
continue
end
skip_plant = false
dict = OrderedDict(
"Amount (tonne)" => vals,
"Distance (km)" => a.values["distance"],
"Latitude (deg)" => a.dest.location.latitude,
"Longitude (deg)" => a.dest.location.longitude,
)
if a.dest.location.plant_name ∉ keys(product_dict)
product_dict[a.dest.location.plant_name] = OrderedDict()
end
product_dict[a.dest.location.plant_name][a.dest.location.location_name] = dict
plant_dict["Total output"][product_name] += vals
end
end
if !skip_plant
if plant.plant_name ∉ keys(output["Plants"])
output["Plants"][plant.plant_name] = OrderedDict()
end
output["Plants"][plant.plant_name][plant.location_name] = plant_dict
end
end
output["Costs"]["Total (\$)"] = sum(values(output["Costs"]))
return output
end
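
For orientation, the end-to-end workflow implemented here (and preserved by the split files below) is driven by solve; a rough usage sketch with hypothetical file names:

using RELOG
solution = RELOG.solve(
    "my_instance.json",
    heuristic = true,          # first solve a compressed single-period model to filter plant locations
    output = "solution.json",  # write the solution report to disk
)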

src/model/build.jl (new file, +250 lines)

@@ -0,0 +1,250 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP, LinearAlgebra, Geodesy, Cbc, Clp, ProgressBars, Printf, DataStructures
function build_model(instance::Instance, graph::Graph, optimizer)::JuMP.Model
model = Model(optimizer)
model[:instance] = instance
model[:graph] = graph
create_vars!(model)
create_objective_function!(model)
create_shipping_node_constraints!(model)
create_process_node_constraints!(model)
return model
end
function create_vars!(model::JuMP.Model)
graph, T = model[:graph], model[:instance].time
model[:flow] =
Dict((a, t) => @variable(model, lower_bound = 0) for a in graph.arcs, t = 1:T)
model[:dispose] = Dict(
(n, t) => @variable(
model,
lower_bound = 0,
upper_bound = n.location.disposal_limit[n.product][t]
) for n in values(graph.plant_shipping_nodes), t = 1:T
)
model[:store] = Dict(
(n, t) =>
@variable(model, lower_bound = 0, upper_bound = n.location.storage_limit)
for n in values(graph.process_nodes), t = 1:T
)
model[:process] = Dict(
(n, t) => @variable(model, lower_bound = 0) for
n in values(graph.process_nodes), t = 1:T
)
model[:open_plant] = Dict(
(n, t) => @variable(model, binary = true) for n in values(graph.process_nodes),
t = 1:T
)
model[:is_open] = Dict(
(n, t) => @variable(model, binary = true) for n in values(graph.process_nodes),
t = 1:T
)
model[:capacity] = Dict(
(n, t) => @variable(
model,
lower_bound = 0,
upper_bound = n.location.sizes[2].capacity
) for n in values(graph.process_nodes), t = 1:T
)
model[:expansion] = Dict(
(n, t) => @variable(
model,
lower_bound = 0,
upper_bound = n.location.sizes[2].capacity - n.location.sizes[1].capacity
) for n in values(graph.process_nodes), t = 1:T
)
end
function slope_open(plant, t)
if plant.sizes[2].capacity <= plant.sizes[1].capacity
0.0
else
(plant.sizes[2].opening_cost[t] - plant.sizes[1].opening_cost[t]) /
(plant.sizes[2].capacity - plant.sizes[1].capacity)
end
end
function slope_fix_oper_cost(plant, t)
if plant.sizes[2].capacity <= plant.sizes[1].capacity
0.0
else
(plant.sizes[2].fixed_operating_cost[t] - plant.sizes[1].fixed_operating_cost[t]) /
(plant.sizes[2].capacity - plant.sizes[1].capacity)
end
end
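# Worked illustration of the slopes above (hypothetical sizes): with
# sizes[1] = (capacity 100, opening cost 1_000) and sizes[2] = (capacity 300,
# opening cost 2_000), slope_open = (2_000 - 1_000) / (300 - 100) = 5.0 dollars
# per tonne of capacity expansion; slope_fix_oper_cost is the analogous slope
# of the fixed operating costs.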
function create_objective_function!(model::JuMP.Model)
graph, T = model[:graph], model[:instance].time
obj = AffExpr(0.0)
# Process node costs
for n in values(graph.process_nodes), t = 1:T
# Transportation and variable operating costs
for a in n.incoming_arcs
c = n.location.input.transportation_cost[t] * a.values["distance"]
add_to_expression!(obj, c, model[:flow][a, t])
end
# Opening costs
add_to_expression!(
obj,
n.location.sizes[1].opening_cost[t],
model[:open_plant][n, t],
)
# Fixed operating costs (base)
add_to_expression!(
obj,
n.location.sizes[1].fixed_operating_cost[t],
model[:is_open][n, t],
)
# Fixed operating costs (expansion)
add_to_expression!(obj, slope_fix_oper_cost(n.location, t), model[:expansion][n, t])
# Processing costs
add_to_expression!(
obj,
n.location.sizes[1].variable_operating_cost[t],
model[:process][n, t],
)
# Storage costs
add_to_expression!(obj, n.location.storage_cost[t], model[:store][n, t])
# Expansion costs
if t < T
add_to_expression!(
obj,
slope_open(n.location, t) - slope_open(n.location, t + 1),
model[:expansion][n, t],
)
else
add_to_expression!(obj, slope_open(n.location, t), model[:expansion][n, t])
end
end
# Shipping node costs
for n in values(graph.plant_shipping_nodes), t = 1:T
# Disposal costs
add_to_expression!(
obj,
n.location.disposal_cost[n.product][t],
model[:dispose][n, t],
)
end
@objective(model, Min, obj)
end
function create_shipping_node_constraints!(model::JuMP.Model)
graph, T = model[:graph], model[:instance].time
model[:eq_balance] = OrderedDict()
for t = 1:T
# Collection centers
for n in graph.collection_shipping_nodes
model[:eq_balance][n, t] = @constraint(
model,
sum(model[:flow][a, t] for a in n.outgoing_arcs) == n.location.amount[t]
)
end
# Plants
for n in graph.plant_shipping_nodes
@constraint(
model,
sum(model[:flow][a, t] for a in n.incoming_arcs) ==
sum(model[:flow][a, t] for a in n.outgoing_arcs) + model[:dispose][n, t]
)
end
end
end
function create_process_node_constraints!(model::JuMP.Model)
graph, T = model[:graph], model[:instance].time
for t = 1:T, n in graph.process_nodes
input_sum = AffExpr(0.0)
for a in n.incoming_arcs
add_to_expression!(input_sum, 1.0, model[:flow][a, t])
end
# Output amount is implied by amount processed
for a in n.outgoing_arcs
@constraint(
model,
model[:flow][a, t] == a.values["weight"] * model[:process][n, t]
)
end
# If plant is closed, capacity is zero
@constraint(
model,
model[:capacity][n, t] <= n.location.sizes[2].capacity * model[:is_open][n, t]
)
# If plant is open, capacity is greater than base
@constraint(
model,
model[:capacity][n, t] >= n.location.sizes[1].capacity * model[:is_open][n, t]
)
# Capacity is linked to expansion
@constraint(
model,
model[:capacity][n, t] <=
n.location.sizes[1].capacity + model[:expansion][n, t]
)
# Can only process up to capacity
@constraint(model, model[:process][n, t] <= model[:capacity][n, t])
if t > 1
# Plant capacity can only increase over time
@constraint(model, model[:capacity][n, t] >= model[:capacity][n, t-1])
@constraint(model, model[:expansion][n, t] >= model[:expansion][n, t-1])
end
# Amount received equals amount processed plus stored
store_in = 0
if t > 1
store_in = model[:store][n, t-1]
end
if t == T
@constraint(model, model[:store][n, t] == 0)
end
@constraint(
model,
input_sum + store_in == model[:store][n, t] + model[:process][n, t]
)
# Plant is currently open if it was already open in the previous time period or
# if it was built just now
if t > 1
@constraint(
model,
model[:is_open][n, t] == model[:is_open][n, t-1] + model[:open_plant][n, t]
)
else
@constraint(model, model[:is_open][n, t] == model[:open_plant][n, t])
end
# Plant can only be opened during building period
if t ∉ model[:instance].building_period
@constraint(model, model[:open_plant][n, t] == 0)
end
end
end
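# Illustrative note (not part of the original source): the balance constraint above
# states that whatever a plant receives in period t, plus the material carried over
# in storage, must either be processed or stored again. For example, if a plant
# receives 100 tonnes in t = 1 and processes only 50, then store[1] = 50 and those
# 50 tonnes re-enter the balance in t = 2; forcing store[T] == 0 ensures that all
# stored material is processed before the planning horizon ends.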

224
src/model/getsol.jl Normal file
View File

@@ -0,0 +1,224 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP, LinearAlgebra, Geodesy, Cbc, Clp, ProgressBars, Printf, DataStructures
function get_solution(model::JuMP.Model; marginal_costs = true)
graph, instance = model[:graph], model[:instance]
T = instance.time
output = OrderedDict(
"Plants" => OrderedDict(),
"Products" => OrderedDict(),
"Costs" => OrderedDict(
"Fixed operating (\$)" => zeros(T),
"Variable operating (\$)" => zeros(T),
"Opening (\$)" => zeros(T),
"Transportation (\$)" => zeros(T),
"Disposal (\$)" => zeros(T),
"Expansion (\$)" => zeros(T),
"Storage (\$)" => zeros(T),
"Total (\$)" => zeros(T),
),
"Energy" =>
OrderedDict("Plants (GJ)" => zeros(T), "Transportation (GJ)" => zeros(T)),
"Emissions" => OrderedDict(
"Plants (tonne)" => OrderedDict(),
"Transportation (tonne)" => OrderedDict(),
),
)
plant_to_process_node = OrderedDict(n.location => n for n in graph.process_nodes)
plant_to_shipping_nodes = OrderedDict()
for p in instance.plants
plant_to_shipping_nodes[p] = []
for a in plant_to_process_node[p].outgoing_arcs
push!(plant_to_shipping_nodes[p], a.dest)
end
end
# Products
if marginal_costs
for n in graph.collection_shipping_nodes
location_dict = OrderedDict{Any,Any}(
"Marginal cost (\$/tonne)" => [
round(abs(JuMP.shadow_price(model[:eq_balance][n, t])), digits = 2) for t = 1:T
],
)
if n.product.name ∉ keys(output["Products"])
output["Products"][n.product.name] = OrderedDict()
end
output["Products"][n.product.name][n.location.name] = location_dict
end
end
# Plants
for plant in instance.plants
skip_plant = true
process_node = plant_to_process_node[plant]
plant_dict = OrderedDict{Any,Any}(
"Input" => OrderedDict(),
"Output" =>
OrderedDict("Send" => OrderedDict(), "Dispose" => OrderedDict()),
"Input product" => plant.input.name,
"Total input (tonne)" => [0.0 for t = 1:T],
"Total output" => OrderedDict(),
"Latitude (deg)" => plant.latitude,
"Longitude (deg)" => plant.longitude,
"Capacity (tonne)" =>
[JuMP.value(model[:capacity][process_node, t]) for t = 1:T],
"Opening cost (\$)" => [
JuMP.value(model[:open_plant][process_node, t]) *
plant.sizes[1].opening_cost[t] for t = 1:T
],
"Fixed operating cost (\$)" => [
JuMP.value(model[:is_open][process_node, t]) *
plant.sizes[1].fixed_operating_cost[t] +
JuMP.value(model[:expansion][process_node, t]) *
slope_fix_oper_cost(plant, t) for t = 1:T
],
"Expansion cost (\$)" => [
(
if t == 1
slope_open(plant, t) * JuMP.value(model[:expansion][process_node, t])
else
slope_open(plant, t) * (
JuMP.value(model[:expansion][process_node, t]) -
JuMP.value(model[:expansion][process_node, t-1])
)
end
) for t = 1:T
],
"Process (tonne)" =>
[JuMP.value(model[:process][process_node, t]) for t = 1:T],
"Variable operating cost (\$)" => [
JuMP.value(model[:process][process_node, t]) *
plant.sizes[1].variable_operating_cost[t] for t = 1:T
],
"Storage (tonne)" =>
[JuMP.value(model[:store][process_node, t]) for t = 1:T],
"Storage cost (\$)" => [
JuMP.value(model[:store][process_node, t]) * plant.storage_cost[t]
for t = 1:T
],
)
output["Costs"]["Fixed operating (\$)"] += plant_dict["Fixed operating cost (\$)"]
output["Costs"]["Variable operating (\$)"] +=
plant_dict["Variable operating cost (\$)"]
output["Costs"]["Opening (\$)"] += plant_dict["Opening cost (\$)"]
output["Costs"]["Expansion (\$)"] += plant_dict["Expansion cost (\$)"]
output["Costs"]["Storage (\$)"] += plant_dict["Storage cost (\$)"]
# Inputs
for a in process_node.incoming_arcs
vals = [JuMP.value(model[:flow][a, t]) for t = 1:T]
if sum(vals) <= 1e-3
continue
end
skip_plant = false
dict = OrderedDict{Any,Any}(
"Amount (tonne)" => vals,
"Distance (km)" => a.values["distance"],
"Latitude (deg)" => a.source.location.latitude,
"Longitude (deg)" => a.source.location.longitude,
"Transportation cost (\$)" =>
a.source.product.transportation_cost .* vals .* a.values["distance"],
"Transportation energy (J)" =>
vals .* a.values["distance"] .* a.source.product.transportation_energy,
"Emissions (tonne)" => OrderedDict(),
)
emissions_dict = output["Emissions"]["Transportation (tonne)"]
for (em_name, em_values) in a.source.product.transportation_emissions
dict["Emissions (tonne)"][em_name] =
em_values .* dict["Amount (tonne)"] .* a.values["distance"]
if em_name ∉ keys(emissions_dict)
emissions_dict[em_name] = zeros(T)
end
emissions_dict[em_name] += dict["Emissions (tonne)"][em_name]
end
if a.source.location isa CollectionCenter
plant_name = "Origin"
location_name = a.source.location.name
else
plant_name = a.source.location.plant_name
location_name = a.source.location.location_name
end
if plant_name ∉ keys(plant_dict["Input"])
plant_dict["Input"][plant_name] = OrderedDict()
end
plant_dict["Input"][plant_name][location_name] = dict
plant_dict["Total input (tonne)"] += vals
output["Costs"]["Transportation (\$)"] += dict["Transportation cost (\$)"]
output["Energy"]["Transportation (GJ)"] +=
dict["Transportation energy (J)"] / 1e9
end
plant_dict["Energy (GJ)"] = plant_dict["Total input (tonne)"] .* plant.energy
output["Energy"]["Plants (GJ)"] += plant_dict["Energy (GJ)"]
plant_dict["Emissions (tonne)"] = OrderedDict()
emissions_dict = output["Emissions"]["Plants (tonne)"]
for (em_name, em_values) in plant.emissions
plant_dict["Emissions (tonne)"][em_name] =
em_values .* plant_dict["Total input (tonne)"]
if em_name ∉ keys(emissions_dict)
emissions_dict[em_name] = zeros(T)
end
emissions_dict[em_name] += plant_dict["Emissions (tonne)"][em_name]
end
# Outputs
for shipping_node in plant_to_shipping_nodes[plant]
product_name = shipping_node.product.name
plant_dict["Total output"][product_name] = zeros(T)
plant_dict["Output"]["Send"][product_name] = product_dict = OrderedDict()
disposal_amount = [JuMP.value(model[:dispose][shipping_node, t]) for t = 1:T]
if sum(disposal_amount) > 1e-5
skip_plant = false
plant_dict["Output"]["Dispose"][product_name] =
disposal_dict = OrderedDict()
disposal_dict["Amount (tonne)"] =
[JuMP.value(model[:dispose][shipping_node, t]) for t = 1:T]
disposal_dict["Cost (\$)"] = [
disposal_dict["Amount (tonne)"][t] *
plant.disposal_cost[shipping_node.product][t] for t = 1:T
]
plant_dict["Total output"][product_name] += disposal_amount
output["Costs"]["Disposal (\$)"] += disposal_dict["Cost (\$)"]
end
for a in shipping_node.outgoing_arcs
vals = [JuMP.value(model[:flow][a, t]) for t = 1:T]
if sum(vals) <= 1e-3
continue
end
skip_plant = false
dict = OrderedDict(
"Amount (tonne)" => vals,
"Distance (km)" => a.values["distance"],
"Latitude (deg)" => a.dest.location.latitude,
"Longitude (deg)" => a.dest.location.longitude,
)
if a.dest.location.plant_name ∉ keys(product_dict)
product_dict[a.dest.location.plant_name] = OrderedDict()
end
product_dict[a.dest.location.plant_name][a.dest.location.location_name] =
dict
plant_dict["Total output"][product_name] += vals
end
end
if !skip_plant
if plant.plant_name ∉ keys(output["Plants"])
output["Plants"][plant.plant_name] = OrderedDict()
end
output["Plants"][plant.plant_name][plant.location_name] = plant_dict
end
end
output["Costs"]["Total (\$)"] = sum(values(output["Costs"]))
return output
end

94
src/model/solve.jl Normal file
View File

@@ -0,0 +1,94 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP, LinearAlgebra, Geodesy, Cbc, Clp, ProgressBars, Printf, DataStructures
default_milp_optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
default_lp_optimizer = optimizer_with_attributes(Clp.Optimizer, "LogLevel" => 0)
function solve(
instance::Instance;
optimizer = nothing,
output = nothing,
marginal_costs = true,
)
milp_optimizer = lp_optimizer = optimizer
if optimizer == nothing
milp_optimizer = default_milp_optimizer
lp_optimizer = default_lp_optimizer
end
@info "Building graph..."
graph = RELOG.build_graph(instance)
@info @sprintf(" %12d time periods", instance.time)
@info @sprintf(" %12d process nodes", length(graph.process_nodes))
@info @sprintf(" %12d shipping nodes (plant)", length(graph.plant_shipping_nodes))
@info @sprintf(
" %12d shipping nodes (collection)",
length(graph.collection_shipping_nodes)
)
@info @sprintf(" %12d arcs", length(graph.arcs))
@info "Building optimization model..."
model = RELOG.build_model(instance, graph, milp_optimizer)
@info "Optimizing MILP..."
JuMP.optimize!(model)
if !has_values(model)
@warn "No solution available"
return OrderedDict()
end
if marginal_costs
@info "Re-optimizing with integer variables fixed..."
all_vars = JuMP.all_variables(model)
vals = OrderedDict(var => JuMP.value(var) for var in all_vars)
JuMP.set_optimizer(model, lp_optimizer)
for var in all_vars
if JuMP.is_binary(var)
JuMP.unset_binary(var)
JuMP.fix(var, vals[var])
end
end
JuMP.optimize!(model)
end
@info "Extracting solution..."
solution = get_solution(model, marginal_costs = marginal_costs)
if output != nothing
write(solution, output)
end
return solution
end
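# Usage sketch (not part of the original source; assumes instances/s1.json is
# available, as in the test suite):
#
#     instance = RELOG.parsefile("instances/s1.json")
#     solution = RELOG.solve(
#         instance,
#         optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 1),
#         marginal_costs = false,
#     )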
function solve(filename::AbstractString; heuristic = false, kwargs...)
@info "Reading $filename..."
instance = RELOG.parsefile(filename)
if heuristic && instance.time > 1
@info "Solving single-period version..."
compressed = _compress(instance)
csol = solve(compressed; output = nothing, marginal_costs = false, kwargs...)
@info "Filtering candidate locations..."
selected_pairs = []
for (plant_name, plant_dict) in csol["Plants"]
for (location_name, location_dict) in plant_dict
push!(selected_pairs, (plant_name, location_name))
end
end
filtered_plants = []
for p in instance.plants
if (p.plant_name, p.location_name) in selected_pairs
push!(filtered_plants, p)
end
end
instance.plants = filtered_plants
@info "Solving original version..."
end
sol = solve(instance; kwargs...)
return sol
end
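# Usage sketch (not part of the original source), mirroring the test suite:
#
#     solution = RELOG.solve("instances/s1.json", heuristic = true, output = "solution.json")
#
# With heuristic = true and more than one time period, a compressed single-period
# instance is solved first, and only the plant locations it opens are kept as
# candidates for the full multi-period solve.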

View File

@@ -1,278 +0,0 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function plants_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."latitude (deg)" = Float64[]
df."longitude (deg)" = Float64[]
df."capacity (tonne)" = Float64[]
df."amount processed (tonne)" = Float64[]
df."amount received (tonne)" = Float64[]
df."amount in storage (tonne)" = Float64[]
df."utilization factor (%)" = Float64[]
df."energy (GJ)" = Float64[]
df."opening cost (\$)" = Float64[]
df."expansion cost (\$)" = Float64[]
df."fixed operating cost (\$)" = Float64[]
df."variable operating cost (\$)" = Float64[]
df."storage cost (\$)" = Float64[]
df."total cost (\$)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for year in 1:T
capacity = round(location_dict["Capacity (tonne)"][year], digits=2)
received = round(location_dict["Total input (tonne)"][year], digits=2)
processed = round(location_dict["Process (tonne)"][year], digits=2)
in_storage = round(location_dict["Storage (tonne)"][year], digits=2)
utilization_factor = round(processed / capacity * 100.0, digits=2)
energy = round(location_dict["Energy (GJ)"][year], digits=2)
latitude = round(location_dict["Latitude (deg)"], digits=6)
longitude = round(location_dict["Longitude (deg)"], digits=6)
opening_cost = round(location_dict["Opening cost (\$)"][year], digits=2)
expansion_cost = round(location_dict["Expansion cost (\$)"][year], digits=2)
fixed_cost = round(location_dict["Fixed operating cost (\$)"][year], digits=2)
var_cost = round(location_dict["Variable operating cost (\$)"][year], digits=2)
storage_cost = round(location_dict["Storage cost (\$)"][year], digits=2)
total_cost = round(opening_cost + expansion_cost + fixed_cost +
var_cost + storage_cost, digits=2)
push!(df, [
plant_name,
location_name,
year,
latitude,
longitude,
capacity,
processed,
received,
in_storage,
utilization_factor,
energy,
opening_cost,
expansion_cost,
fixed_cost,
var_cost,
storage_cost,
total_cost,
])
end
end
end
return df
end
function plant_outputs_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."product name" = String[]
df."amount produced (tonne)" = Float64[]
df."amount sent (tonne)" = Float64[]
df."amount disposed (tonne)" = Float64[]
df."disposal cost (\$)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for (product_name, amount_produced) in location_dict["Total output"]
send_dict = location_dict["Output"]["Send"]
disposal_dict = location_dict["Output"]["Dispose"]
sent = zeros(T)
if product_name in keys(send_dict)
for (dst_plant_name, dst_plant_dict) in send_dict[product_name]
for (dst_location_name, dst_location_dict) in dst_plant_dict
sent += dst_location_dict["Amount (tonne)"]
end
end
end
sent = round.(sent, digits=2)
disposal_amount = zeros(T)
disposal_cost = zeros(T)
if product_name in keys(disposal_dict)
disposal_amount += disposal_dict[product_name]["Amount (tonne)"]
disposal_cost += disposal_dict[product_name]["Cost (\$)"]
end
disposal_amount = round.(disposal_amount, digits=2)
disposal_cost = round.(disposal_cost, digits=2)
for year in 1:T
push!(df, [
plant_name,
location_name,
year,
product_name,
round(amount_produced[year], digits=2),
sent[year],
disposal_amount[year],
disposal_cost[year],
])
end
end
end
end
return df
end
function plant_emissions_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."emission type" = String[]
df."emission amount (tonne)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for (emission_name, emission_amount) in location_dict["Emissions (tonne)"]
for year in 1:T
push!(df, [
plant_name,
location_name,
year,
emission_name,
round(emission_amount[year], digits=2),
])
end
end
end
end
return df
end
function transportation_report(solution)::DataFrame
df = DataFrame()
df."source type" = String[]
df."source location name" = String[]
df."source latitude (deg)" = Float64[]
df."source longitude (deg)" = Float64[]
df."destination type" = String[]
df."destination location name" = String[]
df."destination latitude (deg)" = Float64[]
df."destination longitude (deg)" = Float64[]
df."product" = String[]
df."year" = Int[]
df."distance (km)" = Float64[]
df."amount (tonne)" = Float64[]
df."amount-distance (tonne-km)" = Float64[]
df."transportation cost (\$)" = Float64[]
df."transportation energy (GJ)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (dst_plant_name, dst_plant_dict) in solution["Plants"]
for (dst_location_name, dst_location_dict) in dst_plant_dict
for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
for (src_location_name, src_location_dict) in src_plant_dict
for year in 1:T
push!(df, [
src_plant_name,
src_location_name,
round(src_location_dict["Latitude (deg)"], digits=6),
round(src_location_dict["Longitude (deg)"], digits=6),
dst_plant_name,
dst_location_name,
round(dst_location_dict["Latitude (deg)"], digits=6),
round(dst_location_dict["Longitude (deg)"], digits=6),
dst_location_dict["Input product"],
year,
round(src_location_dict["Distance (km)"], digits=2),
round(src_location_dict["Amount (tonne)"][year], digits=2),
round(src_location_dict["Amount (tonne)"][year] *
src_location_dict["Distance (km)"],
digits=2),
round(src_location_dict["Transportation cost (\$)"][year], digits=2),
round(src_location_dict["Transportation energy (J)"][year] / 1e9, digits=2),
])
end
end
end
end
end
return df
end
function transportation_emissions_report(solution)::DataFrame
df = DataFrame()
df."source type" = String[]
df."source location name" = String[]
df."source latitude (deg)" = Float64[]
df."source longitude (deg)" = Float64[]
df."destination type" = String[]
df."destination location name" = String[]
df."destination latitude (deg)" = Float64[]
df."destination longitude (deg)" = Float64[]
df."product" = String[]
df."year" = Int[]
df."distance (km)" = Float64[]
df."shipped amount (tonne)" = Float64[]
df."shipped amount-distance (tonne-km)" = Float64[]
df."emission type" = String[]
df."emission amount (tonne)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (dst_plant_name, dst_plant_dict) in solution["Plants"]
for (dst_location_name, dst_location_dict) in dst_plant_dict
for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
for (src_location_name, src_location_dict) in src_plant_dict
for (emission_name, emission_amount) in src_location_dict["Emissions (tonne)"]
for year in 1:T
push!(df, [
src_plant_name,
src_location_name,
round(src_location_dict["Latitude (deg)"], digits=6),
round(src_location_dict["Longitude (deg)"], digits=6),
dst_plant_name,
dst_location_name,
round(dst_location_dict["Latitude (deg)"], digits=6),
round(dst_location_dict["Longitude (deg)"], digits=6),
dst_location_dict["Input product"],
year,
round(src_location_dict["Distance (km)"], digits=2),
round(src_location_dict["Amount (tonne)"][year], digits=2),
round(src_location_dict["Amount (tonne)"][year] *
src_location_dict["Distance (km)"],
digits=2),
emission_name,
round(emission_amount[year], digits=2),
])
end
end
end
end
end
end
return df
end
function write(solution::AbstractDict, filename::AbstractString)
@info "Writing solution: $filename"
open(filename, "w") do file
JSON.print(file, solution, 2)
end
end
write_plants_report(solution, filename) =
CSV.write(filename, plants_report(solution))
write_plant_outputs_report(solution, filename) =
CSV.write(filename, plant_outputs_report(solution))
write_plant_emissions_report(solution, filename) =
CSV.write(filename, plant_emissions_report(solution))
write_transportation_report(solution, filename) =
CSV.write(filename, transportation_report(solution))
write_transportation_emissions_report(solution, filename) =
CSV.write(filename, transportation_emissions_report(solution))

View File

@@ -0,0 +1,38 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function plant_emissions_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."emission type" = String[]
df."emission amount (tonne)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for (emission_name, emission_amount) in location_dict["Emissions (tonne)"]
for year = 1:T
push!(
df,
[
plant_name,
location_name,
year,
emission_name,
round(emission_amount[year], digits = 2),
],
)
end
end
end
end
return df
end
write_plant_emissions_report(solution, filename) =
CSV.write(filename, plant_emissions_report(solution))

View File

@@ -0,0 +1,66 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function plant_outputs_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."product name" = String[]
df."amount produced (tonne)" = Float64[]
df."amount sent (tonne)" = Float64[]
df."amount disposed (tonne)" = Float64[]
df."disposal cost (\$)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for (product_name, amount_produced) in location_dict["Total output"]
send_dict = location_dict["Output"]["Send"]
disposal_dict = location_dict["Output"]["Dispose"]
sent = zeros(T)
if product_name in keys(send_dict)
for (dst_plant_name, dst_plant_dict) in send_dict[product_name]
for (dst_location_name, dst_location_dict) in dst_plant_dict
sent += dst_location_dict["Amount (tonne)"]
end
end
end
sent = round.(sent, digits = 2)
disposal_amount = zeros(T)
disposal_cost = zeros(T)
if product_name in keys(disposal_dict)
disposal_amount += disposal_dict[product_name]["Amount (tonne)"]
disposal_cost += disposal_dict[product_name]["Cost (\$)"]
end
disposal_amount = round.(disposal_amount, digits = 2)
disposal_cost = round.(disposal_cost, digits = 2)
for year = 1:T
push!(
df,
[
plant_name,
location_name,
year,
product_name,
round(amount_produced[year], digits = 2),
sent[year],
disposal_amount[year],
disposal_cost[year],
],
)
end
end
end
end
return df
end
write_plant_outputs_report(solution, filename) =
CSV.write(filename, plant_outputs_report(solution))

79
src/reports/plants.jl Normal file
View File

@@ -0,0 +1,79 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function plants_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."latitude (deg)" = Float64[]
df."longitude (deg)" = Float64[]
df."capacity (tonne)" = Float64[]
df."amount processed (tonne)" = Float64[]
df."amount received (tonne)" = Float64[]
df."amount in storage (tonne)" = Float64[]
df."utilization factor (%)" = Float64[]
df."energy (GJ)" = Float64[]
df."opening cost (\$)" = Float64[]
df."expansion cost (\$)" = Float64[]
df."fixed operating cost (\$)" = Float64[]
df."variable operating cost (\$)" = Float64[]
df."storage cost (\$)" = Float64[]
df."total cost (\$)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for year = 1:T
capacity = round(location_dict["Capacity (tonne)"][year], digits = 2)
received = round(location_dict["Total input (tonne)"][year], digits = 2)
processed = round(location_dict["Process (tonne)"][year], digits = 2)
in_storage = round(location_dict["Storage (tonne)"][year], digits = 2)
utilization_factor = round(processed / capacity * 100.0, digits = 2)
energy = round(location_dict["Energy (GJ)"][year], digits = 2)
latitude = round(location_dict["Latitude (deg)"], digits = 6)
longitude = round(location_dict["Longitude (deg)"], digits = 6)
opening_cost = round(location_dict["Opening cost (\$)"][year], digits = 2)
expansion_cost =
round(location_dict["Expansion cost (\$)"][year], digits = 2)
fixed_cost =
round(location_dict["Fixed operating cost (\$)"][year], digits = 2)
var_cost =
round(location_dict["Variable operating cost (\$)"][year], digits = 2)
storage_cost = round(location_dict["Storage cost (\$)"][year], digits = 2)
total_cost = round(
opening_cost + expansion_cost + fixed_cost + var_cost + storage_cost,
digits = 2,
)
push!(
df,
[
plant_name,
location_name,
year,
latitude,
longitude,
capacity,
processed,
received,
in_storage,
utilization_factor,
energy,
opening_cost,
expansion_cost,
fixed_cost,
var_cost,
storage_cost,
total_cost,
],
)
end
end
end
return df
end
write_plants_report(solution, filename) = CSV.write(filename, plants_report(solution))
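# Usage sketch (not part of the original source):
#
#     solution = RELOG.solve("instances/s1.json")
#     RELOG.write_plants_report(solution, "plants.csv")
#
# Each row of the resulting CSV describes one (plant type, location, year) triple,
# with amounts rounded to two decimals and coordinates to six.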

75
src/reports/tr.jl Normal file
View File

@@ -0,0 +1,75 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function transportation_report(solution)::DataFrame
df = DataFrame()
df."source type" = String[]
df."source location name" = String[]
df."source latitude (deg)" = Float64[]
df."source longitude (deg)" = Float64[]
df."destination type" = String[]
df."destination location name" = String[]
df."destination latitude (deg)" = Float64[]
df."destination longitude (deg)" = Float64[]
df."product" = String[]
df."year" = Int[]
df."distance (km)" = Float64[]
df."amount (tonne)" = Float64[]
df."amount-distance (tonne-km)" = Float64[]
df."transportation cost (\$)" = Float64[]
df."transportation energy (GJ)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (dst_plant_name, dst_plant_dict) in solution["Plants"]
for (dst_location_name, dst_location_dict) in dst_plant_dict
for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
for (src_location_name, src_location_dict) in src_plant_dict
for year = 1:T
push!(
df,
[
src_plant_name,
src_location_name,
round(src_location_dict["Latitude (deg)"], digits = 6),
round(src_location_dict["Longitude (deg)"], digits = 6),
dst_plant_name,
dst_location_name,
round(dst_location_dict["Latitude (deg)"], digits = 6),
round(dst_location_dict["Longitude (deg)"], digits = 6),
dst_location_dict["Input product"],
year,
round(src_location_dict["Distance (km)"], digits = 2),
round(
src_location_dict["Amount (tonne)"][year],
digits = 2,
),
round(
src_location_dict["Amount (tonne)"][year] *
src_location_dict["Distance (km)"],
digits = 2,
),
round(
src_location_dict["Transportation cost (\$)"][year],
digits = 2,
),
round(
src_location_dict["Transportation energy (J)"][year] /
1e9,
digits = 2,
),
],
)
end
end
end
end
end
return df
end
write_transportation_report(solution, filename) =
CSV.write(filename, transportation_report(solution))

View File

@@ -0,0 +1,71 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function transportation_emissions_report(solution)::DataFrame
df = DataFrame()
df."source type" = String[]
df."source location name" = String[]
df."source latitude (deg)" = Float64[]
df."source longitude (deg)" = Float64[]
df."destination type" = String[]
df."destination location name" = String[]
df."destination latitude (deg)" = Float64[]
df."destination longitude (deg)" = Float64[]
df."product" = String[]
df."year" = Int[]
df."distance (km)" = Float64[]
df."shipped amount (tonne)" = Float64[]
df."shipped amount-distance (tonne-km)" = Float64[]
df."emission type" = String[]
df."emission amount (tonne)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (dst_plant_name, dst_plant_dict) in solution["Plants"]
for (dst_location_name, dst_location_dict) in dst_plant_dict
for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
for (src_location_name, src_location_dict) in src_plant_dict
for (emission_name, emission_amount) in
src_location_dict["Emissions (tonne)"]
for year = 1:T
push!(
df,
[
src_plant_name,
src_location_name,
round(src_location_dict["Latitude (deg)"], digits = 6),
round(src_location_dict["Longitude (deg)"], digits = 6),
dst_plant_name,
dst_location_name,
round(dst_location_dict["Latitude (deg)"], digits = 6),
round(dst_location_dict["Longitude (deg)"], digits = 6),
dst_location_dict["Input product"],
year,
round(src_location_dict["Distance (km)"], digits = 2),
round(
src_location_dict["Amount (tonne)"][year],
digits = 2,
),
round(
src_location_dict["Amount (tonne)"][year] *
src_location_dict["Distance (km)"],
digits = 2,
),
emission_name,
round(emission_amount[year], digits = 2),
],
)
end
end
end
end
end
end
return df
end
write_transportation_emissions_report(solution, filename) =
CSV.write(filename, transportation_emissions_report(solution))

13
src/reports/write.jl Normal file
View File

@@ -0,0 +1,13 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
using JSON
function write(solution::AbstractDict, filename::AbstractString)
@info "Writing solution: $filename"
open(filename, "w") do file
JSON.print(file, solution, 2)
end
end
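# Usage sketch (not part of the original source):
#
#     solution = RELOG.solve("instances/s1.json")
#     RELOG.write(solution, "solution.json")  # pretty-printed JSON with 2-space indent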

View File

@@ -9,14 +9,7 @@ using JuMP
using MathOptInterface
using ProgressBars
pkg = [:Cbc,
:Clp,
:Geodesy,
:JSON,
:JSONSchema,
:JuMP,
:MathOptInterface,
:ProgressBars]
pkg = [:Cbc, :Clp, :Geodesy, :JSON, :JSONSchema, :JuMP, :MathOptInterface, :ProgressBars]
@info "Building system image..."
create_sysimage(pkg, sysimage_path="build/sysimage.so")
create_sysimage(pkg, sysimage_path = "build/sysimage.so")

View File

@@ -1,7 +0,0 @@
plant type,location name,year,emission type,emission amount (tonne)
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,CO2,40711.3
Rare Earth Recycling Plant,"Stanly, North Carolina",1,CO2,23336.47
Rare Earth Recycling Plant,"Lynn, Texas",1,CO2,52927.44
Mega Plant,"Sebastian, Arkansas",1,CO2,110818.84
Mega Plant,"District of Columbia, District of Columbia",1,CO2,63523.43
Mega Plant,"Maricopa, Arizona",1,CO2,144072.0

View File

@@ -1,37 +0,0 @@
plant type,location name,year,product name,amount produced (tonne),amount sent (tonne),amount disposed (tonne),disposal cost ($)
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,rare earth cela,18045.12,0.0,18045.12,0.0
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,rare earth diddy,7624.02,0.0,7624.02,0.0
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,salt,6434.8,0.0,6434.8,324314.03
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,rare earth misch,2188.78,0.0,2188.78,0.0
Rare Earth Recycling Plant,"Stanly, North Carolina",1,rare earth cela,10343.8,0.0,10343.8,0.0
Rare Earth Recycling Plant,"Stanly, North Carolina",1,rare earth diddy,4370.23,0.0,4370.23,0.0
Rare Earth Recycling Plant,"Stanly, North Carolina",1,salt,3688.55,0.0,3688.55,185902.85
Rare Earth Recycling Plant,"Stanly, North Carolina",1,rare earth misch,1254.65,0.0,1254.65,0.0
Rare Earth Recycling Plant,"Lynn, Texas",1,rare earth cela,23459.88,0.0,23459.88,0.0
Rare Earth Recycling Plant,"Lynn, Texas",1,rare earth diddy,9911.74,0.0,9911.74,0.0
Rare Earth Recycling Plant,"Lynn, Texas",1,salt,8365.68,0.0,8365.68,421630.2
Rare Earth Recycling Plant,"Lynn, Texas",1,rare earth misch,2845.56,0.0,2845.56,0.0
Mega Plant,"Sebastian, Arkansas",1,iron-nickel scrap,35656.28,0.0,35656.28,0.0
Mega Plant,"Sebastian, Arkansas",1,mixed-hydroxides,73141.86,0.0,73141.86,0.0
Mega Plant,"Sebastian, Arkansas",1,leach residue,31470.35,0.0,31470.35,6.38848022e6
Mega Plant,"Sebastian, Arkansas",1,plastic pack,96503.37,0.0,96503.37,4.86376966e6
Mega Plant,"Sebastian, Arkansas",1,salt,285304.6,0.0,285304.6,1.437935192e7
Mega Plant,"Sebastian, Arkansas",1,rare earth mix,44145.84,44145.84,0.0,0.0
Mega Plant,"Sebastian, Arkansas",1,nickel-iron scrap,178655.26,0.0,178655.26,0.0
Mega Plant,"Sebastian, Arkansas",1,nickel,37931.36,0.0,37931.36,0.0
Mega Plant,"District of Columbia, District of Columbia",1,iron-nickel scrap,20438.84,0.0,20438.84,0.0
Mega Plant,"District of Columbia, District of Columbia",1,mixed-hydroxides,41926.28,0.0,41926.28,0.0
Mega Plant,"District of Columbia, District of Columbia",1,leach residue,18039.39,0.0,18039.39,3.66199595e6
Mega Plant,"District of Columbia, District of Columbia",1,plastic pack,55317.53,0.0,55317.53,2.78800343e6
Mega Plant,"District of Columbia, District of Columbia",1,salt,163541.92,0.0,163541.92,8.24251257e6
Mega Plant,"District of Columbia, District of Columbia",1,rare earth mix,25305.22,25305.22,0.0,0.0
Mega Plant,"District of Columbia, District of Columbia",1,nickel-iron scrap,102408.52,0.0,102408.52,0.0
Mega Plant,"District of Columbia, District of Columbia",1,nickel,21742.96,0.0,21742.96,0.0
Mega Plant,"Maricopa, Arizona",1,iron-nickel scrap,46355.57,0.0,46355.57,0.0
Mega Plant,"Maricopa, Arizona",1,mixed-hydroxides,95089.37,0.0,95089.37,0.0
Mega Plant,"Maricopa, Arizona",1,leach residue,40913.58,0.0,40913.58,8.30545698e6
Mega Plant,"Maricopa, Arizona",1,plastic pack,125460.91,0.0,125460.91,6.32322998e6
Mega Plant,"Maricopa, Arizona",1,salt,370915.31,0.0,370915.31,1.869413141e7
Mega Plant,"Maricopa, Arizona",1,rare earth mix,57392.58,57392.58,0.0,0.0
Mega Plant,"Maricopa, Arizona",1,nickel-iron scrap,232263.93,0.0,232263.93,0.0
Mega Plant,"Maricopa, Arizona",1,nickel,49313.34,0.0,49313.34,0.0

View File

@@ -1,7 +0,0 @@
plant type,location name,year,latitude (deg),longitude (deg),capacity (tonne),amount processed (tonne),utilization factor (%),energy (GJ),opening cost ($),expansion cost ($),fixed operating cost ($),variable operating cost ($),total cost ($)
Rare Earth Recycling Plant,"Sebastian, Arkansas",1,35.23416,-94.212943,44145.84,44145.84,100.0,1.13360359e6,6.9926855e6,1.793555439e7,1.677420707e7,1.0080261442e8,1.4250506138e8
Rare Earth Recycling Plant,"Stanly, North Carolina",1,35.334445,-80.223231,25305.22,25305.22,100.0,649802.71,7.1653444e6,9.98954108e6,1.126536154e7,5.778193764e7,8.620218466e7
Rare Earth Recycling Plant,"Lynn, Texas",1,33.166444,-101.793455,57392.58,57392.58,100.0,1.47376145e6,7.4243328e6,2.5154042e7,2.06474474e7,1.310502261e8,1.842760483e8
Mega Plant,"Sebastian, Arkansas",1,35.23416,-94.212943,553817.3,553817.3,100.0,3.08574408e6,1.6858178e7,4.012879371e7,3.834652057e7,4.2898688058e8,5.2432037286e8
Mega Plant,"District of Columbia, District of Columbia",1,38.930028,-76.974164,317458.4,317458.4,100.0,1.76880602e6,2.12288167e7,2.746685387e7,2.602109584e7,2.4590327664e8,3.2062004305e8
Mega Plant,"Maricopa, Arizona",1,33.647365,-111.893669,720000.0,720000.0,100.0,4.0116763e6,2.10206911e7,6.60955172e7,4.70124619e7,5.57712e8,6.918406702e8

Binary file not shown.

File diff suppressed because it is too large

File diff suppressed because it is too large

39
test/graph/build_test.jl Normal file
View File

@@ -0,0 +1,39 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>
using RELOG
@testset "build_graph" begin
basedir = dirname(@__FILE__)
instance = RELOG.parsefile("$basedir/../../instances/s1.json")
graph = RELOG.build_graph(instance)
process_node_by_location_name =
Dict(n.location.location_name => n for n in graph.process_nodes)
@test length(graph.plant_shipping_nodes) == 8
@test length(graph.collection_shipping_nodes) == 10
@test length(graph.process_nodes) == 6
node = graph.collection_shipping_nodes[1]
@test node.location.name == "C1"
@test length(node.incoming_arcs) == 0
@test length(node.outgoing_arcs) == 2
@test node.outgoing_arcs[1].source.location.name == "C1"
@test node.outgoing_arcs[1].dest.location.plant_name == "F1"
@test node.outgoing_arcs[1].dest.location.location_name == "L1"
@test node.outgoing_arcs[1].values["distance"] == 1095.62
node = process_node_by_location_name["L1"]
@test node.location.plant_name == "F1"
@test node.location.location_name == "L1"
@test length(node.incoming_arcs) == 10
@test length(node.outgoing_arcs) == 2
node = process_node_by_location_name["L3"]
@test node.location.plant_name == "F2"
@test node.location.location_name == "L3"
@test length(node.incoming_arcs) == 2
@test length(node.outgoing_arcs) == 2
@test length(graph.arcs) == 38
end

View File

@@ -1,42 +0,0 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>
using RELOG
@testset "Graph" begin
@testset "build_graph" begin
basedir = dirname(@__FILE__)
instance = RELOG.parsefile("$basedir/../instances/s1.json")
graph = RELOG.build_graph(instance)
process_node_by_location_name = Dict(n.location.location_name => n
for n in graph.process_nodes)
@test length(graph.plant_shipping_nodes) == 8
@test length(graph.collection_shipping_nodes) == 10
@test length(graph.process_nodes) == 6
node = graph.collection_shipping_nodes[1]
@test node.location.name == "C1"
@test length(node.incoming_arcs) == 0
@test length(node.outgoing_arcs) == 2
@test node.outgoing_arcs[1].source.location.name == "C1"
@test node.outgoing_arcs[1].dest.location.plant_name == "F1"
@test node.outgoing_arcs[1].dest.location.location_name == "L1"
@test node.outgoing_arcs[1].values["distance"] == 1095.62
node = process_node_by_location_name["L1"]
@test node.location.plant_name == "F1"
@test node.location.location_name == "L1"
@test length(node.incoming_arcs) == 10
@test length(node.outgoing_arcs) == 2
node = process_node_by_location_name["L3"]
@test node.location.plant_name == "F2"
@test node.location.location_name == "L3"
@test length(node.incoming_arcs) == 2
@test length(node.outgoing_arcs) == 2
@test length(graph.arcs) == 38
end
end

View File

@@ -0,0 +1,53 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>
using RELOG
@testset "compress" begin
basedir = dirname(@__FILE__)
instance = RELOG.parsefile("$basedir/../../instances/s1.json")
compressed = RELOG._compress(instance)
product_name_to_product = Dict(p.name => p for p in compressed.products)
location_name_to_facility = Dict()
for p in compressed.plants
location_name_to_facility[p.location_name] = p
end
for c in compressed.collection_centers
location_name_to_facility[c.name] = c
end
p1 = product_name_to_product["P1"]
p2 = product_name_to_product["P2"]
p3 = product_name_to_product["P3"]
c1 = location_name_to_facility["C1"]
l1 = location_name_to_facility["L1"]
@test compressed.time == 1
@test compressed.building_period == [1]
@test p1.name == "P1"
@test p1.transportation_cost ≈ [0.015]
@test p1.transportation_energy ≈ [0.115]
@test p1.transportation_emissions["CO2"] ≈ [0.051]
@test p1.transportation_emissions["CH4"] ≈ [0.0025]
@test c1.name == "C1"
@test c1.amount ≈ [1869.12]
@test l1.plant_name == "F1"
@test l1.location_name == "L1"
@test l1.energy ≈ [0.115]
@test l1.emissions["CO2"] ≈ [0.051]
@test l1.emissions["CH4"] ≈ [0.0025]
@test l1.sizes[1].opening_cost ≈ [500]
@test l1.sizes[2].opening_cost ≈ [1250]
@test l1.sizes[1].fixed_operating_cost ≈ [60]
@test l1.sizes[2].fixed_operating_cost ≈ [60]
@test l1.sizes[1].variable_operating_cost ≈ [30]
@test l1.sizes[2].variable_operating_cost ≈ [30]
@test l1.disposal_limit[p2] ≈ [2.0]
@test l1.disposal_limit[p3] ≈ [2.0]
@test l1.disposal_cost[p2] ≈ [-10.0]
@test l1.disposal_cost[p3] ≈ [-10.0]
end
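# Note (not part of the original source): judging by the expectations above and the
# values checked in parse_test.jl, _compress collapses the two time periods into one
# by summing quantities and recurring charges (amount 934.56 + 934.56 = 1869.12,
# fixed operating cost 30 + 30 = 60, disposal limit 1 + 1 = 2), while opening and
# variable operating costs keep their original per-period values.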

View File

@@ -0,0 +1,76 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>
using RELOG
@testset "parse" begin
basedir = dirname(@__FILE__)
instance = RELOG.parsefile("$basedir/../../instances/s1.json")
centers = instance.collection_centers
plants = instance.plants
products = instance.products
location_name_to_plant = Dict(p.location_name => p for p in plants)
product_name_to_product = Dict(p.name => p for p in products)
@test length(centers) == 10
@test centers[1].name == "C1"
@test centers[1].latitude == 7
@test centers[1].latitude == 7
@test centers[1].longitude == 7
@test centers[1].amount == [934.56, 934.56]
@test centers[1].product.name == "P1"
@test length(plants) == 6
plant = location_name_to_plant["L1"]
@test plant.plant_name == "F1"
@test plant.location_name == "L1"
@test plant.input.name == "P1"
@test plant.latitude == 0
@test plant.longitude == 0
@test length(plant.sizes) == 2
@test plant.sizes[1].capacity == 250
@test plant.sizes[1].opening_cost == [500, 500]
@test plant.sizes[1].fixed_operating_cost == [30, 30]
@test plant.sizes[1].variable_operating_cost == [30, 30]
@test plant.sizes[2].capacity == 1000
@test plant.sizes[2].opening_cost == [1250, 1250]
@test plant.sizes[2].fixed_operating_cost == [30, 30]
@test plant.sizes[2].variable_operating_cost == [30, 30]
p2 = product_name_to_product["P2"]
p3 = product_name_to_product["P3"]
@test length(plant.output) == 2
@test plant.output[p2] == 0.2
@test plant.output[p3] == 0.5
@test plant.disposal_limit[p2] == [1, 1]
@test plant.disposal_limit[p3] == [1, 1]
@test plant.disposal_cost[p2] == [-10, -10]
@test plant.disposal_cost[p3] == [-10, -10]
plant = location_name_to_plant["L3"]
@test plant.location_name == "L3"
@test plant.input.name == "P2"
@test plant.latitude == 25
@test plant.longitude == 65
@test length(plant.sizes) == 2
@test plant.sizes[1].capacity == 1000.0
@test plant.sizes[1].opening_cost == [3000, 3000]
@test plant.sizes[1].fixed_operating_cost == [50, 50]
@test plant.sizes[1].variable_operating_cost == [50, 50]
@test plant.sizes[1] == plant.sizes[2]
p4 = product_name_to_product["P4"]
@test plant.output[p3] == 0.05
@test plant.output[p4] == 0.8
@test plant.disposal_limit[p3] == [1e8, 1e8]
@test plant.disposal_limit[p4] == [0, 0]
end
@testset "parse (invalid)" begin
basedir = dirname(@__FILE__)
@test_throws String RELOG.parsefile("$basedir/../fixtures/s1-wrong-length.json")
end

View File

@@ -1,127 +0,0 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>
using RELOG
@testset "Instance" begin
@testset "load" begin
basedir = dirname(@__FILE__)
instance = RELOG.parsefile("$basedir/../instances/s1.json")
centers = instance.collection_centers
plants = instance.plants
products = instance.products
location_name_to_plant = Dict(p.location_name => p for p in plants)
product_name_to_product = Dict(p.name => p for p in products)
@test length(centers) == 10
@test centers[1].name == "C1"
@test centers[1].latitude == 7
@test centers[1].latitude == 7
@test centers[1].longitude == 7
@test centers[1].amount == [934.56, 934.56]
@test centers[1].product.name == "P1"
@test length(plants) == 6
plant = location_name_to_plant["L1"]
@test plant.plant_name == "F1"
@test plant.location_name == "L1"
@test plant.input.name == "P1"
@test plant.latitude == 0
@test plant.longitude == 0
@test length(plant.sizes) == 2
@test plant.sizes[1].capacity == 250
@test plant.sizes[1].opening_cost == [500, 500]
@test plant.sizes[1].fixed_operating_cost == [30, 30]
@test plant.sizes[1].variable_operating_cost == [30, 30]
@test plant.sizes[2].capacity == 1000
@test plant.sizes[2].opening_cost == [1250, 1250]
@test plant.sizes[2].fixed_operating_cost == [30, 30]
@test plant.sizes[2].variable_operating_cost == [30, 30]
p2 = product_name_to_product["P2"]
p3 = product_name_to_product["P3"]
@test length(plant.output) == 2
@test plant.output[p2] == 0.2
@test plant.output[p3] == 0.5
@test plant.disposal_limit[p2] == [1, 1]
@test plant.disposal_limit[p3] == [1, 1]
@test plant.disposal_cost[p2] == [-10, -10]
@test plant.disposal_cost[p3] == [-10, -10]
plant = location_name_to_plant["L3"]
@test plant.location_name == "L3"
@test plant.input.name == "P2"
@test plant.latitude == 25
@test plant.longitude == 65
@test length(plant.sizes) == 2
@test plant.sizes[1].capacity == 1000.0
@test plant.sizes[1].opening_cost == [3000, 3000]
@test plant.sizes[1].fixed_operating_cost == [50, 50]
@test plant.sizes[1].variable_operating_cost == [50, 50]
@test plant.sizes[1] == plant.sizes[2]
p4 = product_name_to_product["P4"]
@test plant.output[p3] == 0.05
@test plant.output[p4] == 0.8
@test plant.disposal_limit[p3] == [1e8, 1e8]
@test plant.disposal_limit[p4] == [0, 0]
end
@testset "validate timeseries" begin
@test_throws String RELOG.parsefile("fixtures/s1-wrong-length.json")
end
@testset "compress" begin
basedir = dirname(@__FILE__)
instance = RELOG.parsefile("$basedir/../instances/s1.json")
compressed = RELOG._compress(instance)
product_name_to_product = Dict(p.name => p for p in compressed.products)
location_name_to_facility = Dict()
for p in compressed.plants
location_name_to_facility[p.location_name] = p
end
for c in compressed.collection_centers
location_name_to_facility[c.name] = c
end
p1 = product_name_to_product["P1"]
p2 = product_name_to_product["P2"]
p3 = product_name_to_product["P3"]
c1 = location_name_to_facility["C1"]
l1 = location_name_to_facility["L1"]
@test compressed.time == 1
@test compressed.building_period == [1]
@test p1.name == "P1"
@test p1.transportation_cost ≈ [0.015]
@test p1.transportation_energy ≈ [0.115]
@test p1.transportation_emissions["CO2"] ≈ [0.051]
@test p1.transportation_emissions["CH4"] ≈ [0.0025]
@test c1.name == "C1"
@test c1.amount ≈ [1869.12]
@test l1.plant_name == "F1"
@test l1.location_name == "L1"
@test l1.energy ≈ [0.115]
@test l1.emissions["CO2"] ≈ [0.051]
@test l1.emissions["CH4"] ≈ [0.0025]
@test l1.sizes[1].opening_cost ≈ [500]
@test l1.sizes[2].opening_cost ≈ [1250]
@test l1.sizes[1].fixed_operating_cost ≈ [60]
@test l1.sizes[2].fixed_operating_cost ≈ [60]
@test l1.sizes[1].variable_operating_cost ≈ [30]
@test l1.sizes[2].variable_operating_cost ≈ [30]
@test l1.disposal_limit[p2] ≈ [2.0]
@test l1.disposal_limit[p3] ≈ [2.0]
@test l1.disposal_cost[p2] ≈ [-10.0]
@test l1.disposal_cost[p3] ≈ [-10.0]
end
end

38
test/model/build_test.jl Normal file
View File

@@ -0,0 +1,38 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>
using RELOG, Cbc, JuMP, Printf, JSON, MathOptInterface.FileFormats
@testset "build" begin
basedir = dirname(@__FILE__)
instance = RELOG.parsefile("$basedir/../../instances/s1.json")
graph = RELOG.build_graph(instance)
model = RELOG.build_model(instance, graph, Cbc.Optimizer)
set_optimizer_attribute(model, "logLevel", 0)
process_node_by_location_name =
Dict(n.location.location_name => n for n in graph.process_nodes)
shipping_node_by_loc_and_prod_names = Dict(
(n.location.location_name, n.product.name) => n for n in graph.plant_shipping_nodes
)
@test length(model[:flow]) == 76
@test length(model[:dispose]) == 16
@test length(model[:open_plant]) == 12
@test length(model[:capacity]) == 12
@test length(model[:expansion]) == 12
l1 = process_node_by_location_name["L1"]
v = model[:capacity][l1, 1]
@test lower_bound(v) == 0.0
@test upper_bound(v) == 1000.0
v = model[:expansion][l1, 1]
@test lower_bound(v) == 0.0
@test upper_bound(v) == 750.0
v = model[:dispose][shipping_node_by_loc_and_prod_names["L1", "P2"], 1]
@test lower_bound(v) == 0.0
@test upper_bound(v) == 1.0
end

61
test/model/solve_test.jl Normal file
View File

@@ -0,0 +1,61 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>
using RELOG, Cbc, JuMP, Printf, JSON, MathOptInterface.FileFormats
basedir = dirname(@__FILE__)
@testset "solve (exact)" begin
solution_filename_a = tempname()
solution_filename_b = tempname()
solution = RELOG.solve("$basedir/../../instances/s1.json", output = solution_filename_a)
@test isfile(solution_filename_a)
RELOG.write(solution, solution_filename_b)
@test isfile(solution_filename_b)
@test "Costs" in keys(solution)
@test "Fixed operating (\$)" in keys(solution["Costs"])
@test "Transportation (\$)" in keys(solution["Costs"])
@test "Variable operating (\$)" in keys(solution["Costs"])
@test "Total (\$)" in keys(solution["Costs"])
@test "Plants" in keys(solution)
@test "F1" in keys(solution["Plants"])
@test "F2" in keys(solution["Plants"])
@test "F3" in keys(solution["Plants"])
@test "F4" in keys(solution["Plants"])
end
@testset "solve (heuristic)" begin
# Should not crash
solution = RELOG.solve("$basedir/../../instances/s1.json", heuristic = true)
end
@testset "solve (infeasible)" begin
json = JSON.parsefile("$basedir/../../instances/s1.json")
for (location_name, location_dict) in json["products"]["P1"]["initial amounts"]
location_dict["amount (tonne)"] *= 1000
end
RELOG.solve(RELOG.parse(json))
end
@testset "solve (with storage)" begin
basedir = dirname(@__FILE__)
filename = "$basedir/../fixtures/storage.json"
instance = RELOG.parsefile(filename)
@test instance.plants[1].storage_limit == 50.0
@test instance.plants[1].storage_cost == [2.0, 1.5, 1.0]
solution = RELOG.solve(filename)
plant_dict = solution["Plants"]["mega plant"]["Chicago"]
@test plant_dict["Variable operating cost (\$)"] == [500.0, 0.0, 100.0]
@test plant_dict["Process (tonne)"] == [50.0, 0.0, 50.0]
@test plant_dict["Storage (tonne)"] == [50.0, 50.0, 0.0]
@test plant_dict["Storage cost (\$)"] == [100.0, 75.0, 0.0]
@test solution["Costs"]["Variable operating (\$)"] == [500.0, 0.0, 100.0]
@test solution["Costs"]["Storage (\$)"] == [100.0, 75.0, 0.0]
@test solution["Costs"]["Total (\$)"] == [600.0, 75.0, 100.0]
end

View File

@@ -1,100 +0,0 @@
# Copyright (C) 2020 Argonne National Laboratory
# Written by Alinson Santos Xavier <axavier@anl.gov>
using RELOG, Cbc, JuMP, Printf, JSON, MathOptInterface.FileFormats
@testset "Model" begin
@testset "build" begin
basedir = dirname(@__FILE__)
instance = RELOG.parsefile("$basedir/../instances/s1.json")
graph = RELOG.build_graph(instance)
model = RELOG.build_model(instance, graph, Cbc.Optimizer)
set_optimizer_attribute(model.mip, "logLevel", 0)
process_node_by_location_name = Dict(n.location.location_name => n
for n in graph.process_nodes)
shipping_node_by_location_and_product_names = Dict((n.location.location_name, n.product.name) => n
for n in graph.plant_shipping_nodes)
@test length(model.vars.flow) == 76
@test length(model.vars.dispose) == 16
@test length(model.vars.open_plant) == 12
@test length(model.vars.capacity) == 12
@test length(model.vars.expansion) == 12
l1 = process_node_by_location_name["L1"]
v = model.vars.capacity[l1, 1]
@test lower_bound(v) == 0.0
@test upper_bound(v) == 1000.0
v = model.vars.expansion[l1, 1]
@test lower_bound(v) == 0.0
@test upper_bound(v) == 750.0
v = model.vars.dispose[shipping_node_by_location_and_product_names["L1", "P2"], 1]
@test lower_bound(v) == 0.0
@test upper_bound(v) == 1.0
# dest = FileFormats.Model(format = FileFormats.FORMAT_LP)
# MOI.copy_to(dest, model.mip)
# MOI.write_to_file(dest, "model.lp")
end
@testset "solve (exact)" begin
solution_filename_a = tempname()
solution_filename_b = tempname()
solution = RELOG.solve("$(pwd())/../instances/s1.json",
output=solution_filename_a)
@test isfile(solution_filename_a)
RELOG.write(solution, solution_filename_b)
@test isfile(solution_filename_b)
@test "Costs" in keys(solution)
@test "Fixed operating (\$)" in keys(solution["Costs"])
@test "Transportation (\$)" in keys(solution["Costs"])
@test "Variable operating (\$)" in keys(solution["Costs"])
@test "Total (\$)" in keys(solution["Costs"])
@test "Plants" in keys(solution)
@test "F1" in keys(solution["Plants"])
@test "F2" in keys(solution["Plants"])
@test "F3" in keys(solution["Plants"])
@test "F4" in keys(solution["Plants"])
end
@testset "solve (heuristic)" begin
# Should not crash
solution = RELOG.solve("$(pwd())/../instances/s1.json", heuristic=true)
end
@testset "infeasible solve" begin
json = JSON.parsefile("$(pwd())/../instances/s1.json")
for (location_name, location_dict) in json["products"]["P1"]["initial amounts"]
location_dict["amount (tonne)"] *= 1000
end
RELOG.solve(RELOG.parse(json))
end
@testset "storage" begin
basedir = dirname(@__FILE__)
filename = "$basedir/fixtures/storage.json"
instance = RELOG.parsefile(filename)
@test instance.plants[1].storage_limit == 50.0
@test instance.plants[1].storage_cost == [2.0, 1.5, 1.0]
solution = RELOG.solve(filename)
plant_dict = solution["Plants"]["mega plant"]["Chicago"]
@test plant_dict["Variable operating cost (\$)"] == [500.0, 0.0, 100.0]
@test plant_dict["Process (tonne)"] == [50.0, 0.0, 50.0]
@test plant_dict["Storage (tonne)"] == [50.0, 50.0, 0.0]
@test plant_dict["Storage cost (\$)"] == [100.0, 75.0, 0.0]
@test solution["Costs"]["Variable operating (\$)"] == [500.0, 0.0, 100.0]
@test solution["Costs"]["Storage (\$)"] == [100.0, 75.0, 0.0]
@test solution["Costs"]["Total (\$)"] == [600.0, 75.0, 100.0]
end
end

View File

@@ -4,30 +4,7 @@
using RELOG, JSON, GZip
load_json_gz(filename) = JSON.parse(GZip.gzopen(filename))
# function check(func, expected_csv_filename::String)
# solution = load_json_gz("fixtures/nimh_solution.json.gz")
# actual_csv_filename = tempname()
# func(solution, actual_csv_filename)
# @test isfile(actual_csv_filename)
# if readlines(actual_csv_filename) != readlines(expected_csv_filename)
# out_filename = replace(expected_csv_filename, ".csv" => "_actual.csv")
# @error "$func: Unexpected CSV contents: $out_filename"
# write(out_filename, read(actual_csv_filename))
# @test false
# end
# end
@testset "Reports" begin
# @testset "from fixture" begin
# check(RELOG.write_plants_report, "fixtures/nimh_plants.csv")
# check(RELOG.write_plant_outputs_report, "fixtures/nimh_plant_outputs.csv")
# check(RELOG.write_plant_emissions_report, "fixtures/nimh_plant_emissions.csv")
# check(RELOG.write_transportation_report, "fixtures/nimh_transportation.csv")
# check(RELOG.write_transportation_emissions_report, "fixtures/nimh_transportation_emissions.csv")
# end
@testset "from solve" begin
solution = RELOG.solve("$(pwd())/../instances/s1.json")
tmp_filename = tempname()

View File

@@ -4,8 +4,16 @@
using Test
@testset "RELOG" begin
include("instance_test.jl")
include("graph_test.jl")
include("model_test.jl")
@testset "Instance" begin
include("instance/compress_test.jl")
include("instance/parse_test.jl")
end
@testset "Graph" begin
include("graph/build_test.jl")
end
@testset "Model" begin
include("model/build_test.jl")
include("model/solve_test.jl")
end
include("reports_test.jl")
end