80 Commits

SHA1 Message Date
7bce105428 Fix formatting 2023-02-15 13:47:00 -06:00
1aa01b7b2b Merge branch 'master' into relog-web 2023-02-15 13:27:47 -06:00
e86ae0f818 Add RELOG.version() 2023-02-15 13:22:47 -06:00
22d73c9ded Move tests to a separate package; update GitHub CI and docs 2023-02-15 12:32:29 -06:00
a8e4491ea3 Merge branch 'master' into feature/collection-disposal 2023-02-15 11:01:10 -06:00
50d53f628f Reformat source code 2023-01-24 10:31:40 -06:00
79748e3c13 Dist: Drop NaN in training dataset 2023-01-24 10:27:41 -06:00
d1f6796c96 Update CHANGELOG.md 2022-12-16 15:33:16 -06:00
51ff8eb130 Restrict NearestNeighbors version; remove debug statement 2022-12-15 11:03:44 -06:00
9191474df8 Bump version to 0.6 2022-12-15 10:36:44 -06:00
841fbf16fb Make distance metric configurable; fix building period bug 2022-12-15 10:26:10 -06:00
48bd3c403f Switch from Euclidean to approximate driving distance 2022-12-15 09:49:38 -06:00
23b3b33146 Update README.md 2022-10-28 14:05:53 -05:00
86dee7558b Replace Cbc/Clp by HiGHS 2022-09-08 12:14:49 -05:00
d84b74a8a7 relog-web: Make time limit configurable 2022-09-08 11:35:11 -05:00
bae39a4ff4 Merge tag 'v0.5.2' into feature/gui
[Diff since v0.5.1](https://github.com/ANL-CEEESA/RELOG/compare/v0.5.1...v0.5.2)
2022-08-26 14:34:17 -05:00
da158eb961 Update CHANGELOG 2022-08-26 13:26:15 -05:00
e7eec937cb Update README.md 2022-08-26 13:21:42 -05:00
19bec961bd GH Actions: Update Julia versions 2022-08-26 13:12:20 -05:00
8f52c04702 Fix broken image link 2022-08-26 13:08:22 -05:00
19a34fb5d2 Update dependencies; switch to Documenter.jl 2022-08-26 13:04:47 -05:00
8bf2baf809 Enable marginal costs 2022-08-22 11:13:43 -05:00
027ffcd94c Update gitignore 2022-08-22 11:12:29 -05:00
84cf4ddcd9 Casebuilder: Add MapBlock; fix zip paths 2022-06-10 16:53:32 -05:00
8bce7c047b Case Builder: Disable buttons on submit 2022-06-09 16:49:48 -05:00
ee767b9ebd Create solver page; add Dockerfile 2022-06-06 13:46:22 -05:00
9112d9fde5 casebuilder: Validate JSON schema on load/save 2022-05-27 14:36:54 -05:00
3d03dfc722 Fix plant input 2022-04-01 16:42:06 -05:00
01a4c6626d Disable download button when no data is available 2022-04-01 13:53:28 -05:00
310f5c389e Fix import/export of default values 2022-04-01 13:41:53 -05:00
1273110419 Hide disposal for non-primary products 2022-03-18 16:47:28 -05:00
e797cb98e0 Minor changes to case builder 2022-03-18 10:33:16 -05:00
0beb30800e Enable controls 2022-03-18 10:05:08 -05:00
02a81e5fdd Switch to IndexedDB 2022-03-18 10:04:58 -05:00
096d95a1aa Implement inflation 2022-03-18 09:43:01 -05:00
01452441dc Reformat source code; implement import/export 2022-03-17 15:44:21 -05:00
56b673fb9e Initial amount CSV upload/download 2022-03-15 16:06:18 -05:00
af2a8b67be Implement form validation 2022-03-15 10:23:51 -05:00
524299a3c2 Make pipeline, parameters & product interactive 2022-03-15 09:29:53 -05:00
0e53a4334e Implement initial, static version of input GUI 2022-03-14 11:10:51 -05:00
a03b9169fd Allow product disposal at collection centers 2021-10-15 09:11:41 -05:00
ee58af73f0 Update sysimage and build scripts 2021-10-15 08:14:04 -05:00
92d30460b9 Update README.md 2021-09-03 18:07:40 -05:00
9ebb2e49f9 Fix validation error on JSONSchema 0.3 2021-08-06 14:56:46 -05:00
505e3a8e1e Update CHANGELOG 2021-07-23 17:42:48 -05:00
d4fa75297f Fix OrderedCollections version 2021-07-23 17:42:29 -05:00
881957d6b5 Implement resolve 2021-07-21 14:53:49 -05:00
86cf7f5bd9 Throw exception for infeasible models 2021-07-21 14:18:10 -05:00
a8c7047e2d Add custom show function for Instance and Graph
Without these functions, Julia 1.5 enters an infinite loop whenever it
tries to generate a stack trace, so any error (such as a missing method)
causes the program to hang instead of displaying an error message.
2021-07-21 14:11:01 -05:00
099e0fae3a Docs: Minor fixes to what-if analysis section 2021-07-21 14:07:00 -05:00
1b8f392852 Docs: Add description of resolve 2021-07-21 14:07:00 -05:00
7a95aa66f6 Update CHANGELOG 2021-07-21 11:49:54 -05:00
40d28c727a Add products report 2021-07-16 11:25:40 -05:00
a9ac164833 Fix GeoDB download 2021-07-16 10:31:21 -05:00
e244ded51d GH Actions: Add Julia 1.6, remove nightly 2021-07-16 10:18:10 -05:00
7180651cfa Reformat source code 2021-07-16 10:15:41 -05:00
0c9465411f Document GeoDB; remove unused code; minor fixes 2021-07-16 10:13:58 -05:00
658d5ddbdc Add population to region; disable zip codes 2021-07-01 17:14:00 -05:00
399db41f86 Temporarily disable failing test 2021-07-01 16:13:38 -05:00
e407a53ecf Download and join population 2021-07-01 16:10:55 -05:00
33ab4c5f76 GeoDB: Prepare for population 2021-07-01 14:56:08 -05:00
c9391dd299 Update JSONSchema 2021-07-01 14:56:08 -05:00
6c70d9acd5 GeoDB: Add 2018-us-zcta and us-state 2021-07-01 14:56:08 -05:00
339255bf9b Enable geodb in input files 2021-07-01 14:56:08 -05:00
ca187fe78e Implement geodb.jl 2021-07-01 14:56:08 -05:00
c256cd8b75 Update CHANGELOG.md 2021-06-25 06:16:20 -05:00
05d48e2cbf Update tagbot.yml 2021-06-25 06:13:27 -05:00
9446b1921d Add tagbot.yml 2021-06-22 11:10:15 -05:00
1b0cc141bb Remove Manifest.toml from repository 2021-06-22 11:00:11 -05:00
a333ab0b04 Remove unused code and test resources 2021-06-22 10:58:46 -05:00
630ae49d4a Replace Array by Vector 2021-06-22 10:53:40 -05:00
9df416ed75 Split files 2021-06-22 10:50:11 -05:00
849f902562 Reformat code 2021-06-22 10:21:17 -05:00
1990563476 Remove dotdict 2021-06-22 10:20:40 -05:00
7e783c8b91 Replace ManufacturingModel by JuMP.Model 2021-06-22 10:19:31 -05:00
93cc6fbf32 Remove model.eqs 2021-06-22 10:10:48 -05:00
a7938b7260 Remove model.vars 2021-06-22 10:08:01 -05:00
56ef1f7bc2 Update dependencies 2021-06-22 10:07:40 -05:00
b00b24ffbc Reformat source code; set up lint GH Action 2021-06-22 09:49:45 -05:00
823db2838b GH Actions: Run tests daily 2021-06-22 09:41:01 -05:00
124 changed files with 36583 additions and 9688 deletions

.dockerignore (new file, 4 lines)

@@ -0,0 +1,4 @@
build
jobs
relog-web/node_modules
relog-web/build

.github/workflows/lint.yml (new file, 27 lines)

@@ -0,0 +1,27 @@
name: Lint
on:
push:
pull_request:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@latest
with:
version: '1'
- uses: actions/checkout@v1
- name: Format check
shell: julia --color=yes {0}
run: |
using Pkg
Pkg.add(PackageSpec(name="JuliaFormatter", version="1"))
using JuliaFormatter
format("src", verbose=true)
format("test/src", verbose=true)
out = String(read(Cmd(`git diff`)))
if isempty(out)
exit(0)
end
@error "Some files have not been formatted !!!"
write(stdout, out)
exit(1)
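
A local equivalent of the check above, as a sketch (it assumes JuliaFormatter is installed in the current environment and that `format` returns `true` when nothing needed reformatting):

```julia
# Sketch: run the same format check locally before pushing.
using JuliaFormatter

ok = format("src", verbose = true) && format("test/src", verbose = true)
ok || @error "Some files were reformatted; review and commit the changes."
```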

.github/workflows/tagbot.yml (new file, 15 lines)

@@ -0,0 +1,15 @@
name: TagBot
on:
issue_comment:
types:
- created
workflow_dispatch:
jobs:
TagBot:
if: github.event_name == 'workflow_dispatch' || github.actor == 'JuliaTagBot'
runs-on: ubuntu-latest
steps:
- uses: JuliaRegistries/TagBot@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
ssh: ${{ secrets.DOCUMENTER_KEY }}


@@ -1,14 +1,16 @@
 name: Build & Test
 on:
-  - push
-  - pull_request
+  push:
+  pull_request:
+  schedule:
+    - cron: '45 10 * * *'
 jobs:
   test:
     name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }}
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        version: ['1.3', '1.4', '1.5', 'nightly']
+        version: ['1.6', '1.7', '1.8']
         os:
           - ubuntu-latest
         arch:
@@ -19,5 +21,15 @@ jobs:
         with:
           version: ${{ matrix.version }}
           arch: ${{ matrix.arch }}
-      - uses: julia-actions/julia-buildpkg@v1
-      - uses: julia-actions/julia-runtest@v1
+      - name: Run tests
+        shell: julia --color=yes --project=test {0}
+        run: |
+          using Pkg
+          Pkg.develop(path=".")
+          Pkg.update()
+          using RELOGT
+          try
+              runtests()
+          catch
+              exit(1)
+          end
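
The same test procedure can be reproduced in a local Julia session; a sketch that mirrors the CI step above (the `test/` project and the `RELOGT.runtests` entry point come from the workflow itself):

```julia
# Sketch: run the test suite locally, the same way the CI workflow does.
using Pkg
Pkg.activate("test")      # equivalent to `--project=test`
Pkg.develop(path=".")     # make the local RELOG checkout available to the tests
Pkg.update()
using RELOGT
runtests()
```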

.gitignore (8 lines changed)

@@ -8,3 +8,11 @@ instances/*.py
 notebooks
 .idea
 *.lp
+Manifest.toml
+data
+build
+benchmark
+run.jl
+relog-web-legacy
+.vscode
+jobs


@@ -1,28 +1,60 @@
-# Version 0.5.0 (TBD)
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+- The format is based on [Keep a Changelog][changelog].
+- This project adheres to [Semantic Versioning][semver].
+- For versions before 1.0, we follow the [Pkg.jl convention][pkjjl]
+  that `0.a.b` is compatible with `0.a.c`.
+
+[changelog]: https://keepachangelog.com/en/1.0.0/
+[semver]: https://semver.org/spec/v2.0.0.html
+[pkjjl]: https://pkgdocs.julialang.org/v1/compatibility/#compat-pre-1.0
+
+## [0.6.0] -- 2022-12-15
+### Added
+- Allow RELOG to calculate approximate driving distances, instead of just straight-line distances between points.
+### Fixed
+- Fix bug that caused building period parameter to be ignored
+
+## [0.5.2] -- 2022-08-26
+### Changed
+- Update to JuMP 1.x
+
+## [0.5.1] -- 2021-07-23
+### Added
+- Allow user to specify locations as unique identifiers, instead of latitude and longitude (e.g. `us-state:IL` or `2018-us-county:17043`)
+- Add what-if scenarios.
+- Add products report.
+
+## [0.5.0] -- 2021-01-06
+### Added
 - Allow plants to store input material for processing in later years
-# Version 0.4.0 (Sep 18, 2020)
+
+## [0.4.0] -- 2020-09-18
+### Added
 - Generate simplified solution reports (CSV)
-# Version 0.3.3 (Aug 13, 2020)
+
+## [0.3.3] -- 2020-10-13
+### Added
 - Add option to write solution to JSON file in RELOG.solve
 - Improve error message when instance is infeasible
 - Make output file more readable
-# Version 0.3.2 (Aug 7, 2020)
+
+## [0.3.2] -- 2020-10-07
+### Added
 - Add "building period" parameter
-# Version 0.3.1 (July 17, 2020)
+
+## [0.3.1] -- 2020-07-17
+### Fixed
 - Fix expansion cost breakdown
-# Version 0.3.0 (June 25, 2020)
+
+## [0.3.0] -- 2020-06-25
+### Added
 - Track emissions and energy (transportation and plants)
+### Changed
 - Minor changes to input file format:
     - Make all dictionary keys lowercase
     - Rename "outputs (tonne)" to "outputs (tonne/tonne)"

Dockerfile (new file, 29 lines)

@@ -0,0 +1,29 @@
FROM julia:1.7-buster
ENV RELOG_TIME_LIMIT_SEC=3600
# Install Node.js & zip
RUN apt-get update -yq && \
apt-get -yq install curl gnupg ca-certificates && \
curl -L https://deb.nodesource.com/setup_18.x | bash && \
apt-get update -yq && \
apt-get install -yq nodejs zip
# Install Julia dependencies
ADD Project.toml /app/
ADD src/RELOG.jl /app/src/
RUN julia --project=/app -e 'using Pkg; Pkg.update()'
# Install JS dependencies
ADD relog-web/package*.json /app/relog-web/
RUN cd /app/relog-web && npm install
# Copy source code
ADD . /app
RUN julia --project=/app -e 'using Pkg; Pkg.precompile()'
# Build JS app
RUN cd /app/relog-web && npm run build
WORKDIR /app
CMD julia --project=/app -e 'import RELOG; RELOG.web("0.0.0.0")'
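
Outside of Docker, the container's entry point boils down to a single Julia call; a sketch, reusing the bind address and time limit from the Dockerfile above (whether `RELOG.web` reads `RELOG_TIME_LIMIT_SEC` directly is an assumption based on the environment variable set above):

```julia
# Sketch: start the RELOG web interface directly, as the container's CMD does.
ENV["RELOG_TIME_LIMIT_SEC"] = "3600"  # solver time limit used by the web solver (assumption)
import RELOG
RELOG.web("0.0.0.0")                  # same bind address as in the Dockerfile
```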


@@ -1,25 +1,29 @@
-JULIA := julia --color=yes --project=@.
-SRC_FILES := $(wildcard src/*.jl test/*.jl)
-VERSION := 0.5
-all: docs test
-build/sysimage.so: src/sysimage.jl Project.toml Manifest.toml
-    mkdir -p build
-    $(JULIA) src/sysimage.jl
-build/test.log: $(SRC_FILES) build/sysimage.so
-    cd test; $(JULIA) --sysimage ../build/sysimage.so runtests.jl
+VERSION := 0.6
+PKG := ghcr.io/anl-ceeesa/relog-web
 clean:
-    rm -rf build/*
+    rm -rfv build Manifest.toml test/Manifest.toml deps/formatter/build deps/formatter/Manifest.toml
 docs:
-    mkdocs build -d ../docs/$(VERSION)/
-test: build/test.log
-test-watch:
-    bash -c "while true; do make test --quiet; sleep 1; done"
-.PHONY: docs test
+    cd docs; julia --project=. make.jl; cd ..
+    rsync -avP --delete-after docs/build/ ../docs/$(VERSION)/
+docker-build:
+    docker build --tag $(PKG):$(VERSION) .
+    docker build --tag $(PKG):latest .
+docker-push:
+    docker push $(PKG):$(VERSION)
+    docker push $(PKG):latest
+docker-run:
+    docker run -it --rm --name relog --volume $(PWD)/jobs:/app/jobs --publish 8000:8080 $(PKG):$(VERSION)
+format:
+    cd deps/formatter; ../../juliaw format.jl
+test: test/Manifest.toml
+    ./juliaw test/runtests.jl
+test/Manifest.toml: test/Project.toml
+    julia --project=test -e "using Pkg; Pkg.instantiate()"
+.PHONY: docs test format


@@ -1,441 +0,0 @@
# This file is machine-generated - editing it directly is not advised
[[Base64]]
uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
[[BenchmarkTools]]
deps = ["JSON", "Logging", "Printf", "Statistics", "UUIDs"]
git-tree-sha1 = "9e62e66db34540a0c919d72172cc2f642ac71260"
uuid = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
version = "0.5.0"
[[BinaryProvider]]
deps = ["Libdl", "Logging", "SHA"]
git-tree-sha1 = "ecdec412a9abc8db54c0efc5548c64dfce072058"
uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232"
version = "0.5.10"
[[Bzip2_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "03a44490020826950c68005cafb336e5ba08b7e8"
uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0"
version = "1.0.6+4"
[[CEnum]]
git-tree-sha1 = "215a9aa4a1f23fbd05b92769fdd62559488d70e9"
uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82"
version = "0.4.1"
[[CSV]]
deps = ["CategoricalArrays", "DataFrames", "Dates", "Mmap", "Parsers", "PooledArrays", "SentinelArrays", "Tables", "Unicode"]
git-tree-sha1 = "a390152e6850405a48ca51bd7ca33d11a21d6230"
uuid = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
version = "0.7.7"
[[Calculus]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "f641eb0a4f00c343bbc32346e1217b86f3ce9dad"
uuid = "49dc2e85-a5d0-5ad3-a950-438e2897f1b9"
version = "0.5.1"
[[CategoricalArrays]]
deps = ["DataAPI", "Future", "JSON", "Missings", "Printf", "Statistics", "StructTypes", "Unicode"]
git-tree-sha1 = "2ac27f59196a68070e132b25713f9a5bbc5fa0d2"
uuid = "324d7699-5711-5eae-9e2f-1d82baa6b597"
version = "0.8.3"
[[Cbc]]
deps = ["BinaryProvider", "Libdl", "MathOptInterface", "MathProgBase", "SparseArrays", "Test"]
git-tree-sha1 = "62d80f448b5d77b3f0a59cecf6197aad2a3aa280"
uuid = "9961bab8-2fa3-5c5a-9d89-47fab24efd76"
version = "0.6.7"
[[Clp]]
deps = ["BinaryProvider", "CEnum", "Clp_jll", "Libdl", "MathOptInterface", "SparseArrays"]
git-tree-sha1 = "581763750759c1e38df2a35a0b3bdee496a062c7"
uuid = "e2554f3b-3117-50c0-817c-e040a3ddf72d"
version = "0.8.1"
[[Clp_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Osi_jll", "Pkg"]
git-tree-sha1 = "79263d9383ca89b35f31c33ab5b880536a8413a4"
uuid = "06985876-5285-5a41-9fcb-8948a742cc53"
version = "1.17.6+6"
[[CodecBzip2]]
deps = ["Bzip2_jll", "Libdl", "TranscodingStreams"]
git-tree-sha1 = "2e62a725210ce3c3c2e1a3080190e7ca491f18d7"
uuid = "523fee87-0ab8-5b00-afb7-3ecf72e48cfd"
version = "0.7.2"
[[CodecZlib]]
deps = ["TranscodingStreams", "Zlib_jll"]
git-tree-sha1 = "ded953804d019afa9a3f98981d99b33e3db7b6da"
uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
version = "0.7.0"
[[CoinUtils_jll]]
deps = ["CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "ee1f06ab89337b7f194c29377ab174e752cdf60d"
uuid = "be027038-0da8-5614-b30d-e42594cb92df"
version = "2.11.3+3"
[[CommonSubexpressions]]
deps = ["MacroTools", "Test"]
git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7"
uuid = "bbf7d656-a473-5ed7-a52c-81e309532950"
version = "0.3.0"
[[Compat]]
deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"]
git-tree-sha1 = "7c7f4cda0d58ec999189d70f5ee500348c4b4df1"
uuid = "34da2185-b29b-5c13-b0c7-acf172513d20"
version = "3.16.0"
[[CompilerSupportLibraries_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "7c4f882c41faa72118841185afc58a2eb00ef612"
uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
version = "0.3.3+0"
[[CoordinateTransformations]]
deps = ["LinearAlgebra", "StaticArrays"]
git-tree-sha1 = "c230b1d94db9fdd073168830437e64b9db627fcb"
uuid = "150eb455-5306-5404-9cee-2592286d6298"
version = "0.6.0"
[[DataAPI]]
git-tree-sha1 = "176e23402d80e7743fc26c19c681bfb11246af32"
uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a"
version = "1.3.0"
[[DataFrames]]
deps = ["CategoricalArrays", "Compat", "DataAPI", "Future", "InvertedIndices", "IteratorInterfaceExtensions", "Missings", "PooledArrays", "Printf", "REPL", "Reexport", "SortingAlgorithms", "Statistics", "TableTraits", "Tables", "Unicode"]
git-tree-sha1 = "a7c1c9a6e47a92321bbc9d500dab9b04cc4a6a39"
uuid = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
version = "0.21.7"
[[DataStructures]]
deps = ["InteractiveUtils", "OrderedCollections"]
git-tree-sha1 = "88d48e133e6d3dd68183309877eac74393daa7eb"
uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
version = "0.17.20"
[[DataValueInterfaces]]
git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6"
uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464"
version = "1.0.0"
[[Dates]]
deps = ["Printf"]
uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
[[DelimitedFiles]]
deps = ["Mmap"]
uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab"
[[DiffResults]]
deps = ["StaticArrays"]
git-tree-sha1 = "da24935df8e0c6cf28de340b958f6aac88eaa0cc"
uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5"
version = "1.0.2"
[[DiffRules]]
deps = ["NaNMath", "Random", "SpecialFunctions"]
git-tree-sha1 = "eb0c34204c8410888844ada5359ac8b96292cfd1"
uuid = "b552c78f-8df3-52c6-915a-8e097449b14b"
version = "1.0.1"
[[Distributed]]
deps = ["Random", "Serialization", "Sockets"]
uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
[[ForwardDiff]]
deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "NaNMath", "Random", "SpecialFunctions", "StaticArrays"]
git-tree-sha1 = "1d090099fb82223abc48f7ce176d3f7696ede36d"
uuid = "f6369f11-7733-5829-9624-2563aa707210"
version = "0.10.12"
[[Future]]
deps = ["Random"]
uuid = "9fa8497b-333b-5362-9e8d-4d0656e87820"
[[GZip]]
deps = ["Libdl"]
git-tree-sha1 = "039be665faf0b8ae36e089cd694233f5dee3f7d6"
uuid = "92fee26a-97fe-5a0c-ad85-20a5f3185b63"
version = "0.5.1"
[[Geodesy]]
deps = ["CoordinateTransformations", "Dates", "LinearAlgebra", "StaticArrays", "Test"]
git-tree-sha1 = "f80ea86cb88db337a1906e245e495592f0b5cc25"
uuid = "0ef565a4-170c-5f04-8de2-149903a85f3d"
version = "0.5.0"
[[HTTP]]
deps = ["Base64", "Dates", "IniFile", "MbedTLS", "Sockets"]
git-tree-sha1 = "c7ec02c4c6a039a98a15f955462cd7aea5df4508"
uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3"
version = "0.8.19"
[[IniFile]]
deps = ["Test"]
git-tree-sha1 = "098e4d2c533924c921f9f9847274f2ad89e018b8"
uuid = "83e8ac13-25f8-5344-8a64-a9f2b223428f"
version = "0.5.0"
[[InteractiveUtils]]
deps = ["Markdown"]
uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
[[InvertedIndices]]
deps = ["Test"]
git-tree-sha1 = "15732c475062348b0165684ffe28e85ea8396afc"
uuid = "41ab1584-1d38-5bbf-9106-f11c6c58b48f"
version = "1.0.0"
[[IteratorInterfaceExtensions]]
git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856"
uuid = "82899510-4779-5014-852e-03e436cf321d"
version = "1.0.0"
[[JSON]]
deps = ["Dates", "Mmap", "Parsers", "Unicode"]
git-tree-sha1 = "81690084b6198a2e1da36fcfda16eeca9f9f24e4"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.21.1"
[[JSONSchema]]
deps = ["BinaryProvider", "HTTP", "JSON"]
git-tree-sha1 = "b0a7f9328967df5213691d318a03cf70ea8c76b1"
uuid = "7d188eb4-7ad8-530c-ae41-71a32a6d4692"
version = "0.2.0"
[[JuMP]]
deps = ["Calculus", "DataStructures", "ForwardDiff", "LinearAlgebra", "MathOptInterface", "MutableArithmetics", "NaNMath", "Random", "SparseArrays", "Statistics"]
git-tree-sha1 = "cbab42e2e912109d27046aa88f02a283a9abac7c"
uuid = "4076af6c-e467-56ae-b986-b466b2749572"
version = "0.21.3"
[[LibGit2]]
deps = ["Printf"]
uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
[[Libdl]]
uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
[[LinearAlgebra]]
deps = ["Libdl"]
uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
[[Logging]]
uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
[[MacroTools]]
deps = ["Markdown", "Random"]
git-tree-sha1 = "f7d2e3f654af75f01ec49be82c231c382214223a"
uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
version = "0.5.5"
[[Markdown]]
deps = ["Base64"]
uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
[[MathOptInterface]]
deps = ["BenchmarkTools", "CodecBzip2", "CodecZlib", "JSON", "JSONSchema", "LinearAlgebra", "MutableArithmetics", "OrderedCollections", "SparseArrays", "Test", "Unicode"]
git-tree-sha1 = "27f2ef85879b8f1d144266ab44f076ba0dfbd8a1"
uuid = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
version = "0.9.13"
[[MathProgBase]]
deps = ["LinearAlgebra", "SparseArrays"]
git-tree-sha1 = "9abbe463a1e9fc507f12a69e7f29346c2cdc472c"
uuid = "fdba3010-5040-5b88-9595-932c9decdf73"
version = "0.7.8"
[[MbedTLS]]
deps = ["Dates", "MbedTLS_jll", "Random", "Sockets"]
git-tree-sha1 = "426a6978b03a97ceb7ead77775a1da066343ec6e"
uuid = "739be429-bea8-5141-9913-cc70e7f3736d"
version = "1.0.2"
[[MbedTLS_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "c0b1286883cac4e2b617539de41111e0776d02e8"
uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1"
version = "2.16.8+0"
[[Missings]]
deps = ["DataAPI"]
git-tree-sha1 = "ed61674a0864832495ffe0a7e889c0da76b0f4c8"
uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28"
version = "0.4.4"
[[Mmap]]
uuid = "a63ad114-7e13-5084-954f-fe012c677804"
[[MutableArithmetics]]
deps = ["LinearAlgebra", "SparseArrays", "Test"]
git-tree-sha1 = "6cf09794783b9de2e662c4e8b60d743021e338d0"
uuid = "d8a4904e-b15c-11e9-3269-09a3773c0cb0"
version = "0.2.10"
[[NaNMath]]
git-tree-sha1 = "c84c576296d0e2fbb3fc134d3e09086b3ea617cd"
uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
version = "0.3.4"
[[OpenBLAS32_jll]]
deps = ["CompilerSupportLibraries_jll", "Libdl", "Pkg"]
git-tree-sha1 = "793b33911239d2651c356c823492b58d6490d36a"
uuid = "656ef2d0-ae68-5445-9ca0-591084a874a2"
version = "0.3.9+4"
[[OpenSpecFun_jll]]
deps = ["CompilerSupportLibraries_jll", "Libdl", "Pkg"]
git-tree-sha1 = "d51c416559217d974a1113522d5919235ae67a87"
uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e"
version = "0.5.3+3"
[[OrderedCollections]]
git-tree-sha1 = "16c08bf5dba06609fe45e30860092d6fa41fde7b"
uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
version = "1.3.1"
[[Osi_jll]]
deps = ["CoinUtils_jll", "CompilerSupportLibraries_jll", "Libdl", "OpenBLAS32_jll", "Pkg"]
git-tree-sha1 = "bd436a97280df40938e66ae8d18e57aceb072856"
uuid = "7da25872-d9ce-5375-a4d3-7a845f58efdd"
version = "0.108.5+3"
[[PackageCompiler]]
deps = ["Libdl", "Pkg", "UUIDs"]
git-tree-sha1 = "98aa9c653e1dc3473bb5050caf8501293db9eee1"
uuid = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
version = "1.2.1"
[[Parsers]]
deps = ["Dates", "Test"]
git-tree-sha1 = "8077624b3c450b15c087944363606a6ba12f925e"
uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
version = "1.0.10"
[[Pkg]]
deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"]
uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
[[PooledArrays]]
deps = ["DataAPI"]
git-tree-sha1 = "b1333d4eced1826e15adbdf01a4ecaccca9d353c"
uuid = "2dfb63ee-cc39-5dd5-95bd-886bf059d720"
version = "0.5.3"
[[Printf]]
deps = ["Unicode"]
uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
[[ProgressBars]]
deps = ["Printf"]
git-tree-sha1 = "e66732bbdaad368cfc642cef1f639df5812dc818"
uuid = "49802e3a-d2f1-5c88-81d8-b72133a6f568"
version = "0.6.0"
[[REPL]]
deps = ["InteractiveUtils", "Markdown", "Sockets"]
uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
[[Random]]
deps = ["Serialization"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[Reexport]]
deps = ["Pkg"]
git-tree-sha1 = "7b1d07f411bc8ddb7977ec7f377b97b158514fe0"
uuid = "189a3867-3050-52da-a836-e630ba90ab69"
version = "0.2.0"
[[SHA]]
uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
[[SentinelArrays]]
deps = ["Dates", "Random"]
git-tree-sha1 = "7a74946ace3b34fbb6c10e61b6e250b33d7e758c"
uuid = "91c51154-3ec4-41a3-a24f-3f23e20d615c"
version = "1.2.15"
[[Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
[[SharedArrays]]
deps = ["Distributed", "Mmap", "Random", "Serialization"]
uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383"
[[Sockets]]
uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
[[SortingAlgorithms]]
deps = ["DataStructures", "Random", "Test"]
git-tree-sha1 = "03f5898c9959f8115e30bc7226ada7d0df554ddd"
uuid = "a2af1166-a08f-5f64-846c-94a0d3cef48c"
version = "0.3.1"
[[SparseArrays]]
deps = ["LinearAlgebra", "Random"]
uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
[[SpecialFunctions]]
deps = ["OpenSpecFun_jll"]
git-tree-sha1 = "d8d8b8a9f4119829410ecd706da4cc8594a1e020"
uuid = "276daf66-3868-5448-9aa4-cd146d93841b"
version = "0.10.3"
[[StaticArrays]]
deps = ["LinearAlgebra", "Random", "Statistics"]
git-tree-sha1 = "016d1e1a00fabc556473b07161da3d39726ded35"
uuid = "90137ffa-7385-5640-81b9-e52037218182"
version = "0.12.4"
[[Statistics]]
deps = ["LinearAlgebra", "SparseArrays"]
uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
[[StructTypes]]
deps = ["Dates", "UUIDs"]
git-tree-sha1 = "1ed04f622a39d2e5a6747c3a70be040c00333933"
uuid = "856f2bd8-1eba-4b0a-8007-ebc267875bd4"
version = "1.1.0"
[[TableTraits]]
deps = ["IteratorInterfaceExtensions"]
git-tree-sha1 = "b1ad568ba658d8cbb3b892ed5380a6f3e781a81e"
uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c"
version = "1.0.0"
[[Tables]]
deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "TableTraits", "Test"]
git-tree-sha1 = "b7f762e9820b7fab47544c36f26f54ac59cf8abf"
uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c"
version = "1.0.5"
[[Test]]
deps = ["Distributed", "InteractiveUtils", "Logging", "Random"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[TranscodingStreams]]
deps = ["Random", "Test"]
git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c"
uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
version = "0.9.5"
[[UUIDs]]
deps = ["Random", "SHA"]
uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[[Unicode]]
uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
[[Zlib_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "fdd89e5ab270ea0f2a0174bd9093e557d06d4bfa"
uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
version = "1.2.11+16"


@@ -1,39 +1,50 @@
 name = "RELOG"
 uuid = "a2afcdf7-cf04-4913-85f9-c0d81ddf2008"
 authors = ["Alinson S Xavier <axavier@anl.gov>"]
-version = "0.5.0"
+version = "0.6.0"

 [deps]
+CRC = "44b605c4-b955-5f2b-9b6d-d2bd01d3d205"
 CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
-Cbc = "9961bab8-2fa3-5c5a-9d89-47fab24efd76"
-Clp = "e2554f3b-3117-50c0-817c-e040a3ddf72d"
 DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
 DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
+Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
+Downloads = "f43a241f-c20a-4ad4-852c-f6b1247861c6"
 GZip = "92fee26a-97fe-5a0c-ad85-20a5f3185b63"
 Geodesy = "0ef565a4-170c-5f04-8de2-149903a85f3d"
+HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3"
+HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
 JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
 JSONSchema = "7d188eb4-7ad8-530c-ae41-71a32a6d4692"
 JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
-PackageCompiler = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
+NearestNeighbors = "b8a86587-4115-5ab1-83bc-aa920d37bbce"
+OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
+Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
 Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
 ProgressBars = "49802e3a-d2f1-5c88-81d8-b72133a6f568"
+Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+Shapefile = "8e980c4a-a4fe-5da2-b3a7-4b4b0353a2f4"
 Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+ZipFile = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea"

 [compat]
-CSV = "0.7"
-Cbc = "0.6"
-Clp = "0.8"
-DataFrames = "0.21"
-DataStructures = "0.17"
+CRC = "4"
+CSV = "0.10"
+DataFrames = "1"
+DataStructures = "0.18"
 GZip = "0.5"
-Geodesy = "0.5"
+Geodesy = "1"
+HTTP = "0.9"
 JSON = "0.21"
-JSONSchema = "0.2"
-JuMP = "0.21"
-MathOptInterface = "0.9"
-PackageCompiler = "1"
-ProgressBars = "0.6"
+JSONSchema = "1"
+JuMP = "1"
+MathOptInterface = "1"
+NearestNeighbors = "0.4"
+OrderedCollections = "1"
+ProgressBars = "1"
+Shapefile = "0.8"
+ZipFile = "0.10"
 julia = "1"


@@ -15,19 +15,22 @@
-<img src="https://anl-ceeesa.github.io/RELOG/0.5/images/ex_transportation.png" width="1000px"/>
+<img src="https://anl-ceeesa.github.io/RELOG/0.6/assets/ex_transportation.png" width="1000px"/>

 ### Documentation

-* [Usage](https://anl-ceeesa.github.io/RELOG/0.5/usage)
-* [Input and Output Data Formats](https://anl-ceeesa.github.io/RELOG/0.5/format)
-* [Simplified Solution Reports](https://anl-ceeesa.github.io/RELOG/0.5/reports)
-* [Optimization Model](https://anl-ceeesa.github.io/RELOG/0.5/model)
+* [Usage](https://anl-ceeesa.github.io/RELOG/0.6/usage)
+* [Input and Output Data Formats](https://anl-ceeesa.github.io/RELOG/0.6/format)
+* [Simplified Solution Reports](https://anl-ceeesa.github.io/RELOG/0.6/reports)
+* [Optimization Model](https://anl-ceeesa.github.io/RELOG/0.6/model)

 ### Authors

-* **Alinson S. Xavier,** Argonne National Laboratory <<axavier@anl.gov>>
-* **Nwike Iloeje,** Argonne National Laboratory <<ciloeje@anl.gov>>
+* **Alinson S. Xavier** <<axavier@anl.gov>>
+* **Nwike Iloeje** <<ciloeje@anl.gov>>
+* **John Atkins**
+* **Kyle Sun**
+* **Audrey Gallier**

 ### License

docs/Project.toml (new file, 4 lines)

@@ -0,0 +1,4 @@
[deps]
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
RELOG = "a2afcdf7-cf04-4913-85f9-c0d81ddf2008"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"

docs/make.jl (new file, 19 lines)

@@ -0,0 +1,19 @@
using Documenter, RELOG
function make()
makedocs(
sitename="RELOG",
pages=[
"Home" => "index.md",
"usage.md",
"format.md",
"reports.md",
"model.md",
],
format = Documenter.HTML(
assets=["assets/custom.css"],
)
)
end
make()
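
A sketch of building the documentation locally with this script (it mirrors the `docs` target in the Makefile shown further below; the activate and instantiate steps are assumptions about the intended workflow):

```julia
# Sketch: build the documentation from the repository root.
using Pkg
cd("docs")
Pkg.activate(".")     # use docs/Project.toml (Documenter, RELOG, Revise)
Pkg.instantiate()
include("make.jl")    # runs makedocs(...) as defined above
```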


@@ -0,0 +1,36 @@
@media screen and (min-width: 1056px) {
#documenter .docs-main {
max-width: 65rem !important;
}
}
tbody, thead, pre {
border: 1px solid rgba(0, 0, 0, 0.25);
}
table td, th {
padding: 8px;
}
table p {
margin-bottom: 0;
}
table td code {
white-space: nowrap;
}
table tr,
table th {
border-bottom: 1px solid rgba(0, 0, 0, 0.1);
}
table tr:last-child {
border-bottom: 0;
}
code {
background-color: transparent;
color: rgb(232, 62, 140);
}

Seven binary image files changed; sizes are identical before and after (52 KiB, 37 KiB, 31 KiB, 91 KiB, 586 KiB, 29 KiB, 32 KiB).

@@ -11,9 +11,10 @@ RELOG accepts as input a JSON file with three sections: `parameters`, `products`
 The **parameters** section describes details about the simulation itself.

 | Key | Description
-|:--------------------------|---------------|
+|:--------------------------|:---------------|
 |`time horizon (years)` | Number of years in the simulation.
 |`building period (years)` | List of years in which we are allowed to open new plants. For example, if this parameter is set to `[1,2,3]`, we can only open plants during the first three years. By default, this equals `[1]`; that is, plants can only be opened during the first year. |
+|`distance metric` | Metric used to compute distances between pairs of locations. Valid options are: `"Euclidean"`, for the straight-line distance between points; or `"driving"` for an approximated driving distance. If not specified, defaults to `"Euclidean"`.

 #### Example
@@ -21,7 +22,8 @@ The **parameters** section describes details about the simulation itself.
 {
     "parameters": {
         "time horizon (years)": 2,
-        "building period (years)": [1]
+        "building period (years)": [1],
+        "distance metric": "driving",
     }
 }
 ```
@@ -31,16 +33,18 @@ The **parameters** section describes details about the simulation itself.
 The **products** section describes all products and subproducts in the simulation. The field `instance["Products"]` is a dictionary mapping the name of the product to a dictionary which describes its characteristics. Each product description contains the following keys:

 | Key | Description
-|:--------------------------------------|---------------|
+|:--------------------------------------|:---------------|
 |`transportation cost ($/km/tonne)` | The cost to transport this product. Must be a time series.
 |`transportation energy (J/km/tonne)` | The energy required to transport this product. Must be a time series. Optional.
 |`transportation emissions (tonne/km/tonne)` | A dictionary mapping the name of each greenhouse gas, produced to transport one tonne of this product along one kilometer, to the amount of gas produced (in tonnes). Must be a time series. Optional.
 |`initial amounts` | A dictionary mapping the name of each location to its description (see below). If this product is not initially available, this key may be omitted. Must be a time series.
+| `disposal limit (tonne)` | Total amount of product that can be disposed of across all collection centers. If omitted, all product must be processed. This parameter has no effect on product disposal at plants.
+| `disposal cost ($/tonne)` | Cost of disposing one tonne of this product at a collection center. If omitted, defaults to zero. This parameter has no effect on product disposal costs at plants.

 Each product may have some amount available at the beginning of each time period. In this case, the key `initial amounts` maps to a dictionary with the following keys:

 | Key | Description
-|:------------------------|---------------|
+|:------------------------|:---------------|
 | `latitude (deg)` | The latitude of the location.
 | `longitude (deg)` | The longitude of the location.
 | `amount (tonne)` | The amount of the product initially available at the location. Must be a time series.
@@ -73,7 +77,9 @@ Each product may have some amount available at the beginning of each time period
         "transportation emissions (tonne/km/tonne)": {
             "CO2": [0.052, 0.050],
             "CH4": [0.003, 0.002]
-        }
+        },
+        "disposal cost ($/tonne)": [-10.0, -12.0],
+        "disposal limit (tonne)": [1.0, 1.0],
     },
     "P2": {
         "transportation cost ($/km/tonne)": [0.022, 0.020]
@@ -93,7 +99,7 @@ Each product may have some amount available at the beginning of each time period
 The **plants** section describes the available types of reverse manufacturing plants, their potential locations and associated costs, as well as their inputs and outputs. The field `instance["Plants"]` is a dictionary mapping the name of the plant to a dictionary with the following keys:

 | Key | Description
-|:------------------------|---------------|
+|:------------------------|:---------------|
 | `input` | The name of the product that this plant takes as input. Only one input is accepted per plant.
 | `outputs (tonne/tonne)` | A dictionary specifying how many tonnes of each product is produced for each tonnes of input. For example, if the plant outputs 0.5 tonnes of P2 and 0.25 tonnes of P3 for each tonnes of P1 provided, then this entry should be `{"P2": 0.5, "P3": 0.25}`. If the plant does not output anything, this key may be omitted.
 |`energy (GJ/tonne)` | The energy required to process 1 tonne of the input. Must be a time series. Optional.
@@ -113,14 +119,14 @@ Each type of plant is associated with a set of potential locations where it can
 The `storage` dictionary should contain the following keys:

 | Key | Description
-|:------------------------|---------------|
+|:------------------------|:---------------|
 | `cost ($/tonne)` | The cost to store a tonne of input product for one time period. Must be a time series.
 | `limit (tonne)` | The maximum amount of input product this plant can have in storage at any given time.

 The keys in the `disposal` dictionary should be the names of the products. The values are dictionaries with the following keys:

 | Key | Description
-|:------------------------|---------------|
+|:------------------------|:---------------|
 | `cost ($/tonne)` | The cost to dispose of the product. Must be a time series.
 | `limit (tonne)` | The maximum amount that can be disposed of. If an unlimited amount can be disposed, this key may be omitted. Must be a time series.
@@ -128,7 +134,7 @@ The keys in the `disposal` dictionary should be the names of the products. The v
 The keys in the `capacities (tonne)` dictionary should be the amounts (in tonnes). The values are dictionaries with the following keys:

 | Key | Description
-|:--------------------------------------|---------------|
+|:--------------------------------------|:---------------|
 | `opening cost ($)` | The cost to open a plant of this size.
 | `fixed operating cost ($)` | The cost to keep the plant open, even if the plant doesn't process anything. Must be a time series.
 | `variable operating cost ($/tonne)` | The cost that the plant incurs to process each tonne of input. Must be a time series.
@@ -182,14 +188,46 @@ The keys in the `capacities (tonne)` dictionary should be the amounts (in tonnes
 }
 ```
+
+### Geographic database
+
+Instead of specifying locations using latitudes and longitudes, it is also possible to specify them using unique identifiers, such as the name of a US state, or the county FIPS code. This works anywhere `latitude (deg)` and `longitude (deg)` are expected. For example, instead of:
+
+```json
+{
+    "initial amounts": {
+        "C1": {
+            "latitude (deg)": 37.27182,
+            "longitude (deg)": -119.2704,
+            "amount (tonne)": [934.56, 934.56]
+        },
+    }
+}
+```
+
+is is possible to write:
+
+```json
+{
+    "initial amounts": {
+        "C1": {
+            "location": "us-state:CA",
+            "amount (tonne)": [934.56, 934.56]
+        },
+    }
+}
+```
+
+Location names follow the format `db:id`, where `db` is the name of the database and `id` is the identifier for a specific location. RELOG currently includes the following databases:
+
+Database | Description | Examples
+:--------|:------------|:---------
+`us-state`| List of states of the United States. | `us-state:IL` (State of Illinois)
+`2018-us-county` | List of United States counties, as of 2018. IDs are 5-digit FIPS codes. | `2018-us-county:17043` (DuPage county in Illinois)

 ### Current limitations

 * Each plant can only be opened exactly once. After open, the plant remains open until the end of the simulation.
 * Plants can be expanded at any time, even long after they are open.
 * All material available at the beginning of a time period must be entirely processed by the end of that time period. It is not possible to store unprocessed materials from one time period to the next.
 * Up to two plant sizes are currently supported. Variable operating costs must be the same for all plant sizes.
+* Accurate driving distances are only available for the continental United States.

 ## Output Data Format (JSON)

 To be documented.


@@ -1,25 +1,29 @@
 # RELOG: Reverse Logistics Optimization

 **RELOG** is an open-source supply chain optimization package focusing on reverse logistics and reverse manufacturing. The package uses Mixed-Integer Linear Programming to determine where to build recycling plants, what size should these plants have and which customers should be served by which plants. The package supports custom reverse logistics pipelines, with multiple types of plants, multiple types of product and multiple time periods.

-<img src="images/ex_transportation.png" width="1000px"/>
+```@raw html
+<center>
+<img src="assets/ex_transportation.png" width="1000px"/>
+</center>
+```

 ### Table of Contents

-* [Usage](usage.md)
-* [Input and Output Data Formats](format.md)
-* [Simplified Solution Reports](reports.md)
-* [Optimization Model](model.md)
+```@contents
+Pages = ["usage.md", "format.md", "reports.md", "model.md"]
+Depth = 3
+```

 ### Source Code

 * [https://github.com/ANL-CEEESA/RELOG](https://github.com/ANL-CEEESA/RELOG)

 ### Authors

-* **Alinson S. Xavier,** Argonne National Laboratory <<axavier@anl.gov>>
-* **Nwike Iloeje,** Argonne National Laboratory <<ciloeje@anl.gov>>
+* **Alinson S. Xavier,** Argonne National Laboratory <axavier@anl.gov>
+* **Nwike Iloeje,** Argonne National Laboratory <ciloeje@anl.gov>

 ### License


@@ -6,53 +6,65 @@ In this page, we describe the precise mathematical optimization model used by RE
### Sets ### Sets
* $L$ - Set of locations holding the original material to be recycled Symbol | Description
* $M$ - Set of materials recovered during the reverse manufacturing process :-------|:------------
* $P$ - Set of potential plants to open $L$ | Set of locations holding the original material to be recycled
* $T = \{ 1, \ldots, t^{max} \} $ - Set of time periods $M$ | Set of materials recovered during the reverse manufacturing process
$P$ | Set of potential plants to open
$T = \{ 1, \ldots, t^{max} \}$ | Set of time periods
### Constants ### Constants
**Plants:** #### Plants
* $c^\text{disp}_{pmt}$ - Cost of disposing one tonne of material $m$ at plant $p$ during time $t$ (`$/tonne/km`) Symbol | Description | Unit
* $c^\text{exp}_{pt}$ - Cost of adding one tonne of capacity to plant $p$ at time $t$ (`$/tonne`) :-------|:------------|:---
* $c^\text{open}_{pt}$ - Cost of opening plant $p$ at time $t$, at minimum capacity (`$`) $c^\text{disp}_{pmt}$ | Cost of disposing one tonne of material $m$ at plant $p$ during time $t$ | \$/tonne/km
* $c^\text{f-base}_{pt}$ - Fixed cost of keeping plant $p$ open during time period $t$ (`$`) $c^\text{exp}_{pt}$ | Cost of adding one tonne of capacity to plant $p$ at time $t$ | \$/tonne
* $c^\text{f-exp}_{pt}$ - Increase in fixed cost for each additional tonne of capacity (`$/tonne`) $c^\text{open}_{pt}$ | Cost of opening plant $p$ at time $t$, at minimum capacity | $
* $c^\text{var}_{pt}$ - Variable cost of processing one tonne of input at plant $p$ at time $t$ (`$/tonne`) $c^\text{f-base}_{pt}$ | Fixed cost of keeping plant $p$ open during time period $t$ | $
* $c^\text{store}_{pt}$ - Cost of storing one tonne of original material at plant $p$ at time $t$ (`$/tonne`) $c^\text{f-exp}_{pt}$ | Increase in fixed cost for each additional tonne of capacity | \$/tonne
* $m^\text{min}_p$ - Minimum capacity of plant $p$ (`tonne`) $c^\text{var}_{pt}$ | Variable cost of processing one tonne of input at plant $p$ at time $t$ | \$/tonne
* $m^\text{max}_p$ - Maximum capacity of plant $p$ (`tonne`) $c^\text{store}_{pt}$ | Cost of storing one tonne of original material at plant $p$ at time $t$ | \$/tonne
* $m^\text{disp}_{pmt}$ - Maximum amount of material $m$ that plant $p$ can dispose of during time $t$ (`tonne`) $m^\text{min}_p$ | Minimum capacity of plant $p$ | tonne
* $m^\text{store}_p$ - Maximum amount of original material that plant $p$ can store for later processing. $m^\text{max}_p$ | Maximum capacity of plant $p$ | tonne
$m^\text{disp}_{pmt}$ | Maximum amount of material $m$ that plant $p$ can dispose of during time $t$ | tonne
$m^\text{store}_p$ | Maximum amount of original material that plant $p$ can store for later processing. | tonne
**Products:** #### Products
* $\alpha_{pm}$ - Amount of material $m$ recovered by plant $t$ for each tonne of original material (`tonne/tonne`) Symbol | Description | Unit
* $m^\text{initial}_{lt}$ - Amount of original material to be recycled at location $l$ during time $t$ (`tonne`) :-------|:------------|:---
$\alpha_{pm}$ | Amount of material $m$ recovered by plant $t$ for each tonne of original material | tonne/tonne
$m^\text{initial}_{lt}$ | Amount of original material to be recycled at location $l$ during time $t$ | tonne
**Transportation:** #### Transportation
* $c^\text{tr}_{t}$ - Transportation cost during time $t$ (`$/tonne/km`) Symbol | Description | Unit
* $d_{lp}$ - Distance between plant $p$ and location $l$ (`km`) :-------|:------------|:---
$c^\text{tr}_{t}$ | Transportation cost during time $t$ | \$/tonne/km
$d_{lp}$ | Distance between plant $p$ and location $l$ | km
### Decision variables ### Decision variables
* $q_{mpt}$ - Amount of material $m$ recovered by plant $p$ during time $t$ (`tonne`)
* $u_{pt}$ - Binary variable that equals 1 if plant $p$ starts operating at time $t$ (`bool`) Symbol | Description | Unit
* $w_{pt}$ - Extra capacity (amount above the minimum) added to plant $p$ during time $t$ (`tonne`) :-------|:------------|:---
* $x_{pt}$ - Binary variable that equals 1 if plant $p$ is operational at time $t$ (`bool`) $q_{mpt}$ | Amount of material $m$ recovered by plant $p$ during time $t$ | tonne
* $y_{lpt}$ - Amount of product sent from location $l$ to plant $p$ during time $t$ (`tonne`) $u_{pt}$ | Binary variable that equals 1 if plant $p$ starts operating at time $t$ | Boolean
* $z^{\text{disp}}_{mpt}$ - Amount of material $m$ disposed of by plant $p$ during time $t$ (`tonne`) $w_{pt}$ | Extra capacity (amount above the minimum) added to plant $p$ during time $t$ | tonne
* $z^{\text{store}}_{pt}$ - Amount of original material in storage at plant $p$ by the end of time period $t$ (`tonne`) $x_{pt}$ | Binary variable that equals 1 if plant $p$ is operational at time $t$ | Boolean
* $z^{\text{proc}}_{mpt}$ - Amount of original material processed by plant $p$ during time period $t$ (`tonne`) $y_{lpt}$ | Amount of product sent from location $l$ to plant $p$ during time $t$ | tonne
$z^{\text{disp}}_{mpt}$ | Amount of material $m$ disposed of by plant $p$ during time $t$ | tonne
$z^{\text{store}}_{pt}$ | Amount of original material in storage at plant $p$ by the end of time period $t$ | tonne
$z^{\text{proc}}_{mpt}$ | Amount of original material processed by plant $p$ during time period $t$ | tonne
### Objective function ### Objective function
RELOG minimizes the overall capital, production and transportation costs: RELOG minimizes the overall capital, production and transportation costs:
```math
\begin{align*} \begin{align*}
\text{minimize} \;\; & \text{minimize} \;\; &
\sum_{t \in T} \sum_{p \in P} \left[ \sum_{t \in T} \sum_{p \in P} \left[
@@ -73,6 +85,7 @@ RELOG minimizes the overall capital, production and transportation costs:
& &
\sum_{t \in T} \sum_{p \in P} \sum_{m \in M} c^{\text{disp}}_{pmt} z_{pmt} \sum_{t \in T} \sum_{p \in P} \sum_{m \in M} c^{\text{disp}}_{pmt} z_{pmt}
\end{align*} \end{align*}
```
In the first line, we have (i) opening costs, if plant starts operating at time $t$, (ii) fixed operating costs, if plant is operational, (iii) additional fixed operating costs coming from expansion performed in all previous time periods up to the current one, and finally (iv) the expansion costs during the current time period. In the first line, we have (i) opening costs, if plant starts operating at time $t$, (ii) fixed operating costs, if plant is operational, (iii) additional fixed operating costs coming from expansion performed in all previous time periods up to the current one, and finally (iv) the expansion costs during the current time period.
In the second line, we have storage and variable processing costs. In the second line, we have storage and variable processing costs.
@@ -83,14 +96,17 @@ In the fourth line, we have the disposal costs.
* All original materials must be sent to a plant: * All original materials must be sent to a plant:
\begin{align} ```math
\begin{align*}
& \sum_{p \in P} y_{lpt} = m^\text{initial}_{lt} & \sum_{p \in P} y_{lpt} = m^\text{initial}_{lt}
& \forall l \in L, t \in T & \forall l \in L, t \in T
\end{align} \end{align*}
```
* Amount received equals amount processed plus stored. Furthermore, all original material should be processed by the end of the simulation. * Amount received equals amount processed plus stored. Furthermore, all original material should be processed by the end of the simulation.
\begin{align} ```math
\begin{align*}
& \sum_{l \in L} y_{lpt} + z^{\text{store}}_{p,t-1} & \sum_{l \in L} y_{lpt} + z^{\text{store}}_{p,t-1}
= z^{\text{proc}}_{pt} + z^{\text{store}}_{p,t} = z^{\text{proc}}_{pt} + z^{\text{store}}_{p,t}
& \forall p \in P, t \in T \\ & \forall p \in P, t \in T \\
@@ -98,56 +114,70 @@ In the fourth line, we have the disposal costs.
& \forall p \in P \\ & \forall p \in P \\
& z^{\text{store}}_{p,t^{\max}} = 0 & z^{\text{store}}_{p,t^{\max}} = 0
& \forall p \in P & \forall p \in P
\end{align} \end{align*}
```
* Plants have a limited processing capacity. Furthermore, if a plant is closed, it has zero processing capacity: * Plants have a limited processing capacity. Furthermore, if a plant is closed, it has zero processing capacity:
\begin{align} ```math
\begin{align*}
& z^{\text{proc}}_{pt} \leq m^\text{min}_p x_p + \sum_{i=1}^t w_p & z^{\text{proc}}_{pt} \leq m^\text{min}_p x_p + \sum_{i=1}^t w_p
& \forall p \in P, t \in T & \forall p \in P, t \in T
\end{align} \end{align*}
```
* Plants have limited storage capacity. Furthermore, if a plant is closed, is has zero storage capacity: * Plants have limited storage capacity. Furthermore, if a plant is closed, is has zero storage capacity:
\begin{align} ```math
\begin{align*}
& z^{\text{store}}_{pt} \leq m^\text{store}_p x_p & z^{\text{store}}_{pt} \leq m^\text{store}_p x_p
& \forall p \in P, t \in T & \forall p \in P, t \in T
\end{align} \end{align*}
```
* Plants can only be expanded up to their maximum capacity. Furthermore, if a plant is closed, it cannot be expanded: * Plants can only be expanded up to their maximum capacity. Furthermore, if a plant is closed, it cannot be expanded:
\begin{align} ```math
\begin{align*}
& \sum_{i=1}^t w_p \leq m^\text{max}_p x_p & \sum_{i=1}^t w_p \leq m^\text{max}_p x_p
& \forall p \in P, t \in T & \forall p \in P, t \in T
\end{align} \end{align*}
```
* Amount of recovered material is proportional to amount processed: * Amount of recovered material is proportional to amount processed:
\begin{align} ```math
\begin{align*}
& q_{mpt} = \alpha_{pm} z^{\text{proc}}_{pt} & q_{mpt} = \alpha_{pm} z^{\text{proc}}_{pt}
& \forall m \in M, p \in P, t \in T & \forall m \in M, p \in P, t \in T
\end{align} \end{align*}
```
* Because we only consider a single type of plant, all recovered material must be immediately disposed of. In RELOG's full model, recovered materials may be sent to another plant for further processing.
```math
\begin{align*}
& q_{mpt} = z^{\text{disp}}_{mpt}
& \forall m \in M, p \in P, t \in T
\end{align*}
```
* A plant is operational at time $t$ if it was operational at time $t-1$ or it was built at time $t$. This constraint also prevents a plant from being built multiple times.
```math
\begin{align*}
& x_{pt} = x_{p,t-1} + u_{pt}
& \forall p \in P, t \in T \setminus \{1\} \\
& x_{p,1} = u_{p,1}
& \forall p \in P
\end{align*}
```
* Variable bounds:
```math
\begin{align*}
& q_{mpt} \geq 0
& \forall m \in M, p \in P, t \in T \\
& u_{pt} \in \{0,1\}
@@ -162,4 +192,5 @@ In the fourth line, we have the disposal costs.
& p \in P, t \in T \\
& z^{\text{disp}}_{mpt}, z^{\text{proc}}_{mpt} \geq 0
& \forall m \in M, p \in P, t \in T
\end{align*}
```
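To make the formulation concrete, the sketch below writes the first two constraint families (material balance at collection centers and at plants) in JuMP. It is an illustrative toy, not RELOG's internal implementation: the set sizes, the random `m_initial` data, the zero-initial-storage constraint and the choice of HiGHS are assumptions.
```julia
using JuMP, HiGHS

L, P, T = 3, 2, 4                 # collection centers, candidate plants, time periods (toy sizes)
m_initial = 100 .* rand(L, T)     # hypothetical initial amounts at each center

model = Model(HiGHS.Optimizer)
@variable(model, y[1:L, 1:P, 1:T] >= 0)      # amount sent from center l to plant p at time t
@variable(model, z_store[1:P, 0:T] >= 0)     # amount stored at plant p at time t
@variable(model, z_proc[1:P, 1:T] >= 0)      # amount processed at plant p at time t

# All original material must be sent to some plant
@constraint(model, [l in 1:L, t in 1:T],
    sum(y[l, p, t] for p in 1:P) == m_initial[l, t])

# Amount received plus carried-over storage equals amount processed plus stored
@constraint(model, [p in 1:P, t in 1:T],
    sum(y[l, p, t] for l in 1:L) + z_store[p, t-1] == z_proc[p, t] + z_store[p, t])

# No storage before the first period (assumption) and none left at the end of the horizon
@constraint(model, [p in 1:P], z_store[p, 0] == 0)
@constraint(model, [p in 1:P], z_store[p, T] == 0)
```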

View File

@@ -6,15 +6,13 @@ In this page, we also illustrate what types of charts and visualizations can be
## Plants report
Report showing plant costs, capacities, energy expenditure and utilization factors. Generated by `RELOG.write_plants_report(solution, filename)`.
For a concrete example, see [nimh_plants.csv](https://github.com/ANL-CEEESA/RELOG/blob/master/test/fixtures/nimh_plants.csv).
| Column | Description
|:--------------------------------------|:---------------|
| `plant type` | Plant type.
| `location name` | Location name.
| `year` | What year this row corresponds to. This report includes one row for each year.
| `latitude (deg)` | Latitude of the plant.
| `longitude (deg)` | Longitude of the plant.
| `capacity (tonne)` | Capacity of the plant at this point in time.
@@ -47,7 +45,9 @@ sns.barplot(x="year",
.reset_index());
```
```@raw html
<img src="../assets/ex_plant_cost_per_year.png" width="500px"/>
```
* Map showing plant locations (in Python):
```python
@@ -67,21 +67,20 @@ points = gp.points_from_xy(data["longitude (deg)"],
gp.GeoDataFrame(data, geometry=points).plot(ax=ax);
```
```@raw html
<img src="../assets/ex_plant_locations.png" width="1000px"/>
```
## Plant outputs report
Report showing amount of products produced, sent and disposed of by each plant, as well as disposal costs. Generated by `RELOG.write_plant_outputs_report(solution, filename)`.
For a concrete example, see [nimh_plant_outputs.csv](https://github.com/ANL-CEEESA/RELOG/blob/master/test/fixtures/nimh_plant_outputs.csv).
| Column | Description
|:--------------------------------------|:---------------|
| `plant type` | Plant type.
| `location name` | Location name.
| `year` | What year this row corresponds to. This report includes one row for each year.
| `product name` | Product being produced.
| `amount produced (tonne)` | Amount of product produced this year.
| `amount sent (tonne)` | Amount of product produced by this plant and sent to another plant for further processing this year.
@@ -105,17 +104,17 @@ sns.barplot(x="amount produced (tonne)",
.reset_index());
```
```@raw html
<img src="../assets/ex_amount_produced.png" width="500px"/>
```
## Plant emissions report
Report showing amount of emissions produced by each plant. Generated by `RELOG.write_plant_emissions_report(solution, filename)`.
For a concrete example, see [nimh_plant_emissions.csv](https://github.com/ANL-CEEESA/RELOG/blob/master/test/fixtures/nimh_plant_emissions.csv).
| Column | Description
|:--------------------------------------|:---------------|
| `plant type` | Plant type.
| `location name` | Location name.
| `year` | Year.
@@ -139,17 +138,33 @@ sns.barplot(x="plant type",
.reset_index());
```
```@raw html
<img src="../assets/ex_emissions.png" width="500px"/>
```
## Products report
Report showing primary product amounts, locations and marginal costs. Generated by `RELOG.write_products_report(solution, filename)`.
| Column | Description
|:--------------------------------------|:---------------|
| `product name` | Product name.
| `location name` | Name of the collection center.
| `latitude (deg)` | Latitude of the collection center.
| `longitude (deg)` | Longitude of the collection center.
| `year` | What year this row corresponds to. This report includes one row for each year.
| `amount (tonne)` | Amount of product available at this collection center.
| `amount disposed (tonne)` | Amount of product disposed of at this collection center.
| `marginal cost ($/tonne)` | Cost to process one additional tonne of this product coming from this collection center.
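Like the other reports, this file can be loaded with any data-analysis tool. Below is a minimal sketch in Julia, assuming the `CSV` and `DataFrames` packages are installed and using illustrative file names:
```julia
using RELOG, CSV, DataFrames

# Solve an instance and write the products report
solution = RELOG.solve("instance.json")
RELOG.write_products_report(solution, "products.csv")

# Load the report and rank collection centers by marginal cost
df = CSV.read("products.csv", DataFrame)
sort(df, "marginal cost (\$/tonne)", rev = true)
```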
## Transportation report
Report showing amount of product sent from initial locations to plants, and from one plant to another. Includes the distance between each pair of locations, amount-distance shipped, transportation costs and energy expenditure. Generated by `RELOG.write_transportation_report(solution, filename)`.
For a concrete example, see [nimh_transportation.csv](https://github.com/ANL-CEEESA/RELOG/blob/master/test/fixtures/nimh_transportation.csv).
| Column | Description
|:--------------------------------------|:---------------|
| `source type` | If product is being shipped from an initial location, equals `Origin`. If product is being shipped from a plant, equals plant type.
| `source location name` | Name of the location where the product is being shipped from.
| `source latitude (deg)` | Latitude of the source location.
@@ -183,7 +198,9 @@ sns.barplot(x="product",
.reset_index());
```
```@raw html
<img src="../assets/ex_transportation_amount_distance.png" width="500px"/>
```
* Map of transportation lines (in Python):
@@ -226,17 +243,17 @@ gp.GeoDataFrame(data, geometry=points).plot(ax=ax,
markersize=50);
```
```@raw html
<img src="../assets/ex_transportation.png" width="1000px"/>
```
## Transportation emissions report
Report showing emissions for each trip between initial locations and plants, and between pairs of plants. Generated by `RELOG.write_transportation_emissions_report(solution, filename)`.
For a concrete example, see [nimh_transportation_emissions.csv](https://github.com/ANL-CEEESA/RELOG/blob/master/test/fixtures/nimh_transportation_emissions.csv).
| Column | Description
|:--------------------------------------|:---------------|
| `source type` | If product is being shipped from an initial location, equals `Origin`. If product is being shipped from a plant, equals plant type.
| `source location name` | Name of the location where the product is being shipped from.
| `source latitude (deg)` | Latitude of the source location.
@@ -270,4 +287,6 @@ sns.barplot(x="emission type",
.reset_index());
```
```@raw html
<img src="../assets/ex_transportation_emissions.png" width="500px"/>
```

View File

@@ -3,22 +3,11 @@ Usage
## 1. Installation
To use RELOG, the first step is to install the [Julia programming language](https://julialang.org/) on your machine. Note that RELOG was developed and tested with Julia 1.8 and may not be compatible with newer versions. After Julia is installed, launch the Julia console, then run:
```julia
using Pkg
Pkg.add(name="RELOG", version="0.6")
```
## 2. Modeling the problem
@@ -66,25 +55,65 @@ RELOG.write_transportation_report(solution, "transportation.csv")
For a complete description of the file formats above, and for a complete list of available reports, see the [data format page](format.md).
## 4. What-If Analysis
Fundamentally, RELOG decides when and where to build plants based on a deterministic optimization problem that minimizes costs for a particular input file provided by the user. In practical situations, it may not be possible to perfectly estimate some (or most) entries in this input file in advance, such as costs, demands and emissions. In this situation, it may be interesting to evaluate how well the facility location plan produced by RELOG works if costs, demands and emissions turn out to be different.
To simplify this what-if analysis, RELOG provides the `resolve` method, which updates a previous solution based on a new scenario while keeping some of the previous decisions fixed. More precisely, given an optimal solution produced by RELOG and a new input file describing the new scenario, the `resolve` method reoptimizes the supply chain and produces a new solution that still builds the same set of plants as before, in exactly the same locations and with the same capacities, but that may now utilize the plants differently, based on the new data. For example, in the new solution, plants that were previously used at full capacity may now be utilized at half capacity instead. As another example, regions that were previously served by a certain plant may now be served by a different one.
The following snippet shows how to use the method:
```julia
# Import package
using RELOG
# Optimize for the average scenario
solution_avg, model_avg = RELOG.solve("input_avg.json", return_model=true)
# Write reports for the average scenario
RELOG.write_plants_report(solution_avg, "plants_avg.csv")
RELOG.write_transportation_report(solution_avg, "transportation_avg.csv")
# Re-optimize for the high-demand scenario, keeping plants fixed
solution_high = RELOG.resolve(model_avg, "input_high.json")
# Write reports for the high-demand scenario
RELOG.write_plants_report(solution_high, "plants_high.csv")
RELOG.write_transportation_report(solution_high, "transportation_high.csv")
```
To use the `resolve` method, the new input file should be very similar to the original one. Only the following entries are allowed to change:
- **Products:** Transportation costs, energy, emissions and initial amounts (latitude, longitude and amount).
- **Plants:** Energy and emissions.
- **Plant's location:** Latitude and longitude.
- **Plant's storage:** Cost.
- **Plant's capacity:** Opening cost, fixed operating cost and variable operating cost.
## 5. Advanced options
### 5.1 Changing the solver
By default, RELOG internally uses [HiGHS](https://github.com/ERGO-Code/HiGHS), an open-source and freely-available Mixed-Integer Linear Programming solver. For larger-scale test cases, a commercial solver such as Gurobi, CPLEX or XPRESS is recommended. The following snippet shows how to switch to Gurobi, for example:
```julia
using RELOG, Gurobi, JuMP

gurobi = optimizer_with_attributes(
    Gurobi.Optimizer,
    "TimeLimit" => 3600,
    "MIPGap" => 0.001,
)

RELOG.solve(
    "instance.json",
    output="solution.json",
    optimizer=gurobi,
)
```
### 5.2 Multi-period heuristics
For large-scale instances, it may be too time-consuming to find an exact optimal solution to the multi-period version of the problem. For these situations, RELOG includes a heuristic solution method, which proceeds as follows:
@@ -97,6 +126,8 @@ To solve an instance using this heuristic, use the option `heuristic=true`, as s
```julia
using RELOG

solution = RELOG.solve(
    "/home/user/instance.json",
    heuristic=true,
)
```

View File

@@ -1,202 +0,0 @@
{
"parameters": {
"time horizon (years)": 2
},
"products": {
"P1": {
"transportation cost ($/km/tonne)": [0.015, 0.015],
"transportation energy (J/km/tonne)": [0.12, 0.11],
"transportation emissions (tonne/km/tonne)": {
"CO2": [0.052, 0.050],
"CH4": [0.003, 0.002]
},
"initial amounts": {
"C1": {
"latitude (deg)": 7.0,
"longitude (deg)": 7.0,
"amount (tonne)": [934.56, 934.56]
},
"C2": {
"latitude (deg)": 7.0,
"longitude (deg)": 19.0,
"amount (tonne)": [198.95, 198.95]
},
"C3": {
"latitude (deg)": 84.0,
"longitude (deg)": 76.0,
"amount (tonne)": [212.97, 212.97]
},
"C4": {
"latitude (deg)": 21.0,
"longitude (deg)": 16.0,
"amount (tonne)": [352.19, 352.19]
},
"C5": {
"latitude (deg)": 32.0,
"longitude (deg)": 92.0,
"amount (tonne)": [510.33, 510.33]
},
"C6": {
"latitude (deg)": 14.0,
"longitude (deg)": 62.0,
"amount (tonne)": [471.66, 471.66]
},
"C7": {
"latitude (deg)": 30.0,
"longitude (deg)": 83.0,
"amount (tonne)": [785.21, 785.21]
},
"C8": {
"latitude (deg)": 35.0,
"longitude (deg)": 40.0,
"amount (tonne)": [706.17, 706.17]
},
"C9": {
"latitude (deg)": 74.0,
"longitude (deg)": 52.0,
"amount (tonne)": [30.08, 30.08]
},
"C10": {
"latitude (deg)": 22.0,
"longitude (deg)": 54.0,
"amount (tonne)": [536.52, 536.52]
}
}
},
"P2": {
"transportation cost ($/km/tonne)": [0.02, 0.02]
},
"P3": {
"transportation cost ($/km/tonne)": [0.0125, 0.0125]
},
"P4": {
"transportation cost ($/km/tonne)": [0.0175, 0.0175]
}
},
"plants": {
"F1": {
"input": "P1",
"outputs (tonne/tonne)": {
"P2": 0.2,
"P3": 0.5
},
"energy (GJ/tonne)": [0.12, 0.11],
"emissions (tonne/tonne)": {
"CO2": [0.052, 0.050],
"CH4": [0.003, 0.002]
},
"locations": {
"L1": {
"latitude (deg)": 0.0,
"longitude (deg)": 0.0,
"disposal": {
"P2": {
"cost ($/tonne)": [-10.0, -10.0],
"limit (tonne)": [1.0, 1.0]
},
"P3": {
"cost ($/tonne)": [-10.0, -10.0],
"limit (tonne)": [1.0, 1.0]
}
},
"capacities (tonne)": {
"250.0": {
"opening cost ($)": [500.0, 500.0],
"fixed operating cost ($)": [30.0, 30.0],
"variable operating cost ($/tonne)": [30.0, 30.0]
},
"1000.0": {
"opening cost ($)": [1250.0, 1250.0],
"fixed operating cost ($)": [30.0, 30.0],
"variable operating cost ($/tonne)": [30.0, 30.0]
}
}
},
"L2": {
"latitude (deg)": 0.5,
"longitude (deg)": 0.5,
"capacities (tonne)": {
"0.0": {
"opening cost ($)": [1000, 1000],
"fixed operating cost ($)": [50.0, 50.0],
"variable operating cost ($/tonne)": [50.0, 50.0]
},
"10000.0": {
"opening cost ($)": [10000, 10000],
"fixed operating cost ($)": [50.0, 50.0],
"variable operating cost ($/tonne)": [50.0, 50.0]
}
}
}
}
},
"F2": {
"input": "P2",
"outputs (tonne/tonne)": {
"P3": 0.05,
"P4": 0.80
},
"locations": {
"L3": {
"latitude (deg)": 25.0,
"longitude (deg)": 65.0,
"disposal": {
"P3": {
"cost ($/tonne)": [100.0, 100.0]
}
},
"capacities (tonne)": {
"1000.0": {
"opening cost ($)": [3000, 3000],
"fixed operating cost ($)": [50.0, 50.0],
"variable operating cost ($/tonne)": [50.0, 50.0]
}
}
},
"L4": {
"latitude (deg)": 0.75,
"longitude (deg)": 0.20,
"capacities (tonne)": {
"10000": {
"opening cost ($)": [3000, 3000],
"fixed operating cost ($)": [50.0, 50.0],
"variable operating cost ($/tonne)": [50.0, 50.0]
}
}
}
}
},
"F3": {
"input": "P4",
"locations": {
"L5": {
"latitude (deg)": 100.0,
"longitude (deg)": 100.0,
"capacities (tonne)": {
"15000": {
"opening cost ($)": [0.0, 0.0],
"fixed operating cost ($)": [0.0, 0.0],
"variable operating cost ($/tonne)": [-15.0, -15.0]
}
}
}
}
},
"F4": {
"input": "P3",
"locations": {
"L6": {
"latitude (deg)": 50.0,
"longitude (deg)": 50.0,
"capacities (tonne)": {
"10000": {
"opening cost ($)": [0.0, 0.0],
"fixed operating cost ($)": [0.0, 0.0],
"variable operating cost ($/tonne)": [-15.0, -15.0]
}
}
}
}
}
}
}

View File

@@ -1,23 +0,0 @@
site_name: RELOG
theme: cinder
copyright: "Copyright © 2020, UChicago Argonne, LLC. All Rights Reserved."
repo_url: https://github.com/ANL-CEEESA/RELOG
edit_uri: edit/master/src/docs/
nav:
- Home: index.md
- Usage: usage.md
- Data Format: format.md
- Reports: reports.md
- Optimization Model: model.md
plugins:
- search
markdown_extensions:
- admonition
- mdx_math
extra_javascript:
- https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0/MathJax.js?config=TeX-AMS-MML_HTMLorMML
- js/mathjax.js
docs_dir: src/docs
site_dir: docs
extra_css:
- "css/custom.css"

23
relog-web/.gitignore vendored Normal file
View File

@@ -0,0 +1,23 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# production
/build
# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*

70
relog-web/README.md Normal file
View File

@@ -0,0 +1,70 @@
# Getting Started with Create React App
This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
## Available Scripts
In the project directory, you can run:
### `npm start`
Runs the app in the development mode.\
Open [http://localhost:3000](http://localhost:3000) to view it in your browser.
The page will reload when you make changes.\
You may also see any lint errors in the console.
### `npm test`
Launches the test runner in the interactive watch mode.\
See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information.
### `npm run build`
Builds the app for production to the `build` folder.\
It correctly bundles React in production mode and optimizes the build for the best performance.
The build is minified and the filenames include the hashes.\
Your app is ready to be deployed!
See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information.
### `npm run eject`
**Note: this is a one-way operation. Once you `eject`, you can't go back!**
If you aren't satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project.
Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you're on your own.
You don't have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn't feel obligated to use this feature. However we understand that this tool wouldn't be useful if you couldn't customize it when you are ready for it.
## Learn More
You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
To learn React, check out the [React documentation](https://reactjs.org/).
### Code Splitting
This section has moved here: [https://facebook.github.io/create-react-app/docs/code-splitting](https://facebook.github.io/create-react-app/docs/code-splitting)
### Analyzing the Bundle Size
This section has moved here: [https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size](https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size)
### Making a Progressive Web App
This section has moved here: [https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app](https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app)
### Advanced Configuration
This section has moved here: [https://facebook.github.io/create-react-app/docs/advanced-configuration](https://facebook.github.io/create-react-app/docs/advanced-configuration)
### Deployment
This section has moved here: [https://facebook.github.io/create-react-app/docs/deployment](https://facebook.github.io/create-react-app/docs/deployment)
### `npm run build` fails to minify
This section has moved here: [https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify](https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify)

29110
relog-web/package-lock.json generated Normal file

File diff suppressed because it is too large

52
relog-web/package.json Normal file
View File

@@ -0,0 +1,52 @@
{
"name": "relog-web",
"version": "0.1.0",
"private": true,
"homepage": "/",
"jest": {
"moduleNameMapper": {
"d3": "<rootDir>/node_modules/d3/dist/d3.min.js"
}
},
"dependencies": {
"@testing-library/jest-dom": "^5.16.2",
"@testing-library/react": "^12.1.4",
"@testing-library/user-event": "^13.5.0",
"ajv": "^8.11.0",
"d3": "^5.16.0",
"d3-array": "^2.12.1",
"dagre": "^0.8.5",
"idb": "^6.1.5",
"leaflet": "^1.8.0",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"react-flow-renderer": "^9.7.4",
"react-router-dom": "^5.3.3",
"react-scripts": "5.0.0",
"web-vitals": "^2.1.4"
},
"scripts": {
"start": "react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject"
},
"eslintConfig": {
"extends": [
"react-app",
"react-app/jest"
]
},
"browserslist": {
"production": [
">0.2%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
}
}

View File

@@ -0,0 +1,13 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>RELOG</title>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
</body>
</html>

View File

@@ -0,0 +1,417 @@
import { openDB } from "idb";
import React, { useEffect, useRef, useState } from "react";
import Button from "../common/Button";
import Footer from "../common/Footer";
import Header from "../common/Header";
import "../index.css";
import { generateFile } from "./csv";
import { defaultData, defaultPlant, defaultProduct } from "./defaults";
import { exportData, importData } from "./export";
import ParametersBlock from "./ParametersBlock";
import PipelineBlock, { randomPosition } from "./PipelineBlock";
import PlantBlock from "./PlantBlock";
import ProductBlock from "./ProductBlock";
import { validate } from "./validate";
import { useHistory } from "react-router-dom";
import { SERVER_URL } from "..";
const setDefaults = (actualDict, defaultDict) => {
for (const [key, defaultValue] of Object.entries(defaultDict)) {
if (!(key in actualDict)) {
if (typeof defaultValue === "object") {
actualDict[key] = { ...defaultValue };
} else {
actualDict[key] = defaultValue;
}
}
}
};
const cleanDict = (dict, defaultDict) => {
for (const key of Object.keys(dict)) {
if (!(key in defaultDict)) {
delete dict[key];
}
}
};
const fixLists = (dict, blacklist, stringify) => {
for (const [key, val] of Object.entries(dict)) {
if (blacklist.includes(key)) continue;
if (Array.isArray(val)) {
// Replace constant lists by a single number
let isConstant = true;
for (let i = 1; i < val.length; i++) {
if (val[i - 1] !== val[i]) {
isConstant = false;
break;
}
}
if (isConstant) dict[key] = val[0];
// Convert lists to JSON strings
if (stringify) dict[key] = JSON.stringify(dict[key]);
}
if (typeof val === "object") {
fixLists(val, blacklist, stringify);
}
}
};
const openRelogDB = async () => {
const dbPromise = await openDB("RELOG", 1, {
upgrade(db) {
db.createObjectStore("casebuilder");
},
});
return dbPromise;
};
const InputPage = () => {
const fileElem = useRef();
let [data, setData] = useState(defaultData);
let [messages, setMessages] = useState([]);
let [processing, setProcessing] = useState(false);
const save = async (data) => {
const db = await openRelogDB();
await db.put("casebuilder", data, "data");
};
useEffect(async () => {
const db = await openRelogDB();
const data = await db.get("casebuilder", "data");
if (data) setData(data);
}, []);
const history = useHistory();
const promptName = (prevData) => {
const name = prompt("Name");
if (!name || name.length === 0) return;
if (name in prevData.products || name in prevData.plants) return;
return name;
};
const onAddPlant = () => {
setData((prevData) => {
const name = promptName(prevData);
if (name === undefined) return prevData;
const newData = { ...prevData };
const [x, y] = randomPosition();
newData.plants[name] = {
...defaultPlant,
x: x,
y: y,
};
save(newData);
return newData;
});
};
const onAddProduct = () => {
setData((prevData) => {
const name = promptName(prevData);
if (name === undefined) return prevData;
const newData = { ...prevData };
const [x, y] = randomPosition();
console.log(x, y);
newData.products[name] = {
...defaultProduct,
x: x,
y: y,
};
save(newData);
return newData;
});
};
const onRenamePlant = (prevName, newName) => {
setData((prevData) => {
const newData = { ...prevData };
newData.plants[newName] = newData.plants[prevName];
delete newData.plants[prevName];
save(newData);
return newData;
});
};
const onRenameProduct = (prevName, newName) => {
setData((prevData) => {
const newData = { ...prevData };
newData.products[newName] = newData.products[prevName];
delete newData.products[prevName];
for (const [, plant] of Object.entries(newData.plants)) {
if (plant.input === prevName) {
plant.input = newName;
}
let outputFound = false;
for (const [outputName] of Object.entries(
plant["outputs (tonne/tonne)"]
)) {
if (outputName === prevName) outputFound = true;
}
if (outputFound) {
plant["outputs (tonne/tonne)"][newName] =
plant["outputs (tonne/tonne)"][prevName];
delete plant["outputs (tonne/tonne)"][prevName];
}
}
save(newData);
return newData;
});
};
const onMovePlant = (plantName, x, y) => {
setData((prevData) => {
const newData = { ...prevData };
newData.plants[plantName].x = x;
newData.plants[plantName].y = y;
save(newData);
return newData;
});
};
const onMoveProduct = (productName, x, y) => {
setData((prevData) => {
const newData = { ...prevData };
newData.products[productName].x = x;
newData.products[productName].y = y;
save(newData);
return newData;
});
};
const onRemovePlant = (plantName) => {
setData((prevData) => {
const newData = { ...prevData };
delete newData.plants[plantName];
save(newData);
return newData;
});
};
const onRemoveProduct = (productName) => {
setData((prevData) => {
const newData = { ...prevData };
delete newData.products[productName];
for (const [, plant] of Object.entries(newData.plants)) {
if (plant.input === productName) {
delete plant.input;
}
let outputFound = false;
for (const [outputName] of Object.entries(
plant["outputs (tonne/tonne)"]
)) {
if (outputName === productName) outputFound = true;
}
if (outputFound) {
delete plant["outputs (tonne/tonne)"][productName];
}
}
save(newData);
return newData;
});
};
const onSetPlantInput = (plantName, productName) => {
setData((prevData) => {
const newData = { ...prevData };
newData.plants[plantName].input = productName;
save(newData);
return newData;
});
};
const onAddPlantOutput = (plantName, productName) => {
setData((prevData) => {
if (productName in prevData.plants[plantName]["outputs (tonne/tonne)"]) {
return prevData;
}
const newData = { ...prevData };
[
"outputs (tonne/tonne)",
"disposal cost ($/tonne)",
"disposal limit (tonne)",
].forEach((key) => {
newData.plants[plantName][key] = { ...newData.plants[plantName][key] };
newData.plants[plantName][key][productName] = 0;
});
save(newData);
return newData;
});
};
const onSave = () => {
const exported = exportData(data);
const valid = validate(exported);
console.log(exported);
console.log(validate.errors);
if (valid) {
generateFile("case.json", JSON.stringify(exported, null, 2));
} else {
setMessages([
...messages,
"Data has validation errors and could not be saved.",
]);
}
};
const onClear = () => {
const newData = JSON.parse(JSON.stringify(defaultData));
setData(newData);
save(newData);
};
const onLoad = (contents) => {
const parsed = JSON.parse(contents);
const valid = validate(parsed);
if (valid) {
const newData = importData(parsed);
setData(newData);
save(newData);
} else {
console.log(validate.errors);
setMessages([...messages, "File is corrupted and could not be loaded."]);
}
};
const onDismissMessage = (idx) => {
setMessages([...messages.slice(0, idx), ...messages.slice(idx + 1)]);
};
const onChange = (val, field1, field2) => {
setData((prevData) => {
const newData = { ...prevData };
if (field2 !== undefined) {
newData[field1][field2] = val;
} else {
newData[field1] = val;
}
save(newData);
return newData;
});
};
let productComps = [];
for (const [prodName, prod] of Object.entries(data.products)) {
productComps.push(
<ProductBlock
key={prodName}
name={prodName}
value={prod}
onChange={(v) => onChange(v, "products", prodName, v)}
/>
);
}
const onSubmit = async () => {
const exported = exportData(data);
const valid = validate(exported);
if (valid) {
setProcessing(true);
try {
const response = await fetch(`${SERVER_URL}/submit`, {
method: "POST",
body: JSON.stringify(exported),
});
if (response.ok) {
const data = await response.json();
history.push(`solver/${data.job_id}`);
} else {
throw "Error";
}
} catch {
setMessages([
...messages,
"Failed to submit job. Please try again later.",
]);
} finally {
setProcessing(false);
}
}
};
let plantComps = [];
for (const [plantName, plant] of Object.entries(data.plants)) {
plantComps.push(
<PlantBlock
key={plantName}
name={plantName}
value={plant}
onChange={(v) => onChange(v, "plants", plantName)}
/>
);
}
let messageComps = [];
for (let i = 0; i < messages.length; i++) {
messageComps.push(
<div className="message error" key={i}>
<p>{messages[i]}</p>
<Button label="Dismiss" onClick={() => onDismissMessage(i)} />
</div>
);
}
const onFileSelected = () => {
const file = fileElem.current.files[0];
if (file) {
const reader = new FileReader();
reader.addEventListener("load", () => {
onLoad(reader.result);
});
reader.readAsText(file);
}
fileElem.current.value = "";
};
return (
<>
<Header title="Case Builder">
<Button label="Clear" disabled={processing} onClick={onClear} />
<Button
label="Load"
disabled={processing}
onClick={(e) => fileElem.current.click()}
/>
<Button label="Save" disabled={processing} onClick={onSave} />
<Button label="Submit" disabled={processing} onClick={onSubmit} />
<input
type="file"
ref={fileElem}
accept=".json"
style={{ display: "none" }}
onChange={onFileSelected}
/>
</Header>
<div id="contentBackground">
<div id="content">
<PipelineBlock
onAddPlant={onAddPlant}
onAddPlantOutput={onAddPlantOutput}
onAddProduct={onAddProduct}
onMovePlant={onMovePlant}
onMoveProduct={onMoveProduct}
onRenamePlant={onRenamePlant}
onRenameProduct={onRenameProduct}
onSetPlantInput={onSetPlantInput}
onRemovePlant={onRemovePlant}
onRemoveProduct={onRemoveProduct}
plants={data.plants}
products={data.products}
/>
<ParametersBlock
value={data.parameters}
onChange={(v) => onChange(v, "parameters")}
/>
{productComps}
{plantComps}
</div>
</div>
<div id="messageTray">{messageComps}</div>
<Footer />
</>
);
};
export default InputPage;

View File

@@ -0,0 +1,46 @@
import Section from "../common/Section";
import Card from "../common/Card";
import Form from "../common/Form";
import TextInputRow from "../common/TextInputRow";
const ParametersBlock = (props) => {
const onChangeField = (field, val) => {
props.value[field] = val;
props.onChange(props.value);
};
return (
<>
<Section title="Parameters" />
<Card>
<Form>
<TextInputRow
label="Time horizon"
unit="years"
tooltip="Number of years in the simulation."
value={props.value["time horizon (years)"]}
onChange={(v) => onChangeField("time horizon (years)", v)}
validate="int"
/>
<TextInputRow
label="Building period"
unit="years"
tooltip="List of years in which we are allowed to open new plants. For example, if this parameter is set to [1,2,3], we can only open plants during the first three years. By default, this equals [1]; that is, plants can only be opened during the first year."
value={props.value["building period (years)"]}
onChange={(v) => onChangeField("building period (years)", v)}
validate="intList"
/>
<TextInputRow
label="Inflation rate"
unit="%"
tooltip="Rate at which costs change from one time period to the next. This is applied uniformly to all costs."
value={props.value["inflation rate (%)"]}
onChange={(v) => onChangeField("inflation rate (%)", v)}
validate="float"
/>
</Form>
</Card>
</>
);
};
export default ParametersBlock;

View File

@@ -0,0 +1,200 @@
import React, { useEffect } from "react";
import ReactFlow, { Background, isNode, Controls } from "react-flow-renderer";
import Section from "../common/Section";
import Card from "../common/Card";
import Button from "../common/Button";
import styles from "./PipelineBlock.module.css";
import dagre from "dagre";
window.nextX = 15;
window.nextY = 15;
export const randomPosition = () => {
window.nextY += 60;
if (window.nextY >= 500) {
window.nextY = 15;
window.nextX += 150;
}
return [window.nextX, window.nextY];
};
const getLayoutedElements = (elements) => {
const nodeWidth = 125;
const nodeHeight = 45;
const dagreGraph = new dagre.graphlib.Graph();
dagreGraph.setDefaultEdgeLabel(() => ({}));
dagreGraph.setGraph({ rankdir: "LR" });
elements.forEach((el) => {
if (isNode(el)) {
dagreGraph.setNode(el.id, { width: nodeWidth, height: nodeHeight });
} else {
dagreGraph.setEdge(el.source, el.target);
}
});
dagre.layout(dagreGraph);
return elements.map((el) => {
if (isNode(el)) {
const n = dagreGraph.node(el.id);
el.position = {
x: 15 + n.x - nodeWidth / 2,
y: 15 + n.y - nodeHeight / 2,
};
}
return el;
});
};
const PipelineBlock = (props) => {
let elements = [];
let mapNameToType = {};
let hasNullPositions = false;
for (const [productName, product] of Object.entries(props.products)) {
if (!product.x || !product.y) hasNullPositions = true;
mapNameToType[productName] = "product";
elements.push({
id: productName,
data: { label: productName, type: "product" },
position: { x: product.x, y: product.y },
sourcePosition: "right",
targetPosition: "left",
className: styles.ProductNode,
});
}
for (const [plantName, plant] of Object.entries(props.plants)) {
if (!plant.x || !plant.y) hasNullPositions = true;
mapNameToType[plantName] = "plant";
elements.push({
id: plantName,
data: { label: plantName, type: "plant" },
position: { x: plant.x, y: plant.y },
sourcePosition: "right",
targetPosition: "left",
className: styles.PlantNode,
});
if (plant.input !== undefined) {
elements.push({
id: `${plant.input}-${plantName}`,
source: plant.input,
target: plantName,
animated: true,
style: { stroke: "black" },
selectable: false,
});
}
for (const [productName] of Object.entries(
plant["outputs (tonne/tonne)"]
)) {
elements.push({
id: `${plantName}-${productName}`,
source: plantName,
target: productName,
animated: true,
style: { stroke: "black" },
selectable: false,
});
}
}
const onNodeDoubleClick = (ev, node) => {
const oldName = node.data.label;
const newName = window.prompt("Enter new name", oldName);
if (newName === undefined || newName.length === 0) return;
if (newName in mapNameToType) return;
if (node.data.type === "plant") {
props.onRenamePlant(oldName, newName);
} else {
props.onRenameProduct(oldName, newName);
}
};
const onElementsRemove = (elements) => {
elements.forEach((el) => {
if (!(el.id in mapNameToType)) return;
if (el.data.type === "plant") {
props.onRemovePlant(el.data.label);
} else {
props.onRemoveProduct(el.data.label);
}
});
};
const onNodeDragStop = (ev, node) => {
if (node.data.type === "plant") {
props.onMovePlant(node.data.label, node.position.x, node.position.y);
} else {
props.onMoveProduct(node.data.label, node.position.x, node.position.y);
}
};
const onConnect = (args) => {
const sourceType = mapNameToType[args.source];
const targetType = mapNameToType[args.target];
if (sourceType === "product" && targetType === "plant") {
props.onSetPlantInput(args.target, args.source);
} else if (sourceType === "plant" && targetType === "product") {
props.onAddPlantOutput(args.source, args.target);
}
};
const onLayout = () => {
const layoutedElements = getLayoutedElements(elements);
layoutedElements.forEach((el) => {
if (isNode(el)) {
if (el.data.type === "plant") {
props.onMovePlant(el.data.label, el.position.x, el.position.y);
} else {
props.onMoveProduct(el.data.label, el.position.x, el.position.y);
}
}
});
};
useEffect(() => {
if (hasNullPositions) onLayout();
}, [hasNullPositions]);
return (
<>
<Section title="Pipeline" />
<Card>
<div className={styles.PipelineBlock}>
<ReactFlow
elements={elements}
onNodeDoubleClick={onNodeDoubleClick}
onNodeDragStop={onNodeDragStop}
onConnect={onConnect}
onElementsRemove={onElementsRemove}
deleteKeyCode={46}
maxZoom={1.25}
minZoom={0.5}
snapToGrid={true}
preventScrolling={false}
>
<Background />
<Controls showInteractive={false} />
</ReactFlow>
</div>
<div style={{ textAlign: "center" }}>
<Button
label="Add product"
kind="inline"
onClick={props.onAddProduct}
/>
<Button label="Add plant" kind="inline" onClick={props.onAddPlant} />
<Button label="Auto Layout" kind="inline" onClick={onLayout} />
<Button
label="?"
kind="inline"
tooltip="Drag from one connector to another to create links between products and plants. Double click to rename an element. Click an element to select and move it. Press the [Delete] key to remove it."
/>
</div>
</Card>
</>
);
};
export default PipelineBlock;

View File

@@ -0,0 +1,25 @@
.PipelineBlock {
height: 800px !important;
border: 1px solid rgba(0, 0, 0, 0.1) !important;
border-radius: var(--border-radius) !important;
margin-bottom: 12px !important;
}
.PlantNode,
.ProductNode {
border-color: rgba(0, 0, 0, 0.8) !important;
color: black !important;
font-size: 13px !important;
border-width: 1px !important;
border-radius: 6px !important;
box-shadow: 0px 2px 4px -3px black !important;
width: 100px !important;
}
.PlantNode {
background-color: #8d8 !important;
}
.ProductNode {
background-color: #e6e6e6 !important;
}

View File

@@ -0,0 +1,271 @@
import Section from "../common/Section";
import Card from "../common/Card";
import Form from "../common/Form";
import TextInputRow from "../common/TextInputRow";
import FileInputRow from "../common/FileInputRow";
import DictInputRow from "../common/DictInputRow";
import { csvFormat, csvParse, generateFile } from "./csv";
const PlantBlock = (props) => {
const onChange = (val, field1, field2, field3) => {
const newPlant = { ...props.value };
if (field3 !== undefined) {
newPlant[field1][field2][field3] = val;
} else if (field2 !== undefined) {
newPlant[field1][field2] = val;
} else {
newPlant[field1] = val;
}
props.onChange(newPlant);
};
const onCandidateLocationsTemplate = () => {
generateFile(
"Candidate locations - Template.csv",
csvFormat([
{
name: "Washakie County",
"latitude (deg)": "43.8356",
"longitude (deg)": "-107.6602",
"area cost factor": "0.88",
},
{
name: "Platte County",
"latitude (deg)": "42.1314",
"longitude (deg)": "-104.9676",
"area cost factor": "1.29",
},
{
name: "Park County",
"latitude (deg)": "44.4063",
"longitude (deg)": "-109.4153",
"area cost factor": "0.99",
},
{
name: "Goshen County",
"latitude (deg)": "42.0853",
"longitude (deg)": "-104.3534",
"area cost factor": "1",
},
])
);
};
const onCandidateLocationsFile = (contents) => {
const data = csvParse({
contents: contents,
requiredCols: [
"name",
"latitude (deg)",
"longitude (deg)",
"area cost factor",
],
});
const result = {};
data.forEach((el) => {
result[el["name"]] = {
"latitude (deg)": el["latitude (deg)"],
"longitude (deg)": el["longitude (deg)"],
"area cost factor": el["area cost factor"],
};
});
onChange(result, "locations");
};
const onCandidateLocationsDownload = () => {
const result = [];
for (const [locationName, locationDict] of Object.entries(
props.value["locations"]
)) {
result.push({
name: locationName,
"latitude (deg)": locationDict["latitude (deg)"],
"longitude (deg)": locationDict["longitude (deg)"],
"area cost factor": locationDict["area cost factor"],
});
}
generateFile(`Candidate locations - ${props.name}.csv`, csvFormat(result));
};
const onCandidateLocationsClear = () => {
onChange({}, "locations");
};
let description = "No locations set";
const nCenters = Object.keys(props.value["locations"]).length;
if (nCenters > 0) description = `${nCenters} locations`;
const shouldDisableMaxCap =
props.value["minimum capacity (tonne)"] ===
props.value["maximum capacity (tonne)"];
return (
<>
<Section title={props.name} />
<Card>
<Form>
<h1>General information</h1>
<FileInputRow
label="Candidate locations"
tooltip="A table describing potential locations where plants can be built and their characteristics."
onTemplate={onCandidateLocationsTemplate}
onFile={onCandidateLocationsFile}
onDownload={onCandidateLocationsDownload}
onClear={onCandidateLocationsClear}
value={description}
/>
<h1>Inputs & Outputs</h1>
<TextInputRow
label="Input"
tooltip="The name of the product that this plant takes as input."
disabled="disabled"
value={props.value["input"]}
/>
<DictInputRow
label="Outputs"
unit="tonne/tonne"
tooltip="A dictionary specifying how many tonnes of each product is produced for each tonne of input."
value={props.value["outputs (tonne/tonne)"]}
onChange={(v) => onChange(v, "outputs (tonne/tonne)")}
disableKeys={true}
validate="float"
/>
<h1>Capacity & Costs</h1>
<TextInputRow
label="Minimum capacity"
unit="tonne"
tooltip="The minimum size of the plant."
value={props.value["minimum capacity (tonne)"]}
onChange={(v) => onChange(v, "minimum capacity (tonne)")}
validate="float"
/>
<TextInputRow
label="Opening cost (min capacity)"
unit="$"
tooltip="The cost to open the plant at minimum capacity."
value={props.value["opening cost (min capacity) ($)"]}
onChange={(v) => onChange(v, "opening cost (min capacity) ($)")}
validate="float"
/>
<TextInputRow
label="Fixed operating cost (min capacity)"
unit="$"
tooltip="The cost to keep the plant open, even if the plant doesn't process anything."
value={props.value["fixed operating cost (min capacity) ($)"]}
onChange={(v) =>
onChange(v, "fixed operating cost (min capacity) ($)")
}
validate="float"
/>
<TextInputRow
label="Maximum capacity"
unit="tonne"
tooltip="The maximum size of the plant."
value={props.value["maximum capacity (tonne)"]}
onChange={(v) => onChange(v, "maximum capacity (tonne)")}
validate="float"
/>
<TextInputRow
label="Opening cost (max capacity)"
unit="$"
tooltip="The cost to open a plant of this size."
value={
shouldDisableMaxCap
? ""
: props.value["opening cost (max capacity) ($)"]
}
onChange={(v) => onChange(v, "opening cost (max capacity) ($)")}
validate="float"
disabled={shouldDisableMaxCap}
/>
<TextInputRow
label="Fixed operating cost (max capacity)"
unit="$"
tooltip="The cost to keep the plant open, even if the plant doesn't process anything."
value={
shouldDisableMaxCap
? ""
: props.value["fixed operating cost (max capacity) ($)"]
}
onChange={(v) =>
onChange(v, "fixed operating cost (max capacity) ($)")
}
validate="float"
disabled={shouldDisableMaxCap}
/>
<TextInputRow
label="Variable operating cost"
unit="$/tonne"
tooltip="The cost that the plant incurs to process each tonne of input."
value={props.value["variable operating cost ($/tonne)"]}
onChange={(v) => onChange(v, "variable operating cost ($/tonne)")}
validate="float"
/>
<TextInputRow
label="Energy expenditure"
unit="GJ/tonne"
tooltip="The energy required to process one tonne of the input."
value={props.value["energy (GJ/tonne)"]}
onChange={(v) => onChange(v, "energy (GJ/tonne)")}
validate="float"
/>
<h1>Storage</h1>
<TextInputRow
label="Storage cost"
unit="$/tonne"
tooltip="The cost to store a tonne of input product for one time period."
value={props.value["storage"]["cost ($/tonne)"]}
onChange={(v) => onChange(v, "storage", "cost ($/tonne)")}
validate="float"
/>
<TextInputRow
label="Storage limit"
unit="tonne"
tooltip="The maximum amount of input product this plant can have in storage at any given time."
value={props.value["storage"]["limit (tonne)"]}
onChange={(v) => onChange(v, "storage", "limit (tonne)")}
validate="float"
/>
<h1>Disposal</h1>
<DictInputRow
label="Disposal cost"
unit="$/tonne"
tooltip="The cost to dispose of the product."
value={props.value["disposal cost ($/tonne)"]}
onChange={(v) => onChange(v, "disposal cost ($/tonne)")}
disableKeys={true}
validate="float"
/>
<DictInputRow
label="Disposal limit"
unit="tonne"
tooltip="The maximum amount that can be disposed of. If an unlimited amount can be disposed, leave blank."
value={props.value["disposal limit (tonne)"]}
onChange={(v) => onChange(v, "disposal limit (tonne)")}
disableKeys={true}
valuePlaceholder="Unlimited"
validate="float"
/>
<h1>Emissions</h1>
<DictInputRow
label="Emissions"
unit="tonne/tonne"
tooltip="A dictionary mapping the name of each greenhouse gas, produced to process each tonne of input, to the amount of gas produced (in tonne)."
value={props.value["emissions (tonne/tonne)"]}
onChange={(v) => onChange(v, "emissions (tonne/tonne)")}
keyPlaceholder="Emission name"
valuePlaceholder="0"
validate="float"
/>
</Form>
</Card>
</>
);
};
export default PlantBlock;

View File

@@ -0,0 +1,187 @@
import Section from "../common/Section";
import Card from "../common/Card";
import Form from "../common/Form";
import TextInputRow from "../common/TextInputRow";
import FileInputRow from "../common/FileInputRow";
import DictInputRow from "../common/DictInputRow";
import { csvParse, extractNumericColumns, generateFile } from "./csv";
import { csvFormat } from "d3";
const ProductBlock = (props) => {
const onChange = (field, val) => {
const newProduct = { ...props.value };
newProduct[field] = val;
props.onChange(newProduct);
};
const onInitialAmountsFile = (contents) => {
const data = csvParse({
contents: contents,
requiredCols: ["latitude (deg)", "longitude (deg)", "name"],
});
const result = {};
data.forEach((el) => {
result[el["name"]] = {
"latitude (deg)": el["latitude (deg)"],
"longitude (deg)": el["longitude (deg)"],
"amount (tonne)": extractNumericColumns(el, "amount"),
};
});
onChange("initial amounts", result);
};
const onInitialAmountsClear = () => {
onChange("initial amounts", {});
};
const onInitialAmountsTemplate = () => {
generateFile(
"Initial amounts - Template.csv",
csvFormat([
{
name: "Washakie County",
"latitude (deg)": "43.8356",
"longitude (deg)": "-107.6602",
"amount 1": "21902",
"amount 2": "6160",
"amount 3": "2721",
"amount 4": "12917",
"amount 5": "18048",
},
{
name: "Platte County",
"latitude (deg)": "42.1314",
"longitude (deg)": "-104.9676",
"amount 1": "16723",
"amount 2": "8709",
"amount 3": "22584",
"amount 4": "12278",
"amount 5": "7196",
},
{
name: "Park County",
"latitude (deg)": "44.4063",
"longitude (deg)": "-109.4153",
"amount 1": "14731",
"amount 2": "11729",
"amount 3": "15562",
"amount 4": "7703",
"amount 5": "23349",
},
])
);
};
const onInitialAmountsDownload = () => {
const results = [];
for (const [locationName, locationDict] of Object.entries(
props.value["initial amounts"]
)) {
const row = {
name: locationName,
"latitude (deg)": locationDict["latitude (deg)"],
"longitude (deg)": locationDict["longitude (deg)"],
};
locationDict["amount (tonne)"].forEach((el, idx) => {
row[`amount ${idx + 1}`] = el;
});
results.push(row);
}
generateFile(`Initial amounts - ${props.name}.csv`, csvFormat(results));
};
let description = "Not initially available";
let notInitiallyAvailable = true;
const nCenters = Object.keys(props.value["initial amounts"]).length;
if (nCenters > 0) {
description = `${nCenters} collection centers`;
notInitiallyAvailable = false;
}
return (
<>
<Section title={props.name} />
<Card>
<Form>
<h1>General Information</h1>
<FileInputRow
value={description}
label="Initial amounts"
tooltip="A table indicating the amount of this product initially available at each collection center."
accept=".csv"
onFile={onInitialAmountsFile}
onDownload={onInitialAmountsDownload}
onClear={onInitialAmountsClear}
onTemplate={onInitialAmountsTemplate}
disableDownload={notInitiallyAvailable}
disableClear={notInitiallyAvailable}
/>
<h1 style={{ display: nCenters == 0 ? "none" : "block" }}>
Disposal
</h1>
<div style={{ display: nCenters == 0 ? "none" : "block" }}>
<TextInputRow
label="Disposal cost"
unit="$/tonne"
tooltip="The cost to dispose of one tonne of this product at a collection center, without further processing."
value={props.value["disposal cost ($/tonne)"]}
onChange={(v) => onChange("disposal cost ($/tonne)", v)}
validate="floatList"
/>
<TextInputRow
label="Disposal limit"
unit="tonne"
tooltip="The maximum amount (in tonnes) of this product that can be disposed of across all collection centers, without further processing."
value={props.value["disposal limit (tonne)"]}
onChange={(v) => onChange("disposal limit (tonne)", v)}
validate="floatList"
disabled={String(props.value["disposal limit (%)"]).length > 0}
/>
<TextInputRow
label="Disposal limit"
unit="%"
tooltip="The maximum amount of this product that can be disposed of across all collection centers, without further processing, as a percentage of the total amount available."
value={props.value["disposal limit (%)"]}
onChange={(v) => onChange("disposal limit (%)", v)}
validate="floatList"
disabled={props.value["disposal limit (tonne)"].length > 0}
/>
</div>
<h1>Transportation</h1>
<TextInputRow
label="Transportation cost"
unit="$/km/tonne"
tooltip="The cost to transport this product."
value={props.value["transportation cost ($/km/tonne)"]}
onChange={(v) => onChange("transportation cost ($/km/tonne)", v)}
validate="floatList"
/>
<TextInputRow
label="Transportation energy"
unit="J/km/tonne"
tooltip="The energy required to transport this product."
value={props.value["transportation energy (J/km/tonne)"]}
onChange={(v) => onChange("transportation energy (J/km/tonne)", v)}
validate="floatList"
/>
<DictInputRow
label="Transportation emissions"
unit="tonne/km/tonne"
tooltip="A dictionary mapping the name of each greenhouse gas, produced to transport one tonne of this product along one kilometer, to the amount of gas produced."
keyPlaceholder="Emission name"
value={props.value["transportation emissions (tonne/km/tonne)"]}
onChange={(v) =>
onChange("transportation emissions (tonne/km/tonne)", v)
}
validate="floatList"
/>
</Form>
</Card>
</>
);
};
export default ProductBlock;

View File

@@ -0,0 +1,50 @@
import * as d3 from "d3";
export const csvParse = ({ contents, requiredCols }) => {
const data = d3.csvParse(contents, d3.autoType);
requiredCols.forEach((col) => {
if (!(col in data[0])) {
throw Error(`Column "${col}" not found in CSV file.`);
}
});
return data;
};
export const parseCsv = (contents, requiredCols = []) => {
const data = d3.csvParse(contents);
const T = data.columns.length - requiredCols.length;
let isValid = true;
for (let t = 0; t < T; t++) {
requiredCols.push(t + 1);
}
requiredCols.forEach((col) => {
if (!(col in data[0])) {
console.log(`Column "${col}" not found in CSV file.`);
isValid = false;
}
});
if (!isValid) return [undefined, undefined];
return [data, T];
};
export const extractNumericColumns = (obj, prefix) => {
const result = [];
for (let i = 1; `${prefix} ${i}` in obj; i++) {
result.push(obj[`${prefix} ${i}`]);
}
return result;
};
export const csvFormat = (data) => {
return d3.csvFormat(data);
};
export const generateFile = (filename, contents) => {
var link = document.createElement("a");
link.setAttribute("href", URL.createObjectURL(new Blob([contents])));
link.setAttribute("download", filename);
link.style.visibility = "hidden";
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
};

View File

@@ -0,0 +1,53 @@
import { csvParse, extractNumericColumns, csvFormat } from "./csv";
import { exportValue } from "./export";
test("parse CSV", () => {
const contents = "name,location,1,2,3\ntest,illinois,100,200,300";
const actual = csvParse({
contents: contents,
requiredCols: ["name", "location"],
});
expect(actual.length).toEqual(1);
expect(actual[0]).toEqual({
name: "test",
location: "illinois",
1: 100,
2: 200,
3: 300,
});
});
test("parse CSV with missing columns", () => {
const contents = "name,location,1,2,3\ntest,illinois,100,200,300";
expect(() =>
csvParse({
contents: contents,
requiredCols: ["name", "location", "latitude"],
})
).toThrow('Column "latitude" not found in CSV file.');
});
test("extract numeric columns from object", () => {
const obj1 = {
"amount 1": "hello",
"amount 2": "world",
"amount 4": "ignored",
};
const obj2 = { hello: "world" };
expect(extractNumericColumns(obj1, "amount")).toEqual(["hello", "world"]);
expect(extractNumericColumns(obj2, "amount")).toEqual([]);
});
test("generate CSV", () => {
const data = [
{ name: "alice", age: 20 },
{ name: "bob", age: null },
];
expect(csvFormat(data)).toEqual("name,age\nalice,20\nbob,");
});
test("export value", () => {
expect(exportValue("1")).toEqual(1);
expect(exportValue("[1,2,3]")).toEqual([1, 2, 3]);
expect(exportValue("qwe")).toEqual("qwe");
});

View File

@@ -0,0 +1,49 @@
export const defaultProduct = {
"initial amounts": {},
"disposal cost ($/tonne)": "0",
"disposal limit (tonne)": "0",
"disposal limit (%)": "",
"transportation cost ($/km/tonne)": "0",
"transportation energy (J/km/tonne)": "0",
"transportation emissions (tonne/km/tonne)": {},
x: 0,
y: 0,
};
export const defaultPlantLocation = {
"area cost factor": 1.0,
"latitude (deg)": 0,
"longitude (deg)": 0,
};
export const defaultPlant = {
locations: {},
"outputs (tonne/tonne)": {},
"disposal cost ($/tonne)": {},
"disposal limit (tonne)": {},
"emissions (tonne/tonne)": {},
storage: {
"cost ($/tonne)": 0,
"limit (tonne)": 0,
},
"maximum capacity (tonne)": 0,
"minimum capacity (tonne)": 0,
"opening cost (max capacity) ($)": 0,
"opening cost (min capacity) ($)": 0,
"fixed operating cost (max capacity) ($)": 0,
"fixed operating cost (min capacity) ($)": 0,
"variable operating cost ($/tonne)": 0,
"energy (GJ/tonne)": 0,
x: 0,
y: 0,
};
export const defaultData = {
parameters: {
"time horizon (years)": "1",
"building period (years)": "[1]",
"inflation rate (%)": "0",
},
products: {},
plants: {},
};

View File

@@ -0,0 +1,555 @@
const isNumeric = (val) => {
return String(val).length > 0 && !isNaN(val);
};
const keysToList = (obj) => {
const result = [];
for (const key of Object.keys(obj)) {
result.push(key);
}
return result;
};
export const exportValue = (original, T, R = 1) => {
if (isNumeric(original)) {
if (T) {
let v = parseFloat(original);
const result = [];
for (let i = 0; i < T; i++) {
result.push(v);
v *= R;
}
return result;
} else {
return parseFloat(original);
}
}
try {
const parsed = JSON.parse(original);
return parsed;
} catch {
// ignore
}
return original;
};
const exportValueDict = (original, T) => {
const result = {};
for (const [key, val] of Object.entries(original)) {
if (key.length === 0) continue;
result[key] = exportValue(val, T);
}
if (Object.keys(result).length > 0) {
return result;
} else {
return null;
}
};
const computeTotalInitialAmount = (prod) => {
let total = null;
for (const locDict of Object.values(prod["initial amounts"])) {
const locAmount = locDict["amount (tonne)"];
if (!total) total = [...locAmount];
else {
for (let i = 0; i < locAmount.length; i++) {
total[i] += locAmount[i];
}
}
}
return total;
};
export const importList = (args, R = 1) => {
if (args === undefined) return "";
if (Array.isArray(args) && args.length > 0) {
let isConstant = true;
for (let i = 1; i < args.length; i++) {
if (Math.abs(args[i - 1] - args[i] / R) > 1e-3) {
isConstant = false;
break;
}
}
if (isConstant) {
return String(args[0]);
} else {
return JSON.stringify(args);
}
} else {
return args;
}
};
export const importDict = (args) => {
if (!args) return {};
const result = {};
for (const [key, val] of Object.entries(args)) {
result[key] = importList(val);
}
return result;
};
const computeAbsDisposal = (prod) => {
const disposalPerc = prod["disposal limit (%)"];
const total = computeTotalInitialAmount(prod);
const disposalAbs = [];
for (let i = 0; i < total.length; i++) {
disposalAbs[i] = (total[i] * disposalPerc) / 100;
}
return disposalAbs;
};
const computeInflationAndTimeHorizon = (obj, keys) => {
for (let i = 0; i < keys.length; i++) {
const list = obj[keys[i]];
if (
Array.isArray(list) &&
list.length > 1 &&
isNumeric(list[0]) &&
isNumeric(list[1]) &&
Math.abs(list[0]) > 0
) {
return [list[1] / list[0], list.length];
}
}
return [1, 1];
};
export const exportProduct = (original, parameters) => {
const result = {};
// Read time horizon
let T = parameters["time horizon (years)"];
if (isNumeric(T)) T = parseInt(T);
else T = 1;
// Read inflation
let R = parameters["inflation rate (%)"];
if (isNumeric(R)) R = parseFloat(R) / 100 + 1;
else R = 1;
// Copy constant time series
result["initial amounts"] = original["initial amounts"];
["disposal limit (tonne)", "transportation energy (J/km/tonne)"].forEach(
(key) => {
const v = exportValue(original[key], T);
if (v.length > 0) result[key] = v;
}
);
// Copy cost time series (with inflation)
["disposal cost ($/tonne)", "transportation cost ($/km/tonne)"].forEach(
(key) => {
const v = exportValue(original[key], T, R);
if (v.length > 0) result[key] = v;
}
);
// Copy dictionaries
["transportation emissions (tonne/km/tonne)"].forEach((key) => {
const v = exportValueDict(original[key], T);
if (v) result[key] = v;
});
// Transform percentage disposal limits into absolute
if (isNumeric(original["disposal limit (%)"])) {
result["disposal limit (tonne)"] = computeAbsDisposal(original);
}
return result;
};
export const exportPlant = (original, parameters) => {
const result = {};
// Read time horizon
let T = parameters["time horizon (years)"];
if (isNumeric(T)) T = parseInt(T);
else T = 1;
// Read inflation
let R = parameters["inflation rate (%)"];
if (isNumeric(R)) R = parseFloat(R) / 100 + 1;
else R = 1;
// Copy scalar values
["input"].forEach((key) => {
result[key] = original[key];
});
// Copy time series values
["energy (GJ/tonne)"].forEach((key) => {
result[key] = exportValue(original[key], T);
if (result[key] === undefined) {
delete result[key];
}
});
// Copy scalar dicts
["outputs (tonne/tonne)"].forEach((key) => {
const v = exportValueDict(original[key]);
if (v) result[key] = v;
});
// Copy time series dicts
["emissions (tonne/tonne)"].forEach((key) => {
const v = exportValueDict(original[key], T);
if (v) result[key] = v;
});
const minCap = original["minimum capacity (tonne)"];
const maxCap = original["maximum capacity (tonne)"];
result.locations = {};
for (const [locName, origDict] of Object.entries(original["locations"])) {
const resDict = (result.locations[locName] = {});
const capDict = (resDict["capacities (tonne)"] = {});
const acf = origDict["area cost factor"];
const exportValueAcf = (obj) => {
const v = exportValue(obj, T, R);
if (Array.isArray(v)) {
return v.map((v) => v * acf);
}
return "";
};
// Copy scalar values
["latitude (deg)", "longitude (deg)"].forEach((key) => {
resDict[key] = origDict[key];
});
// Copy minimum capacity dict
capDict[minCap] = {};
for (const [resKeyName, origKeyName] of Object.entries({
"opening cost ($)": "opening cost (min capacity) ($)",
"fixed operating cost ($)": "fixed operating cost (min capacity) ($)",
"variable operating cost ($/tonne)": "variable operating cost ($/tonne)",
})) {
capDict[minCap][resKeyName] = exportValueAcf(original[origKeyName]);
}
if (maxCap !== minCap) {
// Copy maximum capacity dict
capDict[maxCap] = {};
for (const [resKeyName, origKeyName] of Object.entries({
"opening cost ($)": "opening cost (max capacity) ($)",
"fixed operating cost ($)": "fixed operating cost (max capacity) ($)",
"variable operating cost ($/tonne)":
"variable operating cost ($/tonne)",
})) {
capDict[maxCap][resKeyName] = exportValueAcf(original[origKeyName]);
}
}
// Copy disposal
resDict.disposal = {};
for (const [dispName, dispCost] of Object.entries(
original["disposal cost ($/tonne)"]
)) {
if (dispName.length === 0) continue;
const v = exportValueAcf(dispCost, T);
if (v) {
resDict.disposal[dispName] = { "cost ($/tonne)": v };
const limit = original["disposal limit (tonne)"][dispName];
if (isNumeric(limit)) {
resDict.disposal[dispName]["limit (tonne)"] = exportValue(limit, T);
}
}
}
// Copy storage
resDict.storage = {
"cost ($/tonne)": exportValueAcf(original["storage"]["cost ($/tonne)"]),
};
const storLimit = original["storage"]["limit (tonne)"];
if (isNumeric(storLimit)) {
resDict.storage["limit (tonne)"] = exportValue(storLimit);
}
}
return result;
};
export const exportData = (original) => {
const result = {
parameters: {},
products: {},
plants: {},
};
// Export parameters
["time horizon (years)", "building period (years)"].forEach((key) => {
result.parameters[key] = exportValue(original.parameters[key]);
});
// Read time horizon
let T = result.parameters["time horizon (years)"];
if (!isNumeric(T)) T = 1;
// Export products
for (const [prodName, prodDict] of Object.entries(original.products)) {
result.products[prodName] = exportProduct(prodDict, original.parameters);
}
// Export plants
for (const [plantName, plantDict] of Object.entries(original.plants)) {
result.plants[plantName] = exportPlant(plantDict, original.parameters);
}
return result;
};
const compressDisposalLimits = (original, result) => {
if (!("disposal limit (tonne)" in original)) {
return;
}
const total = computeTotalInitialAmount(original);
if (!total) return;
const limit = original["disposal limit (tonne)"];
let perc = Math.round((limit[0] / total[0]) * 1e6) / 1e6;
for (let i = 1; i < limit.length; i++) {
if (Math.abs(limit[i] / total[i] - perc) > 1e-5) {
return;
}
}
result["disposal limit (tonne)"] = "";
result["disposal limit (%)"] = String(perc * 100);
};
export const importProduct = (original) => {
const prod = {};
const parameters = {};
prod["initial amounts"] = { ...original["initial amounts"] };
// Initialize null values
["x", "y"].forEach((key) => {
prod[key] = null;
});
// Initialize empty values
["disposal limit (%)"].forEach((key) => {
prod[key] = "";
});
// Import constant lists
["transportation energy (J/km/tonne)", "disposal limit (tonne)"].forEach(
(key) => {
prod[key] = importList(original[key]);
}
);
// Compute inflation and time horizon
const [R, T] = computeInflationAndTimeHorizon(original, [
"transportation cost ($/km/tonne)",
"disposal cost ($/tonne)",
]);
parameters["inflation rate (%)"] = String((R - 1) * 100);
parameters["time horizon (years)"] = String(T);
// Import cost lists
["transportation cost ($/km/tonne)", "disposal cost ($/tonne)"].forEach(
(key) => {
prod[key] = importList(original[key], R);
}
);
// Import dicts
["transportation emissions (tonne/km/tonne)"].forEach((key) => {
prod[key] = importDict(original[key]);
});
// Attempt to convert absolute disposal limits to relative
compressDisposalLimits(original, prod);
return [prod, parameters];
};
export const importPlant = (original) => {
const plant = {};
const parameters = {};
plant["storage"] = {};
plant["storage"]["cost ($/tonne)"] = 0;
plant["storage"]["limit (tonne)"] = 0;
plant["disposal cost ($/tonne)"] = 0;
plant["disposal limit (tonne)"] = 0;
// Initialize null values
["x", "y"].forEach((key) => {
plant[key] = null;
});
// Import scalar values
["input"].forEach((key) => {
plant[key] = original[key];
});
// Import timeseries values
["energy (GJ/tonne)"].forEach((key) => {
plant[key] = importList(original[key]);
if (plant[key] === "") {
delete plant[key];
}
});
// Import dicts
["outputs (tonne/tonne)", "emissions (tonne/tonne)"].forEach((key) => {
plant[key] = importDict(original[key]);
});
let costsInitialized = false;
let R = null;
// Read locations
const resLocDict = (plant.locations = {});
for (const [locName, origLocDict] of Object.entries(original["locations"])) {
resLocDict[locName] = {};
// Import latitude and longitude
["latitude (deg)", "longitude (deg)"].forEach((key) => {
resLocDict[locName][key] = origLocDict[key];
});
const capacities = keysToList(origLocDict["capacities (tonne)"]);
const last = capacities.length - 1;
const minCap = capacities[0];
const maxCap = capacities[last];
const minCapDict = origLocDict["capacities (tonne)"][minCap];
const maxCapDict = origLocDict["capacities (tonne)"][maxCap];
// Import min/max capacity
if ("minimum capacity (tonne)" in plant) {
if (
plant["minimum capacity (tonne)"] !== minCap ||
plant["maximum capacity (tonne)"] !== maxCap
) {
throw "Data loss";
}
} else {
plant["minimum capacity (tonne)"] = minCap;
plant["maximum capacity (tonne)"] = maxCap;
}
// Compute area cost factor
let acf = 1;
if (costsInitialized) {
acf = plant["opening cost (max capacity) ($)"];
if (Array.isArray(acf)) acf = acf[0];
acf = maxCapDict["opening cost ($)"][0] / acf;
}
resLocDict[locName]["area cost factor"] = acf;
const [R, T] = computeInflationAndTimeHorizon(maxCapDict, [
"opening cost ($)",
"fixed operating cost ($)",
"variable operating cost ($/tonne)",
]);
parameters["inflation rate (%)"] = String((R - 1) * 100);
parameters["time horizon (years)"] = String(T);
// Read adjusted costs
const importListAcf = (obj) =>
importList(
obj.map((v) => v / acf),
R
);
const openCostMax = importListAcf(maxCapDict["opening cost ($)"]);
const openCostMin = importListAcf(minCapDict["opening cost ($)"]);
const fixCostMax = importListAcf(maxCapDict["fixed operating cost ($)"]);
const fixCostMin = importListAcf(minCapDict["fixed operating cost ($)"]);
const storCost = importListAcf(origLocDict.storage["cost ($/tonne)"]);
const storLimit = String(origLocDict.storage["limit (tonne)"]);
const varCost = importListAcf(
minCapDict["variable operating cost ($/tonne)"]
);
const dispCost = {};
const dispLimit = {};
for (const prodName of Object.keys(original["outputs (tonne/tonne)"])) {
dispCost[prodName] = "";
dispLimit[prodName] = "";
if (prodName in origLocDict["disposal"]) {
const prodDict = origLocDict["disposal"][prodName];
dispCost[prodName] = importListAcf(prodDict["cost ($/tonne)"]);
if ("limit (tonne)" in prodDict)
dispLimit[prodName] = importList(prodDict["limit (tonne)"]);
}
}
const check = (left, right) => {
let valid = true;
if (isNumeric(left) && isNumeric(right)) {
valid = Math.abs(left - right) < 1.0;
} else {
valid = left === right;
}
if (!valid)
console.warn(`Data loss detected: ${locName}, ${left} != ${right}`);
};
if (costsInitialized) {
// Verify that location costs match the previously initialized ones
check(plant["opening cost (max capacity) ($)"], openCostMax);
check(plant["opening cost (min capacity) ($)"], openCostMin);
check(plant["fixed operating cost (max capacity) ($)"], fixCostMax);
check(plant["fixed operating cost (min capacity) ($)"], fixCostMin);
check(plant["variable operating cost ($/tonne)"], varCost);
check(plant["storage"]["cost ($/tonne)"], storCost);
check(plant["storage"]["limit (tonne)"], storLimit);
check(String(plant["disposal cost ($/tonne)"]), String(dispCost));
check(String(plant["disposal limit (tonne)"]), String(dispLimit));
} else {
// Initialize plant costs
costsInitialized = true;
plant["opening cost (max capacity) ($)"] = openCostMax;
plant["opening cost (min capacity) ($)"] = openCostMin;
plant["fixed operating cost (max capacity) ($)"] = fixCostMax;
plant["fixed operating cost (min capacity) ($)"] = fixCostMin;
plant["variable operating cost ($/tonne)"] = varCost;
plant["storage"] = {};
plant["storage"]["cost ($/tonne)"] = storCost;
plant["storage"]["limit (tonne)"] = storLimit;
plant["disposal cost ($/tonne)"] = dispCost;
plant["disposal limit (tonne)"] = dispLimit;
parameters["inflation rate (%)"] = String((R - 1) * 100);
}
}
return [plant, parameters];
};
export const importData = (original) => {
["parameters", "plants", "products"].forEach((key) => {
if (!(key in original)) {
throw "File not recognized.";
}
});
const result = {};
result.parameters = importDict(original.parameters);
["building period (years)"].forEach((k) => {
result.parameters[k] = JSON.stringify(original.parameters[k]);
});
result.parameters["inflation rate (%)"] = "0";
// Import products
result.products = {};
for (const [prodName, origProdDict] of Object.entries(original.products)) {
const [recoveredProd, recoveredParams] = importProduct(origProdDict);
result.products[prodName] = recoveredProd;
result.parameters = { ...result.parameters, ...recoveredParams };
}
// Import plants
result.plants = {};
for (const [plantName, origPlantDict] of Object.entries(original.plants)) {
const [recoveredPlant, recoveredParams] = importPlant(origPlantDict);
result.plants[plantName] = recoveredPlant;
result.parameters = { ...result.parameters, ...recoveredParams };
}
return result;
};
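
A quick illustration of the export/import round trip above (a sketch, not repository code): with a three-year horizon and 100% inflation, a constant form value is expanded into a geometric time series on export and collapsed back into a single string on import, matching the expectations in the test file that follows.

// Hypothetical example: T = 3 years, inflation 100% => R = 2.
import { exportValue, importList } from "./export";

exportValue("50", 3, 2);       // -> [50, 100, 200]
exportValue("[1,2,3]");        // -> [1, 2, 3] (already a list, parsed as JSON)
importList([50, 100, 200], 2); // -> "50" (constant once inflation is removed)
importList([1, 2, 3]);         // -> "[1,2,3]"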

View File

@@ -0,0 +1,583 @@
import {
exportProduct,
exportPlant,
importProduct,
importList,
importDict,
importPlant,
} from "./export";
const sampleProductsOriginal = [
// basic product
{
"initial amounts": {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
"amount (tonne)": [100, 200, 300],
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
"amount (tonne)": [100, 200, 300],
},
"Park County": {
"latitude (deg)": 44.4063,
"longitude (deg)": -109.4153,
"amount (tonne)": [100, 200, 300],
},
},
"disposal cost ($/tonne)": "50",
"disposal limit (tonne)": "30",
"disposal limit (%)": "",
"transportation cost ($/km/tonne)": "0",
"transportation energy (J/km/tonne)": "10",
"transportation emissions (tonne/km/tonne)": {
CO2: "0.5",
},
x: null,
y: null,
},
// product with percentage disposal limit
{
"initial amounts": {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
"amount (tonne)": [100, 200, 300],
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
"amount (tonne)": [100, 200, 300],
},
"Park County": {
"latitude (deg)": 44.4063,
"longitude (deg)": -109.4153,
"amount (tonne)": [100, 200, 300],
},
},
"disposal cost ($/tonne)": "50",
"disposal limit (tonne)": "",
"disposal limit (%)": "10",
"transportation cost ($/km/tonne)": "5",
"transportation energy (J/km/tonne)": "10",
"transportation emissions (tonne/km/tonne)": {
CO2: "0.5",
},
x: null,
y: null,
},
// product using defaults
{
"initial amounts": {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
"amount (tonne)": [100, 200, 300],
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
"amount (tonne)": [100, 200, 300],
},
"Park County": {
"latitude (deg)": 44.4063,
"longitude (deg)": -109.4153,
"amount (tonne)": [100, 200, 300],
},
},
"disposal cost ($/tonne)": "50",
"disposal limit (tonne)": "",
"disposal limit (%)": "",
"transportation cost ($/km/tonne)": "5",
"transportation energy (J/km/tonne)": "",
"transportation emissions (tonne/km/tonne)": {},
x: null,
y: null,
},
];
const sampleProductsExported = [
// basic product
{
"initial amounts": {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
"amount (tonne)": [100, 200, 300],
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
"amount (tonne)": [100, 200, 300],
},
"Park County": {
"latitude (deg)": 44.4063,
"longitude (deg)": -109.4153,
"amount (tonne)": [100, 200, 300],
},
},
"disposal cost ($/tonne)": [50, 100, 200],
"disposal limit (tonne)": [30, 30, 30],
"transportation cost ($/km/tonne)": [0, 0, 0],
"transportation energy (J/km/tonne)": [10, 10, 10],
"transportation emissions (tonne/km/tonne)": {
CO2: [0.5, 0.5, 0.5],
},
},
// product with percentage disposal limit
{
"initial amounts": {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
"amount (tonne)": [100, 200, 300],
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
"amount (tonne)": [100, 200, 300],
},
"Park County": {
"latitude (deg)": 44.4063,
"longitude (deg)": -109.4153,
"amount (tonne)": [100, 200, 300],
},
},
"disposal cost ($/tonne)": [50, 50, 50],
"disposal limit (tonne)": [30, 60, 90],
"transportation cost ($/km/tonne)": [5, 5, 5],
"transportation energy (J/km/tonne)": [10, 10, 10],
"transportation emissions (tonne/km/tonne)": {
CO2: [0.5, 0.5, 0.5],
},
},
// product using defaults
{
"initial amounts": {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
"amount (tonne)": [100, 200, 300],
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
"amount (tonne)": [100, 200, 300],
},
"Park County": {
"latitude (deg)": 44.4063,
"longitude (deg)": -109.4153,
"amount (tonne)": [100, 200, 300],
},
},
"disposal cost ($/tonne)": [50, 50, 50],
"transportation cost ($/km/tonne)": [5, 5, 5],
},
];
const samplePlantsOriginal = [
// basic plant
{
input: "Baled agricultural biomass",
"outputs (tonne/tonne)": {
"Hydrogen gas": 0.095,
"Carbon dioxide": 1.164,
Tar: 0,
},
locations: {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
"area cost factor": 1.0,
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
"area cost factor": 0.5,
},
},
"disposal cost ($/tonne)": {
"Hydrogen gas": "0",
"Carbon dioxide": "0",
Tar: "200",
},
"disposal limit (tonne)": {
"Hydrogen gas": "10",
"Carbon dioxide": "",
Tar: "",
},
"emissions (tonne/tonne)": {
CO2: "100",
},
storage: {
"cost ($/tonne)": "5",
"limit (tonne)": "10000",
},
"maximum capacity (tonne)": "730000",
"minimum capacity (tonne)": "182500",
"opening cost (max capacity) ($)": "300000",
"opening cost (min capacity) ($)": "200000",
"fixed operating cost (max capacity) ($)": "7000",
"fixed operating cost (min capacity) ($)": "5000",
"variable operating cost ($/tonne)": "10",
x: null,
y: null,
},
// plant with fixed capacity
{
input: "Baled agricultural biomass",
"outputs (tonne/tonne)": {
"Hydrogen gas": 0.095,
"Carbon dioxide": 1.164,
Tar: 0.06,
},
"energy (GJ/tonne)": "50",
locations: {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
"area cost factor": 1.0,
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
"area cost factor": 0.5,
},
},
"disposal cost ($/tonne)": {
"Hydrogen gas": "0",
"Carbon dioxide": "0",
Tar: "200",
},
"disposal limit (tonne)": {
"Hydrogen gas": "10",
"Carbon dioxide": "",
Tar: "",
},
"emissions (tonne/tonne)": {
CO2: "100",
},
storage: {
"cost ($/tonne)": "5",
"limit (tonne)": "10000",
},
"maximum capacity (tonne)": "182500",
"minimum capacity (tonne)": "182500",
"opening cost (max capacity) ($)": "200000",
"opening cost (min capacity) ($)": "200000",
"fixed operating cost (max capacity) ($)": "5000",
"fixed operating cost (min capacity) ($)": "5000",
"variable operating cost ($/tonne)": "10",
x: null,
y: null,
},
// plant with defaults
{
input: "Baled agricultural biomass",
"outputs (tonne/tonne)": {
"Hydrogen gas": 0.095,
"Carbon dioxide": 1.164,
Tar: 0.06,
},
"energy (GJ/tonne)": "50",
locations: {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
"area cost factor": 1.0,
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
"area cost factor": 0.5,
},
},
"disposal cost ($/tonne)": {
"Hydrogen gas": "",
"Carbon dioxide": "",
Tar: "",
},
"disposal limit (tonne)": {
"Hydrogen gas": "",
"Carbon dioxide": "",
Tar: "",
},
"emissions (tonne/tonne)": {
CO2: "100",
},
storage: {
"cost ($/tonne)": "5",
"limit (tonne)": "10000",
},
"maximum capacity (tonne)": "730000",
"minimum capacity (tonne)": "182500",
"opening cost (max capacity) ($)": "300000",
"opening cost (min capacity) ($)": "200000",
"fixed operating cost (max capacity) ($)": "7000",
"fixed operating cost (min capacity) ($)": "5000",
"variable operating cost ($/tonne)": "10",
x: null,
y: null,
},
];
const samplePlantsExported = [
//basic plant
{
input: "Baled agricultural biomass",
"outputs (tonne/tonne)": {
"Hydrogen gas": 0.095,
"Carbon dioxide": 1.164,
Tar: 0,
},
locations: {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
disposal: {
"Hydrogen gas": {
"cost ($/tonne)": [0, 0, 0],
"limit (tonne)": [10, 10, 10],
},
"Carbon dioxide": {
"cost ($/tonne)": [0, 0, 0],
},
Tar: {
"cost ($/tonne)": [200, 400, 800],
},
},
storage: {
"cost ($/tonne)": [5, 10, 20],
"limit (tonne)": 10000,
},
"capacities (tonne)": {
182500: {
"opening cost ($)": [200000, 400000, 800000],
"fixed operating cost ($)": [5000, 10000, 20000],
"variable operating cost ($/tonne)": [10, 20, 40],
},
730000: {
"opening cost ($)": [300000, 600000, 1200000],
"fixed operating cost ($)": [7000, 14000, 28000],
"variable operating cost ($/tonne)": [10, 20, 40],
},
},
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
disposal: {
"Hydrogen gas": {
"cost ($/tonne)": [0, 0, 0],
"limit (tonne)": [10, 10, 10],
},
"Carbon dioxide": {
"cost ($/tonne)": [0, 0, 0],
},
Tar: {
"cost ($/tonne)": [100, 200.0, 400],
},
},
storage: {
"cost ($/tonne)": [2.5, 5, 10],
"limit (tonne)": 10000,
},
"capacities (tonne)": {
182500: {
"opening cost ($)": [100000, 200000, 400000],
"fixed operating cost ($)": [2500, 5000, 10000],
"variable operating cost ($/tonne)": [5, 10, 20],
},
730000: {
"opening cost ($)": [150000, 300000, 600000],
"fixed operating cost ($)": [3500, 7000, 14000],
"variable operating cost ($/tonne)": [5, 10, 20],
},
},
},
},
"emissions (tonne/tonne)": {
CO2: [100, 100, 100],
},
},
// plant with fixed capacity
{
input: "Baled agricultural biomass",
"outputs (tonne/tonne)": {
"Hydrogen gas": 0.095,
"Carbon dioxide": 1.164,
Tar: 0.06,
},
"energy (GJ/tonne)": [50, 50, 50],
locations: {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
disposal: {
"Hydrogen gas": {
"cost ($/tonne)": [0, 0, 0],
"limit (tonne)": [10, 10, 10],
},
"Carbon dioxide": {
"cost ($/tonne)": [0, 0, 0],
},
Tar: {
"cost ($/tonne)": [200.0, 200.0, 200.0],
},
},
storage: {
"cost ($/tonne)": [5, 5, 5],
"limit (tonne)": 10000,
},
"capacities (tonne)": {
182500: {
"opening cost ($)": [200000, 200000, 200000],
"fixed operating cost ($)": [5000, 5000, 5000],
"variable operating cost ($/tonne)": [10, 10, 10],
},
},
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
disposal: {
"Hydrogen gas": {
"cost ($/tonne)": [0, 0, 0],
"limit (tonne)": [10, 10, 10],
},
"Carbon dioxide": {
"cost ($/tonne)": [0, 0, 0],
},
Tar: {
"cost ($/tonne)": [100.0, 100.0, 100.0],
},
},
storage: {
"cost ($/tonne)": [2.5, 2.5, 2.5],
"limit (tonne)": 10000,
},
"capacities (tonne)": {
182500: {
"opening cost ($)": [100000, 100000, 100000],
"fixed operating cost ($)": [2500, 2500, 2500],
"variable operating cost ($/tonne)": [5, 5, 5],
},
},
},
},
"emissions (tonne/tonne)": {
CO2: [100, 100, 100],
},
},
// plant with defaults
{
input: "Baled agricultural biomass",
"outputs (tonne/tonne)": {
"Hydrogen gas": 0.095,
"Carbon dioxide": 1.164,
Tar: 0.06,
},
"energy (GJ/tonne)": [50, 50, 50],
locations: {
"Washakie County": {
"latitude (deg)": 43.8356,
"longitude (deg)": -107.6602,
disposal: {},
storage: {
"cost ($/tonne)": [5, 5, 5],
"limit (tonne)": 10000,
},
"capacities (tonne)": {
182500: {
"opening cost ($)": [200000, 200000, 200000],
"fixed operating cost ($)": [5000, 5000, 5000],
"variable operating cost ($/tonne)": [10, 10, 10],
},
730000: {
"opening cost ($)": [300000, 300000, 300000],
"fixed operating cost ($)": [7000, 7000, 7000],
"variable operating cost ($/tonne)": [10, 10, 10],
},
},
},
"Platte County": {
"latitude (deg)": 42.1314,
"longitude (deg)": -104.9676,
disposal: {},
storage: {
"cost ($/tonne)": [2.5, 2.5, 2.5],
"limit (tonne)": 10000,
},
"capacities (tonne)": {
182500: {
"opening cost ($)": [100000, 100000, 100000],
"fixed operating cost ($)": [2500, 2500, 2500],
"variable operating cost ($/tonne)": [5, 5, 5],
},
730000: {
"opening cost ($)": [150000, 150000, 150000],
"fixed operating cost ($)": [3500, 3500, 3500],
"variable operating cost ($/tonne)": [5, 5, 5],
},
},
},
},
"emissions (tonne/tonne)": {
CO2: [100, 100, 100],
},
},
];
const sampleParameters = [
{
"time horizon (years)": "3",
"inflation rate (%)": "100",
},
{
"time horizon (years)": "3",
"inflation rate (%)": "0",
},
{
"time horizon (years)": "3",
"inflation rate (%)": "0",
},
];
test("export products", () => {
for (let i = 0; i < sampleProductsOriginal.length; i++) {
const original = sampleProductsOriginal[i];
const exported = sampleProductsExported[i];
expect(exportProduct(original, sampleParameters[i])).toEqual(exported);
const [recoveredProd, recoveredParams] = importProduct(exported);
expect(recoveredProd).toEqual(original);
expect(recoveredParams).toEqual(sampleParameters[i]);
}
});
test("export plants", () => {
for (let i = 0; i < samplePlantsOriginal.length; i++) {
const original = samplePlantsOriginal[i];
const exported = samplePlantsExported[i];
expect(exportPlant(original, sampleParameters[i])).toEqual(exported);
const [recoveredPlant, recoveredParams] = importPlant(exported);
expect(recoveredPlant).toEqual(original);
expect(recoveredParams).toEqual(sampleParameters[i]);
}
});
test("importList", () => {
expect(importList("invalid")).toEqual("invalid");
expect(importList([1, 1, 1])).toEqual("1");
expect(importList([1, 2, 3])).toEqual("[1,2,3]");
expect(importList(["A", "A", "A"])).toEqual("A");
});
test("importDict", () => {
expect(importDict({ a: [5, 5, 5] })).toEqual({ a: "5" });
expect(importDict({ a: [1, 2, 3] })).toEqual({ a: "[1,2,3]" });
expect(importDict({ a: "invalid" })).toEqual({ a: "invalid" });
});

View File

@@ -0,0 +1,182 @@
const Ajv = require("ajv");
const ajv = new Ajv();
const schema = {
$schema: "http://json-schema.org/draft-07/schema#",
$id: "https://anl-ceeesa.github.io/RELOG/input",
title: "Schema for RELOG Input File",
definitions: {
TimeSeries: {
type: "array",
items: {
type: "number",
},
},
Parameters: {
type: "object",
properties: {
"time horizon (years)": {
type: "number",
},
},
required: ["time horizon (years)"],
},
Plant: {
type: "object",
additionalProperties: {
type: "object",
properties: {
input: {
type: "string",
},
"outputs (tonne/tonne)": {
type: "object",
additionalProperties: {
type: "number",
},
},
"energy (GJ/tonne)": {
$ref: "#/definitions/TimeSeries",
},
"emissions (tonne/tonne)": {
type: "object",
additionalProperties: {
$ref: "#/definitions/TimeSeries",
},
},
locations: {
$ref: "#/definitions/PlantLocation",
},
},
required: ["input", "locations"],
},
},
PlantLocation: {
type: "object",
additionalProperties: {
type: "object",
properties: {
location: {
type: "string",
},
"latitude (deg)": {
type: "number",
},
"longitude (deg)": {
type: "number",
},
disposal: {
type: "object",
additionalProperties: {
type: "object",
properties: {
"cost ($/tonne)": {
$ref: "#/definitions/TimeSeries",
},
"limit (tonne)": {
$ref: "#/definitions/TimeSeries",
},
},
required: ["cost ($/tonne)"],
},
},
storage: {
type: "object",
properties: {
"cost ($/tonne)": {
$ref: "#/definitions/TimeSeries",
},
"limit (tonne)": {
type: "number",
},
},
required: ["cost ($/tonne)", "limit (tonne)"],
},
"capacities (tonne)": {
type: "object",
additionalProperties: {
type: "object",
properties: {
"variable operating cost ($/tonne)": {
$ref: "#/definitions/TimeSeries",
},
"fixed operating cost ($)": {
$ref: "#/definitions/TimeSeries",
},
"opening cost ($)": {
$ref: "#/definitions/TimeSeries",
},
},
required: [
"variable operating cost ($/tonne)",
"fixed operating cost ($)",
"opening cost ($)",
],
},
},
},
required: ["capacities (tonne)"],
},
},
InitialAmount: {
type: "object",
additionalProperties: {
type: "object",
properties: {
location: {
type: "string",
},
"latitude (deg)": {
type: "number",
},
"longitude (deg)": {
type: "number",
},
"amount (tonne)": {
$ref: "#/definitions/TimeSeries",
},
},
required: ["amount (tonne)"],
},
},
Product: {
type: "object",
additionalProperties: {
type: "object",
properties: {
"transportation cost ($/km/tonne)": {
$ref: "#/definitions/TimeSeries",
},
"transportation energy (J/km/tonne)": {
$ref: "#/definitions/TimeSeries",
},
"transportation emissions (tonne/km/tonne)": {
type: "object",
additionalProperties: {
$ref: "#/definitions/TimeSeries",
},
},
"initial amounts": {
$ref: "#/definitions/InitialAmount",
},
},
required: ["transportation cost ($/km/tonne)"],
},
},
},
type: "object",
properties: {
parameters: {
$ref: "#/definitions/Parameters",
},
plants: {
$ref: "#/definitions/Plant",
},
products: {
$ref: "#/definitions/Product",
},
},
required: ["parameters", "plants", "products"],
};
export const validate = ajv.compile(schema);
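
A brief sketch of how the compiled validator above is used (the import path is assumed; the input is a minimal hypothetical case):

// Hypothetical usage of the Ajv validator compiled above.
import { validate } from "./schema"; // assumed module path

const ok = validate({
  parameters: { "time horizon (years)": 1 },
  plants: {},
  products: {},
});
if (!ok) console.error(validate.errors); // Ajv stores errors on the validator function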

View File

@@ -0,0 +1,26 @@
import styles from "./Button.module.css";
const Button = (props) => {
let className = styles.Button;
if (props.kind === "inline") {
className += " " + styles.inline;
}
let tooltip = "";
if (props.tooltip !== undefined) {
tooltip = <span className={styles.tooltip}>{props.tooltip}</span>;
}
return (
<button
className={className}
onClick={props.onClick}
disabled={props.disabled}
>
{tooltip}
{props.label}
</button>
);
};
export default Button;

View File

@@ -0,0 +1,67 @@
.Button {
padding: 6px 36px;
margin: 12px 6px;
line-height: 24px;
border: var(--box-border);
/* background-color: white; */
box-shadow: var(--box-shadow);
border-radius: var(--border-radius);
cursor: pointer;
color: rgba(0, 0, 0, 0.8);
text-transform: uppercase;
font-weight: bold;
font-size: 12px;
background: linear-gradient(rgb(255, 255, 255) 25%, rgb(245, 245, 245) 100%);
}
.Button:hover {
background: rgb(245, 245, 245);
}
.Button:active {
background: rgba(220, 220, 220);
}
.inline {
padding: 0 12px;
margin: 2px 4px 2px 0;
height: 32px;
font-size: 11px;
}
/* .inline:last-child {
margin: 2px 1px;
} */
.tooltip {
visibility: hidden;
background-color: #333;
color: white;
opacity: 0%;
width: 180px;
margin-top: 36px;
margin-left: -180px;
position: absolute;
z-index: 100;
text-transform: none;
font-size: 13px;
border-radius: 4px;
box-shadow: 4px 4px 8px rgba(0, 0, 0, 0.25);
line-height: 18px;
padding: 6px;
transition: opacity 0.5s;
font-weight: normal;
text-align: left;
padding: 6px 12px;
}
.Button:hover .tooltip {
visibility: visible;
opacity: 100%;
transition: opacity 0.5s;
}
.Button:disabled {
color: rgba(0, 0, 0, 0.25);
cursor: default;
}

View File

@@ -0,0 +1,7 @@
import styles from "./Card.module.css";
const Card = (props) => {
return <div className={styles.Card}>{props.children}</div>;
};
export default Card;

View File

@@ -0,0 +1,22 @@
.Card {
border: var(--box-border);
box-shadow: var(--box-shadow);
border-radius: var(--border-radius);
background-color: white;
padding: 12px;
min-height: 24px;
}
.Card h1 {
margin: 12px -12px 0px -12px;
padding: 6px 12px 0px 12px;
font-size: 14px;
line-height: 35px;
border-top: 1px solid #ddd;
}
.Card h1:first-child {
margin: -12px -12px 0px -12px;
border-top: none;
background: none;
}

View File

@@ -0,0 +1,91 @@
import form_styles from "./Form.module.css";
import Button from "./Button";
import { validate } from "./Form";
const DictInputRow = (props) => {
const dict = { ...props.value };
if (!props.disableKeys) {
dict[""] = "0";
}
let unit = "";
if (props.unit) {
unit = <span className={form_styles.FormRow_unit}>({props.unit})</span>;
}
let tooltip = "";
if (props.tooltip !== undefined) {
tooltip = <Button label="?" kind="inline" tooltip={props.tooltip} />;
}
const onChangeValue = (key, v) => {
const newDict = { ...dict };
newDict[key] = v;
props.onChange(newDict);
};
const onChangeKey = (prevKey, newKey) => {
const newDict = renameKey(dict, prevKey, newKey);
if (!("" in newDict)) newDict[""] = "";
props.onChange(newDict);
};
const form = [];
Object.keys(dict).forEach((key, index) => {
let label = (
<span>
{props.label} {unit}
</span>
);
if (index > 0) {
label = "";
}
let isValid = true;
if (props.validate !== undefined) {
isValid = validate(props.validate, dict[key]);
}
let className = "";
if (!isValid) className = form_styles.invalid;
form.push(
<div className={form_styles.FormRow} key={index}>
<label>{label}</label>
<input
type="text"
data-index={index}
value={key}
placeholder={props.keyPlaceholder}
disabled={props.disableKeys}
onChange={(e) => onChangeKey(key, e.target.value)}
/>
<input
type="text"
data-index={index}
value={dict[key]}
placeholder={props.valuePlaceholder}
className={className}
onChange={(e) => onChangeValue(key, e.target.value)}
/>
{tooltip}
</div>
);
});
return <>{form}</>;
};
export function renameKey(obj, prevKey, newKey) {
const keys = Object.keys(obj);
return keys.reduce((acc, val) => {
if (val === prevKey) {
acc[newKey] = obj[prevKey];
} else {
acc[val] = obj[val];
}
return acc;
}, {});
}
export default DictInputRow;
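
For reference, a small example of renameKey above (hypothetical keys): it preserves the entry's value and the key order while the user edits the key text.

// Hypothetical example: renaming a dictionary key in place.
renameKey({ CO2: "0.5", CH4: "0.1" }, "CO2", "CO2e");
// -> { CO2e: "0.5", CH4: "0.1" }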

View File

@@ -0,0 +1,59 @@
import form_styles from "./Form.module.css";
import Button from "./Button";
import { useRef } from "react";
const FileInputRow = (props) => {
let tooltip = "";
if (props.tooltip !== undefined) {
tooltip = <Button label="?" kind="inline" tooltip={props.tooltip} />;
}
const fileElem = useRef();
const onClickUpload = () => {
fileElem.current.click();
};
const onFileSelected = () => {
const file = fileElem.current.files[0];
if (file) {
const reader = new FileReader();
reader.addEventListener("load", () => {
props.onFile(reader.result);
});
reader.readAsText(file);
}
fileElem.current.value = "";
};
return (
<div className={form_styles.FormRow}>
<label>{props.label}</label>
<input type="text" value={props.value} disabled="disabled" />
<Button label="Upload" kind="inline" onClick={onClickUpload} />
<Button
label="Download"
kind="inline"
onClick={props.onDownload}
disabled={props.disableDownload}
/>
<Button
label="Clear"
kind="inline"
onClick={props.onClear}
disabled={props.disableClear}
/>
<Button label="Template" kind="inline" onClick={props.onTemplate} />
{tooltip}
<input
type="file"
ref={fileElem}
accept={props.accept}
style={{ display: "none" }}
onChange={onFileSelected}
/>
</div>
);
};
export default FileInputRow;

View File

@@ -0,0 +1,15 @@
import styles from "./Footer.module.css";
const Footer = () => {
return (
<div className={styles.Footer}>
<p>RELOG: Reverse Logistics Optimization</p>
<p>
Copyright &copy; 2020&mdash;2022, UChicago Argonne, LLC. All Rights
Reserved.
</p>
</div>
);
};
export default Footer;

View File

@@ -0,0 +1,8 @@
.Footer {
padding: 12px;
color: rgba(255, 255, 255, 0.5);
text-align: center;
font-size: 14px;
line-height: 8px;
min-width: 900px;
}

View File

@@ -0,0 +1,19 @@
const VALIDATION_REGEX = {
int: new RegExp("^[0-9]+$"),
intList: new RegExp("[[0-9]*]$"),
float: new RegExp("^[0-9]*\\.?[0-9]*$"),
floatList: new RegExp("^[?[0-9,.]*]?$"),
};
export const validate = (kind, value) => {
if (!VALIDATION_REGEX[kind].test(value)) {
return false;
}
return true;
};
const Form = (props) => {
return <>{props.children}</>;
};
export default Form;
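
A few example inputs for the validate helper above (a sketch; the strings stand in for raw form values):

// Hypothetical form values and the resulting verdicts.
validate("int", "42");              // true
validate("float", "1.5");           // true
validate("float", "abc");           // false
validate("floatList", "[1,2.5,3]"); // true
validate("floatList", "5");         // true (a bare number is accepted too)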

View File

@@ -0,0 +1,28 @@
.FormRow {
display: flex;
line-height: 24px;
}
.FormRow label {
width: 350px;
padding: 6px 12px;
text-align: right;
}
.FormRow input {
flex: 1;
font-family: monospace;
border: var(--box-border);
border-radius: var(--border-radius);
padding: 4px;
margin: 2px 3px;
}
.FormRow_unit {
color: rgba(0, 0, 0, 0.4);
}
.invalid {
border: 2px solid #faa !important;
background-color: rgba(255, 0, 0, 0.05);
}

View File

@@ -0,0 +1,17 @@
import styles from "./Header.module.css";
const Header = (props) => {
return (
<div className={styles.HeaderBox}>
<div className={styles.HeaderContent}>
<h1>RELOG</h1>
<h2>{props.title}</h2>
<div style={{ float: "right", paddingTop: "5px" }}>
{props.children}
</div>
</div>
</div>
);
};
export default Header;

View File

@@ -0,0 +1,28 @@
.HeaderBox {
background-color: white;
border-bottom: var(--box-border);
box-shadow: var(--box-shadow);
padding: 0;
margin: 0;
}
.HeaderContent {
margin: 0 auto;
max-width: var(--site-width);
}
.HeaderContent h1,
.HeaderContent h2 {
line-height: 48px;
font-size: 28px;
padding: 12px;
margin: 0;
display: inline-block;
vertical-align: middle;
}
.HeaderContent h2 {
font-size: 22px;
font-weight: normal;
color: rgba(0, 0, 0, 0.6);
}

View File

@@ -0,0 +1,7 @@
import styles from "./Section.module.css";
const Section = (props) => {
return <h2 className={styles.Section}>{props.title}</h2>;
};
export default Section;

View File

@@ -0,0 +1,6 @@
.Section {
line-height: 36px;
margin: 12px;
font-size: 16px;
font-weight: bold;
}

View File

@@ -0,0 +1,44 @@
import form_styles from "./Form.module.css";
import Button from "./Button";
import { validate } from "./Form";
import React from "react";
const TextInputRow = React.forwardRef((props, ref) => {
let unit = "";
if (props.unit) {
unit = <span className={form_styles.FormRow_unit}>({props.unit})</span>;
}
let tooltip = "";
if (props.tooltip !== undefined) {
tooltip = <Button label="?" kind="inline" tooltip={props.tooltip} />;
}
let isValid = true;
if (!props.disabled && props.validate !== undefined) {
isValid = validate(props.validate, props.value);
}
let className = "";
if (!isValid) className = form_styles.invalid;
return (
<div className={form_styles.FormRow}>
<label>
{props.label} {unit}
</label>
<input
type="text"
placeholder={props.default}
disabled={props.disabled}
value={props.value}
className={className}
onChange={(e) => props.onChange(e.target.value)}
ref={ref}
/>
{tooltip}
</div>
);
});
export default TextInputRow;

109
relog-web/src/index.css Normal file
View File

@@ -0,0 +1,109 @@
:root {
--site-width: 1200px;
--box-border: 1px solid rgba(0, 0, 0, 0.2);
--box-shadow: 0px 2px 4px -3px rgba(0, 0, 0, 0.2);
--border-radius: 4px;
--primary: #0d6efd;
}
html,
body {
margin: 0;
padding: 0;
border: 0;
font-family: sans-serif;
}
body {
background-color: #333;
color: rgba(0, 0, 0, 0.95);
}
#contentBackground {
background-color: #f6f6f6;
}
#content {
max-width: var(--site-width);
min-width: 900px;
margin: 0 auto;
padding: 1px 6px 32px 6px;
}
.react-flow__node.selected {
box-shadow: 2px 2px 4px rgba(0, 0, 0, 0.2) !important;
border-width: 2px !important;
margin-top: -1px !important;
margin-left: -1px !important;
border-radius: 8px !important;
}
.react-flow__handle {
width: 6px !important;
height: 6px !important;
background-color: white !important;
border: 1px solid black !important;
}
.react-flow__handle:hover {
background-color: black !important;
}
.react-flow__handle-right {
right: -4px !important;
}
.react-flow__handle-left {
left: -4px !important;
}
#messageTray {
max-width: var(--site-width);
margin: 0 auto;
position: fixed;
bottom: 12px;
left: 0;
right: 0;
z-index: 100;
}
#messageTray .message {
background-color: rgb(221, 69, 69);
color: #eee;
padding: 12px;
border-radius: var(--border-radius);
box-shadow: 4px 4px 8px rgba(0, 0, 0, 0.4);
display: flex;
margin-top: 12px;
}
#messageTray .message p {
flex: 1;
margin: 0;
padding: 12px 0;
}
#messageTray .message button {
margin: 0;
background: transparent;
border: 1px solid #eee;
color: #eee;
float: right;
padding: 0 24px;
line-height: 6px;
}
#messageTray .message button:hover {
background: rgba(255, 255, 255, 0.05);
}
#messageTray .message button:active {
background: rgba(255, 255, 255, 0.1);
}
.nodata {
text-align: center;
padding: 24px 0;
color: #888;
margin: 0;
}

27
relog-web/src/index.js Normal file
View File

@@ -0,0 +1,27 @@
import React from "react";
import ReactDOM from "react-dom";
import "./index.css";
import InputPage from "./casebuilder/InputPage";
import SolverPage from "./solver/SolverPage";
import { Route, BrowserRouter, Switch, Redirect } from "react-router-dom";
export const SERVER_URL = "";
ReactDOM.render(
<BrowserRouter>
<React.StrictMode>
<Switch>
<Route path="/casebuilder">
<InputPage />
</Route>
<Route path="/solver/:job_id">
<SolverPage />
</Route>
<Route path="/">
<Redirect to="/casebuilder" />
</Route>
</Switch>
</React.StrictMode>
</BrowserRouter>,
document.getElementById("root")
);

View File

@@ -0,0 +1,46 @@
import { useState } from "react";
import { useEffect } from "react";
import Section from "../common/Section";
import Card from "../common/Card";
import styles from "./FilesBlock.module.css";
import { SERVER_URL } from "..";
const FilesBlock = (props) => {
const [filesFound, setFilesFound] = useState(false);
const fetchFiles = async () => {
const response = await fetch(`${SERVER_URL}/jobs/${props.job}/output.json`);
if (response.ok) {
setFilesFound(true);
}
};
// Fetch files periodically from the server
useEffect(() => {
fetchFiles();
if (!filesFound) {
const interval = setInterval(() => {
fetchFiles();
}, 1000);
return () => clearInterval(interval);
}
}, [filesFound]);
let content = <div className="nodata">No files available</div>;
if (filesFound) {
content = (
<div className={styles.files}>
<a href={`${SERVER_URL}/jobs/${props.job}/output.zip`}>output.zip</a>
</div>
);
}
return (
<>
<Section title="Output Files" />
<Card>{content}</Card>
</>
);
};
export default FilesBlock;

View File

@@ -0,0 +1,19 @@
.files a {
display: block;
padding: 16px;
text-decoration: none;
color: var(--primary);
}
.files a:hover {
background-color: var(--primary);
color: white;
border-radius: var(--border-radius);
}
.nodata {
text-align: center;
padding: 24px 0;
color: #888;
margin: 0;
}

View File

@@ -0,0 +1,47 @@
import { useState } from "react";
import { useEffect } from "react";
import Section from "../common/Section";
import Card from "../common/Card";
import styles from "./LogBlock.module.css";
import { useRef } from "react";
import { SERVER_URL } from "..";
const LogBlock = (props) => {
const [log, setLog] = useState();
const preRef = useRef(null);
const fetchLog = async () => {
const response = await fetch(`${SERVER_URL}/jobs/${props.job}/solve.log`);
const data = await response.text();
if (log !== data) {
setLog(data);
}
};
// Fetch log periodically from the server
useEffect(() => {
fetchLog();
const interval = setInterval(() => {
fetchLog();
}, 1000);
return () => clearInterval(interval);
}, []);
// Scroll to bottom whenever the log is updated
useEffect(() => {
preRef.current.scrollTop = preRef.current.scrollHeight;
}, [log]);
return (
<>
<Section title="Optimization Log" />
<Card>
<pre ref={preRef} className={styles.log}>
{log}
</pre>
</Card>
</>
);
};
export default LogBlock;

View File

@@ -0,0 +1,8 @@
.log {
max-height: 500px;
min-height: 500px;
border: 0;
margin: 0;
overflow: auto;
line-height: 1.4em;
}

View File

@@ -0,0 +1,238 @@
import * as d3 from "d3";
import { group } from "d3-array";
import * as L from "leaflet";
import "leaflet/dist/leaflet.css";
import { useEffect, useState } from "react";
import { SERVER_URL } from "..";
import Card from "../common/Card";
import Section from "../common/Section";
function drawMap(csv_plants, csv_tr) {
const mapLink = '<a href="http://openstreetmap.org">OpenStreetMap</a>';
const base = L.tileLayer(
"https://{s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}{r}.png",
{
attribution:
'&copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors &copy; <a href="https://carto.com/attributions">CARTO</a>',
subdomains: "abcd",
maxZoom: 10,
}
);
const plant_types = [...new Set(csv_plants.map((d) => d["plant type"]))];
plant_types.push("Multiple");
const plant_color = d3
.scaleOrdinal()
.domain(plant_types)
.range([
"#558B2F",
"#FF8F00",
"#0277BD",
"#AD1457",
"#00838F",
"#4527A0",
"#C62828",
"#424242",
]);
const plant_locations = d3
.nest()
.key((d) => d["location name"])
.rollup(function (v) {
return {
amount_processed: d3.sum(v, function (d) {
return d["amount processed (tonne)"];
}),
latitude: d3.mean(v, function (d) {
return d["latitude (deg)"];
}),
longitude: d3.mean(v, function (d) {
return d["longitude (deg)"];
}),
plant_types: [...new Set(v.map((d) => d["plant type"]))],
};
})
.entries(csv_plants);
const plant_scale = d3
.scaleSqrt()
.range([2, 10])
.domain([0, d3.max(plant_locations, (d) => d.value.amount_processed)]);
const plants_array = [];
plant_locations.forEach((d) => {
if (d.value.plant_types.length > 1) {
d.value.plant_type = "Multiple";
} else {
d.value.plant_type = d.value.plant_types[0];
}
const marker = L.circleMarker([d.value.latitude, d.value.longitude], {
id: "circleMarker",
className: "marker",
color: "#222",
weight: 1,
fillColor: plant_color(d.value.plant_type),
fillOpacity: 0.9,
radius: plant_scale(d.value.amount_processed),
});
const num = d.value.amount_processed.toFixed(2);
const num_parts = num.toString().split(".");
num_parts[0] = num_parts[0].replace(/\B(?=(\d{3})+(?!\d))/g, ",");
marker.bindTooltip(
`<b>${d.key}</b>
<br>
Amount processed:
${num_parts.join(".")}
<br>
Plant types:
${d.value.plant_types}`
);
plants_array.push(marker);
});
const collection_centers = d3
.nest()
.key((d) => d["source location name"])
.rollup(function (v) {
return {
source_lat: d3.mean(v, (d) => d["source latitude (deg)"]),
source_long: d3.mean(v, (d) => d["source longitude (deg)"]),
amount: d3.sum(v, (d) => d["amount (tonne)"]),
};
})
.entries(csv_tr);
//Color scale for the collection centers
const colors = d3
.scaleLog()
.domain([
d3.min(collection_centers, (d) => d.value.amount),
d3.max(collection_centers, (d) => d.value.amount),
])
.range(["#777", "#777"]);
//Plot the collection centers
const collection_array = [];
collection_centers.forEach(function (d) {
const marker = L.circleMarker([d.value.source_lat, d.value.source_long], {
color: "#000",
fillColor: colors(d.value.amount),
fillOpacity: 1,
radius: 1.25,
weight: 0,
className: "marker",
});
collection_array.push(marker);
});
const transportation_lines = group(
csv_tr,
(d) => d["source location name"],
(d) => d["destination location name"]
);
//Plot the transportation lines
const transport_array = [];
transportation_lines.forEach(function (d1) {
d1.forEach(function (d2) {
const object = d2[0];
const line = L.polyline(
[
[object["source latitude (deg)"], object["source longitude (deg)"]],
[
object["destination latitude (deg)"],
object["destination longitude (deg)"],
],
],
{
color: "#666",
stroke: true,
weight: 0.5,
opacity: Math.max(0.1, 0.5 / d1.size),
}
);
transport_array.push(line);
});
});
const plants = L.layerGroup(plants_array);
const cities = L.layerGroup(collection_array);
const transport = L.layerGroup(transport_array);
const baseMaps = {
"Open Street Map": base,
};
const overlayMaps = {
Plants: plants,
"Collection Centers": cities,
"Transportation Lines": transport,
};
cities.on({
add: function () {
cities.eachLayer((layer) => layer.bringToBack());
},
});
transport.on({
add: function () {
plants.eachLayer((layer) => layer.bringToFront());
},
});
function setHeight() {
let mapDiv = document.getElementById("map");
mapDiv.style.height = `${+mapDiv.offsetWidth * 0.55}px`;
}
//$(window).resize(setHeight);
setHeight();
const map = L.map("map", {
layers: [base, plants],
}).setView([37.8, -96.9], 4);
const svg6 = d3.select(map.getPanes().overlayPane).append("svg");
svg6.append("g").attr("class", "leaflet-zoom-hide");
L.control.layers(baseMaps, overlayMaps).addTo(map);
}
const MapBlock = (props) => {
const [filesFound, setFilesFound] = useState(false);
const fetchFiles = () => {
const file_prefix = `${SERVER_URL}/jobs/${props.job}/case`;
d3.csv(`${file_prefix}_plants.csv`).then((csv_plants) => {
d3.csv(`${file_prefix}_tr.csv`).then((csv_tr) => {
setFilesFound(true);
drawMap(csv_plants, csv_tr, file_prefix);
});
});
};
// Fetch files periodically from the server
useEffect(() => {
fetchFiles();
if (!filesFound) {
const interval = setInterval(() => {
fetchFiles();
}, 1000);
return () => clearInterval(interval);
}
}, [filesFound]);
return (
<>
<Section title="Map" />
<Card>
<div id="map">
<div className="nodata">No data available</div>
</div>
</Card>
</>
);
};
export default MapBlock;

View File

@@ -0,0 +1,28 @@
import React from "react";
import { useParams } from "react-router-dom";
import Footer from "../common/Footer";
import Header from "../common/Header";
import LogBlock from "./LogBlock";
import FilesBlock from "./FilesBlock";
import MapBlock from "./MapBlock";
const SolverPage = () => {
const params = useParams();
return (
<>
<Header title="Solver"></Header>
<div id="contentBackground">
{" "}
<div id="content">
<LogBlock job={params.job_id} />
<FilesBlock job={params.job_id} />
<MapBlock job={params.job_id} />
</div>
</div>
<Footer />
</>
);
};
export default SolverPage;

View File

@@ -3,9 +3,31 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 module RELOG
-include("dotdict.jl")
-include("instance.jl")
-include("graph.jl")
-include("model.jl")
-include("reports.jl")
+
+using Pkg
+
+version() = Pkg.dependencies()[Base.UUID("a2afcdf7-cf04-4913-85f9-c0d81ddf2008")].version
+
+include("instance/structs.jl")
+include("graph/structs.jl")
+include("instance/geodb.jl")
+include("graph/dist.jl")
+include("graph/build.jl")
+include("graph/csv.jl")
+include("instance/compress.jl")
+include("instance/parse.jl")
+include("instance/validate.jl")
+include("model/build.jl")
+include("model/getsol.jl")
+include("model/resolve.jl")
+include("model/solve.jl")
+include("reports/plant_emissions.jl")
+include("reports/plant_outputs.jl")
+include("reports/plants.jl")
+include("reports/products.jl")
+include("reports/tr_emissions.jl")
+include("reports/tr.jl")
+include("reports/write.jl")
+include("web/web.jl")
+
 end

View File

@@ -1,28 +0,0 @@
.navbar-default {
border-bottom: 0px;
background-color: #fff;
box-shadow: 0px 0px 15px rgba(0, 0, 0, 0.2);
}
a, .navbar-default a {
color: #06a !important;
font-weight: normal;
}
.disabled > a {
color: #999 !important;
}
.navbar-default a:hover,
.navbar-default .active,
.active > a {
background-color: #f0f0f0 !important;
}
.icon-bar {
background-color: #666 !important;
}
.navbar-collapse {
border-color: #fff !important;
}

View File

@@ -1,8 +0,0 @@
MathJax.Hub.Config({
"tex2jax": { inlineMath: [ [ '$', '$' ] ] }
});
MathJax.Hub.Config({
config: ["MMLorHTML.js"],
jax: ["input/TeX", "output/HTML-CSS", "output/NativeMML"],
extensions: ["MathMenu.js", "MathZoom.js"]
});

View File

@@ -1,68 +0,0 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
struct DotDict
inner::Dict
end
DotDict() = DotDict(Dict())
function Base.setproperty!(d::DotDict, key::Symbol, value)
setindex!(getfield(d, :inner), value, key)
end
function Base.getproperty(d::DotDict, key::Symbol)
(key == :inner ? getfield(d, :inner) : d.inner[key])
end
function Base.getindex(d::DotDict, key::Int64)
d.inner[Symbol(key)]
end
function Base.getindex(d::DotDict, key::Symbol)
d.inner[key]
end
function Base.keys(d::DotDict)
keys(d.inner)
end
function Base.values(d::DotDict)
values(d.inner)
end
function Base.iterate(d::DotDict)
iterate(values(d.inner))
end
function Base.iterate(d::DotDict, v::Int64)
iterate(values(d.inner), v)
end
function Base.length(d::DotDict)
length(values(d.inner))
end
function Base.show(io::IO, d::DotDict)
print(io, "DotDict with $(length(keys(d.inner))) entries:\n")
count = 0
for k in keys(d.inner)
count += 1
if count > 10
print(io, " ...\n")
break
end
print(io, " :$(k) => $(d.inner[k])\n")
end
end
function recursive_to_dot_dict(el)
if typeof(el) == Dict{String, Any}
return DotDict(Dict(Symbol(k) => recursive_to_dot_dict(el[k]) for k in keys(el)))
else
return el
end
end
export recursive_to_dot_dict

View File

@@ -2,71 +2,37 @@
 # Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
-using Geodesy
-
-abstract type Node
-end
-
-mutable struct Arc
-    source::Node
-    dest::Node
-    values::Dict{String, Float64}
-end
-
-mutable struct ProcessNode <: Node
-    index::Int
-    location::Plant
-    incoming_arcs::Array{Arc}
-    outgoing_arcs::Array{Arc}
-end
-
-mutable struct ShippingNode <: Node
-    index::Int
-    location::Union{Plant, CollectionCenter}
-    product::Product
-    incoming_arcs::Array{Arc}
-    outgoing_arcs::Array{Arc}
-end
-
-mutable struct Graph
-    process_nodes::Array{ProcessNode}
-    plant_shipping_nodes::Array{ShippingNode}
-    collection_shipping_nodes::Array{ShippingNode}
-    arcs::Array{Arc}
-end
-
 function build_graph(instance::Instance)::Graph
     arcs = []
     next_index = 0
     process_nodes = ProcessNode[]
     plant_shipping_nodes = ShippingNode[]
     collection_shipping_nodes = ShippingNode[]
-    process_nodes_by_input_product = Dict(product => ProcessNode[]
-                                          for product in instance.products)
-    shipping_nodes_by_plant = Dict(plant => []
-                                   for plant in instance.plants)
+    name_to_process_node_map = Dict{Tuple{AbstractString,AbstractString},ProcessNode}()
+    collection_center_to_node = Dict()
+    process_nodes_by_input_product =
+        Dict(product => ProcessNode[] for product in instance.products)
+    shipping_nodes_by_plant = Dict(plant => [] for plant in instance.plants)

     # Build collection center shipping nodes
     for center in instance.collection_centers
         node = ShippingNode(next_index, center, center.product, [], [])
         next_index += 1
+        collection_center_to_node[center] = node
         push!(collection_shipping_nodes, node)
     end

     # Build process and shipping nodes for plants
     for plant in instance.plants
         pn = ProcessNode(next_index, plant, [], [])
         next_index += 1
         push!(process_nodes, pn)
         push!(process_nodes_by_input_product[plant.input], pn)
+        name_to_process_node_map[(plant.plant_name, plant.location_name)] = pn
         for product in keys(plant.output)
             sn = ShippingNode(next_index, plant, product, [], [])
             next_index += 1
@@ -74,14 +40,17 @@ function build_graph(instance::Instance)::Graph
             push!(shipping_nodes_by_plant[plant], sn)
         end
     end

     # Build arcs from collection centers to plants, and from one plant to another
     for source in [collection_shipping_nodes; plant_shipping_nodes]
         for dest in process_nodes_by_input_product[source.product]
-            distance = calculate_distance(source.location.latitude,
-                                          source.location.longitude,
-                                          dest.location.latitude,
-                                          dest.location.longitude)
+            distance = _calculate_distance(
+                source.location.latitude,
+                source.location.longitude,
+                dest.location.latitude,
+                dest.location.longitude,
+                instance.distance_metric,
+            )
             values = Dict("distance" => distance)
             arc = Arc(source, dest, values)
             push!(source.outgoing_arcs, arc)
@@ -89,7 +58,7 @@ function build_graph(instance::Instance)::Graph
             push!(arcs, arc)
         end
     end

     # Build arcs from process nodes to shipping nodes within a plant
     for source in process_nodes
         plant = source.location
@@ -102,25 +71,26 @@ function build_graph(instance::Instance)::Graph
             push!(arcs, arc)
         end
     end
-    return Graph(process_nodes,
-                 plant_shipping_nodes,
-                 collection_shipping_nodes,
-                 arcs)
+    return Graph(
+        process_nodes,
+        plant_shipping_nodes,
+        collection_shipping_nodes,
+        arcs,
+        name_to_process_node_map,
+        collection_center_to_node,
+    )
 end

-function to_csv(graph::Graph)
-    result = ""
-    for a in graph.arcs
-        result *= "$(a.source.index),$(a.dest.index)\n"
-    end
-    return result
-end
-
-function calculate_distance(source_lat, source_lon, dest_lat, dest_lon)::Float64
-    x = LLA(source_lat, source_lon, 0.0)
-    y = LLA(dest_lat, dest_lon, 0.0)
-    return round(distance(x, y) / 1000.0, digits=2)
-end
+function print_graph_stats(instance::Instance, graph::Graph)::Nothing
+    @info @sprintf(" %12d time periods", instance.time)
+    @info @sprintf(" %12d process nodes", length(graph.process_nodes))
+    @info @sprintf(" %12d shipping nodes (plant)", length(graph.plant_shipping_nodes))
+    @info @sprintf(
+        " %12d shipping nodes (collection)",
+        length(graph.collection_shipping_nodes)
+    )
+    @info @sprintf(" %12d arcs", length(graph.arcs))
+    return
+end

11
src/graph/csv.jl Normal file
View File

@@ -0,0 +1,11 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
function to_csv(graph::Graph)
result = ""
for a in graph.arcs
result *= "$(a.source.index),$(a.dest.index)\n"
end
return result
end
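For context, a sketch of how this helper might be used to dump the arc list (the file names are illustrative; parsefile and build_graph are defined in the files below):

instance = RELOG.parsefile("instance.json")   # hypothetical input file
graph = RELOG.build_graph(instance)
open("arcs.csv", "w") do io
    write(io, RELOG.to_csv(graph))            # one "source,dest" line per arc
end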

60
src/graph/dist.jl Normal file
View File

@@ -0,0 +1,60 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Geodesy
using NearestNeighbors
using DataFrames
function _calculate_distance(
source_lat,
source_lon,
dest_lat,
dest_lon,
::EuclideanDistance,
)::Float64
x = LLA(source_lat, source_lon, 0.0)
y = LLA(dest_lat, dest_lon, 0.0)
return round(euclidean_distance(x, y) / 1000.0, digits = 3)
end
function _calculate_distance(
source_lat,
source_lon,
dest_lat,
dest_lon,
metric::KnnDrivingDistance,
)::Float64
if metric.tree === nothing
basedir = joinpath(dirname(@__FILE__), "..", "..", "data")
csv_filename = joinpath(basedir, "dist_driving.csv")
# Download pre-computed driving data
if !isfile(csv_filename)
_download_zip(
"https://axavier.org/RELOG/0.6/data/dist_driving_0b9a6ad6.zip",
basedir,
csv_filename,
0x0b9a6ad6,
)
end
# Fit kNN model
df = DataFrame(CSV.File(csv_filename, missingstring = "NaN"))
dropmissing!(df)
coords = Matrix(df[!, [:source_lat, :source_lon, :dest_lat, :dest_lon]])'
metric.ratios = Matrix(df[!, [:ratio]])
metric.tree = KDTree(coords)
end
# Compute Euclidean distance
dist_euclidean =
_calculate_distance(source_lat, source_lon, dest_lat, dest_lon, EuclideanDistance())
# Predict ratio
idxs, _ = knn(metric.tree, [source_lat, source_lon, dest_lat, dest_lon], 5)
ratio_pred = mean(metric.ratios[idxs])
dist_pred = round(dist_euclidean * ratio_pred, digits = 3)
isfinite(dist_pred) || error("non-finite distance detected: $dist_pred")
return dist_pred
end
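A rough sketch of calling the two metrics directly (the coordinates are illustrative; the driving variant downloads the pre-computed ratio dataset on first use):

# Approximate Chicago -> Detroit distance, in km (illustrative coordinates)
d_euclidean = _calculate_distance(41.88, -87.63, 42.33, -83.05, EuclideanDistance())
d_driving = _calculate_distance(41.88, -87.63, 42.33, -83.05, KnnDrivingDistance())
# d_driving scales the Euclidean distance by the mean driving/straight-line
# ratio of the 5 nearest neighbors in the pre-computed dataset.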

45
src/graph/structs.jl Normal file
View File

@@ -0,0 +1,45 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Geodesy
abstract type Node end
mutable struct Arc
source::Node
dest::Node
values::Dict{String,Float64}
end
mutable struct ProcessNode <: Node
index::Int
location::Plant
incoming_arcs::Vector{Arc}
outgoing_arcs::Vector{Arc}
end
mutable struct ShippingNode <: Node
index::Int
location::Union{Plant,CollectionCenter}
product::Product
incoming_arcs::Vector{Arc}
outgoing_arcs::Vector{Arc}
end
mutable struct Graph
process_nodes::Vector{ProcessNode}
plant_shipping_nodes::Vector{ShippingNode}
collection_shipping_nodes::Vector{ShippingNode}
arcs::Vector{Arc}
name_to_process_node_map::Dict{Tuple{AbstractString,AbstractString},ProcessNode}
collection_center_to_node::Dict{CollectionCenter,ShippingNode}
end
function Base.show(io::IO, instance::Graph)
print(io, "RELOG graph with ")
print(io, "$(length(instance.process_nodes)) process nodes, ")
print(io, "$(length(instance.plant_shipping_nodes)) plant shipping nodes, ")
print(io, "$(length(instance.collection_shipping_nodes)) collection shipping nodes, ")
print(io, "$(length(instance.arcs)) arcs")
end
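A short sketch using the two lookup maps added to Graph (the plant and location names are hypothetical; instance is assumed to be a parsed Instance):

graph = RELOG.build_graph(instance)
pn = graph.name_to_process_node_map[("mega plant", "chicago")]   # hypothetical names
sn = graph.collection_center_to_node[instance.collection_centers[1]]
length(pn.incoming_arcs), length(sn.outgoing_arcs)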

View File

@@ -1,281 +0,0 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataStructures
using JSON
using JSONSchema
using Printf
using Statistics
mutable struct Product
name::String
transportation_cost::Array{Float64}
transportation_energy::Array{Float64}
transportation_emissions::Dict{String, Array{Float64}}
end
mutable struct CollectionCenter
index::Int64
name::String
latitude::Float64
longitude::Float64
product::Product
amount::Array{Float64}
end
mutable struct PlantSize
capacity::Float64
variable_operating_cost::Array{Float64}
fixed_operating_cost::Array{Float64}
opening_cost::Array{Float64}
end
mutable struct Plant
index::Int64
plant_name::String
location_name::String
input::Product
output::Dict{Product, Float64}
latitude::Float64
longitude::Float64
disposal_limit::Dict{Product, Array{Float64}}
disposal_cost::Dict{Product, Array{Float64}}
sizes::Array{PlantSize}
energy::Array{Float64}
emissions::Dict{String, Array{Float64}}
storage_limit::Float64
storage_cost::Array{Float64}
end
mutable struct Instance
time::Int64
products::Array{Product, 1}
collection_centers::Array{CollectionCenter, 1}
plants::Array{Plant, 1}
building_period::Array{Int64}
end
function validate(json, schema)
result = JSONSchema.validate(json, schema)
if result !== nothing
if result isa JSONSchema.SingleIssue
path = join(result.path, "")
if length(path) == 0
path = "root"
end
msg = "$(result.msg) in $(path)"
else
msg = convert(String, result)
end
throw(msg)
end
end
function parsefile(path::String)::Instance
return RELOG.parse(JSON.parsefile(path))
end
function parse(json)::Instance
basedir = dirname(@__FILE__)
json_schema = JSON.parsefile("$basedir/schemas/input.json")
validate(json, Schema(json_schema))
T = json["parameters"]["time horizon (years)"]
json_schema["definitions"]["TimeSeries"]["minItems"] = T
json_schema["definitions"]["TimeSeries"]["maxItems"] = T
validate(json, Schema(json_schema))
building_period = [1]
if "building period (years)" in keys(json)
building_period = json["building period (years)"]
end
plants = Plant[]
products = Product[]
collection_centers = CollectionCenter[]
prod_name_to_product = Dict{String, Product}()
# Create products
for (product_name, product_dict) in json["products"]
cost = product_dict["transportation cost (\$/km/tonne)"]
energy = zeros(T)
emissions = Dict()
if "transportation energy (J/km/tonne)" in keys(product_dict)
energy = product_dict["transportation energy (J/km/tonne)"]
end
if "transportation emissions (tonne/km/tonne)" in keys(product_dict)
emissions = product_dict["transportation emissions (tonne/km/tonne)"]
end
product = Product(product_name, cost, energy, emissions)
push!(products, product)
prod_name_to_product[product_name] = product
# Create collection centers
if "initial amounts" in keys(product_dict)
for (center_name, center_dict) in product_dict["initial amounts"]
center = CollectionCenter(length(collection_centers) + 1,
center_name,
center_dict["latitude (deg)"],
center_dict["longitude (deg)"],
product,
center_dict["amount (tonne)"])
push!(collection_centers, center)
end
end
end
# Create plants
for (plant_name, plant_dict) in json["plants"]
input = prod_name_to_product[plant_dict["input"]]
output = Dict()
# Plant outputs
if "outputs (tonne/tonne)" in keys(plant_dict)
output = Dict(prod_name_to_product[key] => value
for (key, value) in plant_dict["outputs (tonne/tonne)"]
if value > 0)
end
energy = zeros(T)
emissions = Dict()
if "energy (GJ/tonne)" in keys(plant_dict)
energy = plant_dict["energy (GJ/tonne)"]
end
if "emissions (tonne/tonne)" in keys(plant_dict)
emissions = plant_dict["emissions (tonne/tonne)"]
end
for (location_name, location_dict) in plant_dict["locations"]
sizes = PlantSize[]
disposal_limit = Dict(p => [0.0 for t in 1:T] for p in keys(output))
disposal_cost = Dict(p => [0.0 for t in 1:T] for p in keys(output))
# Disposal
if "disposal" in keys(location_dict)
for (product_name, disposal_dict) in location_dict["disposal"]
limit = [1e8 for t in 1:T]
if "limit (tonne)" in keys(disposal_dict)
limit = disposal_dict["limit (tonne)"]
end
disposal_limit[prod_name_to_product[product_name]] = limit
disposal_cost[prod_name_to_product[product_name]] = disposal_dict["cost (\$/tonne)"]
end
end
# Capacities
for (capacity_name, capacity_dict) in location_dict["capacities (tonne)"]
push!(sizes, PlantSize(Base.parse(Float64, capacity_name),
capacity_dict["variable operating cost (\$/tonne)"],
capacity_dict["fixed operating cost (\$)"],
capacity_dict["opening cost (\$)"]))
end
length(sizes) > 1 || push!(sizes, sizes[1])
sort!(sizes, by = x -> x.capacity)
# Storage
storage_limit = 0
storage_cost = zeros(T)
if "storage" in keys(location_dict)
storage_dict = location_dict["storage"]
storage_limit = storage_dict["limit (tonne)"]
storage_cost = storage_dict["cost (\$/tonne)"]
end
# Validation: Capacities
if length(sizes) != 2
throw("At most two capacities are supported")
end
if sizes[1].variable_operating_cost != sizes[2].variable_operating_cost
throw("Variable operating costs must be the same for all capacities")
end
plant = Plant(length(plants) + 1,
plant_name,
location_name,
input,
output,
location_dict["latitude (deg)"],
location_dict["longitude (deg)"],
disposal_limit,
disposal_cost,
sizes,
energy,
emissions,
storage_limit,
storage_cost)
push!(plants, plant)
end
end
@info @sprintf("%12d collection centers", length(collection_centers))
@info @sprintf("%12d candidate plant locations", length(plants))
return Instance(T, products, collection_centers, plants, building_period)
end
"""
_compress(instance::Instance)
Create a single-period instance from a multi-period one. Specifically, it
replaces every time-dependent attribute, such as initial_amounts, with a
single-element list containing a sum, an average, or whatever aggregation
makes sense for that particular attribute.
"""
function _compress(instance::Instance)::Instance
T = instance.time
compressed = deepcopy(instance)
compressed.time = 1
compressed.building_period = [1]
# Compress products
for p in compressed.products
p.transportation_cost = [mean(p.transportation_cost)]
p.transportation_energy = [mean(p.transportation_energy)]
for (emission_name, emission_value) in p.transportation_emissions
p.transportation_emissions[emission_name] = [mean(emission_value)]
end
end
# Compress collection centers
for c in compressed.collection_centers
c.amount = [maximum(c.amount) * T]
end
# Compress plants
for plant in compressed.plants
plant.energy = [mean(plant.energy)]
for (emission_name, emission_value) in plant.emissions
plant.emissions[emission_name] = [mean(emission_value)]
end
for s in plant.sizes
s.capacity *= T
s.variable_operating_cost = [mean(s.variable_operating_cost)]
s.opening_cost = [s.opening_cost[1]]
s.fixed_operating_cost = [sum(s.fixed_operating_cost)]
end
for (prod_name, disp_limit) in plant.disposal_limit
plant.disposal_limit[prod_name] = [sum(disp_limit)]
end
for (prod_name, disp_cost) in plant.disposal_cost
plant.disposal_cost[prod_name] = [mean(disp_cost)]
end
end
return compressed
end

60
src/instance/compress.jl Normal file
View File

@@ -0,0 +1,60 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataStructures
using JSON
using JSONSchema
using Printf
using Statistics
"""
_compress(instance::Instance)
Create a single-period instance from a multi-period one. Specifically, it
replaces every time-dependent attribute, such as initial_amounts, with a
single-element list containing a sum, an average, or whatever aggregation
makes sense for that particular attribute.
"""
function _compress(instance::Instance)::Instance
T = instance.time
compressed = deepcopy(instance)
compressed.time = 1
compressed.building_period = [1]
# Compress products
for p in compressed.products
p.transportation_cost = [mean(p.transportation_cost)]
p.transportation_energy = [mean(p.transportation_energy)]
for (emission_name, emission_value) in p.transportation_emissions
p.transportation_emissions[emission_name] = [mean(emission_value)]
end
end
# Compress collection centers
for c in compressed.collection_centers
c.amount = [maximum(c.amount) * T]
end
# Compress plants
for plant in compressed.plants
plant.energy = [mean(plant.energy)]
for (emission_name, emission_value) in plant.emissions
plant.emissions[emission_name] = [mean(emission_value)]
end
for s in plant.sizes
s.capacity *= T
s.variable_operating_cost = [mean(s.variable_operating_cost)]
s.opening_cost = [s.opening_cost[1]]
s.fixed_operating_cost = [sum(s.fixed_operating_cost)]
end
for (prod_name, disp_limit) in plant.disposal_limit
plant.disposal_limit[prod_name] = [sum(disp_limit)]
end
for (prod_name, disp_cost) in plant.disposal_cost
plant.disposal_cost[prod_name] = [mean(disp_cost)]
end
end
return compressed
end
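A sketch of how the heuristic path uses this helper (the file name is illustrative):

instance = RELOG.parsefile("instance.json")
compressed = RELOG._compress(instance)
compressed.time             # 1
compressed.building_period  # [1]
# Plant capacities are multiplied by T; amounts and costs are summed or averaged.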

212
src/instance/geodb.jl Normal file
View File

@@ -0,0 +1,212 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using CRC
using CSV
using DataFrames
using Shapefile
using Statistics
using ZipFile
using ProgressBars
using OrderedCollections
import Downloads: download
import Base: parse
crc32 = crc(CRC_32)
struct GeoPoint
lat::Float64
lon::Float64
end
struct GeoRegion
centroid::GeoPoint
population::Int
GeoRegion(; centroid, population) = new(centroid, population)
end
DB_CACHE = Dict{String,Dict{String,GeoRegion}}()
function centroid(geom::Shapefile.Polygon)::GeoPoint
x_max, x_min, y_max, y_min = -Inf, Inf, -Inf, Inf
for p in geom.points
x_max = max(x_max, p.x)
x_min = min(x_min, p.x)
y_max = max(y_max, p.y)
y_min = min(y_min, p.y)
end
x_center = (x_max + x_min) / 2.0
y_center = (y_max + y_min) / 2.0
return GeoPoint(round(y_center, digits = 5), round(x_center, digits = 5))
end
function _download_file(url, output, expected_crc32)::Nothing
if isfile(output)
return
end
mkpath(dirname(output))
@info "Downloading: $url"
fname = download(url)
actual_crc32 = open(crc32, fname)
expected_crc32 == actual_crc32 || error("CRC32 mismatch")
cp(fname, output)
return
end
function _download_zip(url, outputdir, expected_output_file, expected_crc32)::Nothing
if isfile(expected_output_file)
return
end
mkpath(outputdir)
@info "Downloading: $url"
zip_filename = download(url)
actual_crc32 = open(crc32, zip_filename)
expected_crc32 == actual_crc32 || error("CRC32 mismatch")
open(zip_filename) do zip_file
zr = ZipFile.Reader(zip_file)
for file in zr.files
open(joinpath(outputdir, file.name), "w") do output_file
write(output_file, read(file))
end
end
end
return
end
function _geodb_load_gov_census(;
db_name,
extract_cols,
shp_crc32,
shp_filename,
shp_url,
population_url,
population_crc32,
population_col,
population_preprocess,
population_join,
)::Dict{String,GeoRegion}
basedir = joinpath(dirname(@__FILE__), "..", "..", "data", db_name)
csv_filename = "$basedir/locations.csv"
if !isfile(csv_filename)
# Download required files
_download_zip(shp_url, basedir, joinpath(basedir, shp_filename), shp_crc32)
_download_file(population_url, "$basedir/population.csv", population_crc32)
# Read shapefile
@info "Processing: $shp_filename"
table = Shapefile.Table(joinpath(basedir, shp_filename))
geoms = Shapefile.shapes(table)
# Build empty dataframe
df = DataFrame()
cols = extract_cols(table, 1)
for k in keys(cols)
df[!, k] = []
end
df[!, "latitude"] = Float64[]
df[!, "longitude"] = Float64[]
# Add regions to dataframe
for (i, geom) in tqdm(enumerate(geoms))
c = centroid(geom)
cols = extract_cols(table, i)
push!(df, [values(cols)..., c.lat, c.lon])
end
sort!(df)
# Join with population data
population = DataFrame(CSV.File("$basedir/population.csv"))
population_preprocess(population)
population = population[:, [population_join, population_col]]
rename!(population, population_col => "population")
df = leftjoin(df, population, on = population_join)
# Write output
CSV.write(csv_filename, df)
end
if db_name ∉ keys(DB_CACHE)
csv = CSV.File(csv_filename)
DB_CACHE[db_name] = Dict(
string(row.id) => GeoRegion(
centroid = GeoPoint(row.latitude, row.longitude),
population = (row.population === missing ? 0 : row.population),
) for row in csv
)
end
return DB_CACHE[db_name]
end
# 2018 US counties
# -----------------------------------------------------------------------------
function _extract_cols_2018_us_county(
table::Shapefile.Table,
i::Int,
)::OrderedDict{String,Any}
return OrderedDict(
"id" => table.STATEFP[i] * table.COUNTYFP[i],
"statefp" => table.STATEFP[i],
"countyfp" => table.COUNTYFP[i],
"name" => table.NAME[i],
)
end
function _population_preprocess_2018_us_county(df)
df[!, "id"] = [@sprintf("%02d%03d", row.STATE, row.COUNTY) for row in eachrow(df)]
end
function _geodb_load_2018_us_county()::Dict{String,GeoRegion}
return _geodb_load_gov_census(
db_name = "2018-us-county",
extract_cols = _extract_cols_2018_us_county,
shp_crc32 = 0x83eaec6d,
shp_filename = "cb_2018_us_county_500k.shp",
shp_url = "https://www2.census.gov/geo/tiger/GENZ2018/shp/cb_2018_us_county_500k.zip",
population_url = "https://www2.census.gov/programs-surveys/popest/datasets/2010-2019/counties/totals/co-est2019-alldata.csv",
population_crc32 = 0xf85b0405,
population_col = "POPESTIMATE2019",
population_join = "id",
population_preprocess = _population_preprocess_2018_us_county,
)
end
# US States
# -----------------------------------------------------------------------------
function _extract_cols_us_state(table::Shapefile.Table, i::Int)::OrderedDict{String,Any}
return OrderedDict(
"id" => table.STUSPS[i],
"statefp" => parse(Int, table.STATEFP[i]),
"name" => table.NAME[i],
)
end
function _population_preprocess_us_state(df)
rename!(df, "STATE" => "statefp")
end
function _geodb_load_us_state()::Dict{String,GeoRegion}
return _geodb_load_gov_census(
db_name = "us-state",
extract_cols = _extract_cols_us_state,
shp_crc32 = 0x9469e5ca,
shp_filename = "cb_2018_us_state_500k.shp",
shp_url = "https://www2.census.gov/geo/tiger/GENZ2018/shp/cb_2018_us_state_500k.zip",
population_url = "http://www2.census.gov/programs-surveys/popest/datasets/2010-2019/national/totals/nst-est2019-alldata.csv",
population_crc32 = 0x191cc64c,
population_col = "POPESTIMATE2019",
population_join = "statefp",
population_preprocess = _population_preprocess_us_state,
)
end
function geodb_load(db_name::AbstractString)::Dict{String,GeoRegion}
db_name == "2018-us-county" && return _geodb_load_2018_us_county()
db_name == "us-state" && return _geodb_load_us_state()
error("Unknown database: $db_name")
end
function geodb_query(name)::GeoRegion
db_name, id = split(name, ":")
return geodb_load(db_name)[id]
end
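A hypothetical query sketch; "us-state:IL" is an illustrative id built from the database name and the region id column:

region = RELOG.geodb_query("us-state:IL")   # downloads census data on first use
region.centroid.lat, region.centroid.lon    # region centroid, in degrees
region.population                           # POPESTIMATE2019 value, or 0 if missing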

220
src/instance/parse.jl Normal file
View File

@@ -0,0 +1,220 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataStructures
using JSON
using JSONSchema
using Printf
using Statistics
function parsefile(path::String)::Instance
return RELOG.parse(JSON.parsefile(path))
end
function parse(json)::Instance
basedir = dirname(@__FILE__)
json_schema = JSON.parsefile("$basedir/../schemas/input.json")
validate(json, Schema(json_schema))
T = json["parameters"]["time horizon (years)"]
json_schema["definitions"]["TimeSeries"]["minItems"] = T
json_schema["definitions"]["TimeSeries"]["maxItems"] = T
validate(json, Schema(json_schema))
building_period = [1]
if "building period (years)" in keys(json["parameters"])
building_period = json["parameters"]["building period (years)"]
end
distance_metric = EuclideanDistance()
if "distance metric" in keys(json["parameters"])
metric_name = json["parameters"]["distance metric"]
if metric_name == "driving"
distance_metric = KnnDrivingDistance()
elseif metric_name == "Euclidean"
# nop
else
error("Unknown distance metric: $metric_name")
end
end
plants = Plant[]
products = Product[]
collection_centers = CollectionCenter[]
prod_name_to_product = Dict{String,Product}()
# Create products
for (product_name, product_dict) in json["products"]
cost = product_dict["transportation cost (\$/km/tonne)"]
energy = zeros(T)
emissions = Dict()
disposal_limit = zeros(T)
disposal_cost = zeros(T)
if "transportation energy (J/km/tonne)" in keys(product_dict)
energy = product_dict["transportation energy (J/km/tonne)"]
end
if "transportation emissions (tonne/km/tonne)" in keys(product_dict)
emissions = product_dict["transportation emissions (tonne/km/tonne)"]
end
if "disposal limit (tonne)" in keys(product_dict)
disposal_limit = product_dict["disposal limit (tonne)"]
end
if "disposal cost (\$/tonne)" in keys(product_dict)
disposal_cost = product_dict["disposal cost (\$/tonne)"]
end
prod_centers = []
product = Product(
product_name,
cost,
energy,
emissions,
disposal_limit,
disposal_cost,
prod_centers,
)
push!(products, product)
prod_name_to_product[product_name] = product
# Create collection centers
if "initial amounts" in keys(product_dict)
for (center_name, center_dict) in product_dict["initial amounts"]
if "location" in keys(center_dict)
region = geodb_query(center_dict["location"])
center_dict["latitude (deg)"] = region.centroid.lat
center_dict["longitude (deg)"] = region.centroid.lon
end
center = CollectionCenter(
length(collection_centers) + 1,
center_name,
center_dict["latitude (deg)"],
center_dict["longitude (deg)"],
product,
center_dict["amount (tonne)"],
)
push!(prod_centers, center)
push!(collection_centers, center)
end
end
end
# Create plants
for (plant_name, plant_dict) in json["plants"]
input = prod_name_to_product[plant_dict["input"]]
output = Dict()
# Plant outputs
if "outputs (tonne/tonne)" in keys(plant_dict)
output = Dict(
prod_name_to_product[key] => value for
(key, value) in plant_dict["outputs (tonne/tonne)"] if value > 0
)
end
energy = zeros(T)
emissions = Dict()
if "energy (GJ/tonne)" in keys(plant_dict)
energy = plant_dict["energy (GJ/tonne)"]
end
if "emissions (tonne/tonne)" in keys(plant_dict)
emissions = plant_dict["emissions (tonne/tonne)"]
end
for (location_name, location_dict) in plant_dict["locations"]
sizes = PlantSize[]
disposal_limit = Dict(p => [0.0 for t = 1:T] for p in keys(output))
disposal_cost = Dict(p => [0.0 for t = 1:T] for p in keys(output))
# GeoDB
if "location" in keys(location_dict)
region = geodb_query(location_dict["location"])
location_dict["latitude (deg)"] = region.centroid.lat
location_dict["longitude (deg)"] = region.centroid.lon
end
# Disposal
if "disposal" in keys(location_dict)
for (product_name, disposal_dict) in location_dict["disposal"]
limit = [1e8 for t = 1:T]
if "limit (tonne)" in keys(disposal_dict)
limit = disposal_dict["limit (tonne)"]
end
disposal_limit[prod_name_to_product[product_name]] = limit
disposal_cost[prod_name_to_product[product_name]] =
disposal_dict["cost (\$/tonne)"]
end
end
# Capacities
for (capacity_name, capacity_dict) in location_dict["capacities (tonne)"]
push!(
sizes,
PlantSize(
Base.parse(Float64, capacity_name),
capacity_dict["variable operating cost (\$/tonne)"],
capacity_dict["fixed operating cost (\$)"],
capacity_dict["opening cost (\$)"],
),
)
end
length(sizes) > 1 || push!(sizes, sizes[1])
sort!(sizes, by = x -> x.capacity)
# Storage
storage_limit = 0
storage_cost = zeros(T)
if "storage" in keys(location_dict)
storage_dict = location_dict["storage"]
storage_limit = storage_dict["limit (tonne)"]
storage_cost = storage_dict["cost (\$/tonne)"]
end
# Validation: Capacities
if length(sizes) != 2
throw("At most two capacities are supported")
end
if sizes[1].variable_operating_cost != sizes[2].variable_operating_cost
throw("Variable operating costs must be the same for all capacities")
end
plant = Plant(
length(plants) + 1,
plant_name,
location_name,
input,
output,
location_dict["latitude (deg)"],
location_dict["longitude (deg)"],
disposal_limit,
disposal_cost,
sizes,
energy,
emissions,
storage_limit,
storage_cost,
)
push!(plants, plant)
end
end
@info @sprintf("%12d collection centers", length(collection_centers))
@info @sprintf("%12d candidate plant locations", length(plants))
return Instance(
T,
products,
collection_centers,
plants,
building_period,
distance_metric,
)
end
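For reference, a sketch of the parameters block this parser now recognizes, with a hedged usage line (the file name is made up):

# {
#   "parameters": {
#     "time horizon (years)": 2,
#     "building period (years)": [1],
#     "distance metric": "driving"
#   },
#   "products": {...},
#   "plants": {...}
# }
instance = RELOG.parsefile("instance.json")
instance.distance_metric isa RELOG.KnnDrivingDistance  # true when "driving" is selected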

71
src/instance/structs.jl Normal file
View File

@@ -0,0 +1,71 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataStructures
using JSON
using JSONSchema
using Printf
using Statistics
mutable struct Product
name::String
transportation_cost::Vector{Float64}
transportation_energy::Vector{Float64}
transportation_emissions::Dict{String,Vector{Float64}}
disposal_limit::Vector{Float64}
disposal_cost::Vector{Float64}
collection_centers::Vector
end
mutable struct CollectionCenter
index::Int64
name::String
latitude::Float64
longitude::Float64
product::Product
amount::Vector{Float64}
end
mutable struct PlantSize
capacity::Float64
variable_operating_cost::Vector{Float64}
fixed_operating_cost::Vector{Float64}
opening_cost::Vector{Float64}
end
mutable struct Plant
index::Int64
plant_name::String
location_name::String
input::Product
output::Dict{Product,Float64}
latitude::Float64
longitude::Float64
disposal_limit::Dict{Product,Vector{Float64}}
disposal_cost::Dict{Product,Vector{Float64}}
sizes::Vector{PlantSize}
energy::Vector{Float64}
emissions::Dict{String,Vector{Float64}}
storage_limit::Float64
storage_cost::Vector{Float64}
end
abstract type DistanceMetric end
Base.@kwdef mutable struct KnnDrivingDistance <: DistanceMetric
tree = nothing
ratios = nothing
end
mutable struct EuclideanDistance <: DistanceMetric end
mutable struct Instance
time::Int64
products::Vector{Product}
collection_centers::Vector{CollectionCenter}
plants::Vector{Plant}
building_period::Vector{Int64}
distance_metric::DistanceMetric
end

21
src/instance/validate.jl Normal file
View File

@@ -0,0 +1,21 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataStructures
using JSON
using JSONSchema
using Printf
using Statistics
function validate(json, schema)
result = JSONSchema.validate(json, schema)
if result !== nothing
if result isa JSONSchema.SingleIssue
msg = "$(result.reason) in $(result.path)"
else
msg = convert(String, result)
end
throw("Error parsing input file: $(msg)")
end
end
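A small sketch (paths are illustrative): validate throws a descriptive message when the input violates the schema.

schema = Schema(JSON.parsefile("schemas/input.json"))  # illustrative path
try
    validate(JSON.parse("{}"), schema)
catch err
    @warn "Invalid input" err
end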

View File

@@ -1,535 +0,0 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP, LinearAlgebra, Geodesy, Cbc, Clp, ProgressBars, Printf, DataStructures
mutable struct ManufacturingModel
mip::JuMP.Model
vars::DotDict
eqs::DotDict
instance::Instance
graph::Graph
end
function build_model(instance::Instance, graph::Graph, optimizer)::ManufacturingModel
model = ManufacturingModel(Model(optimizer), DotDict(), DotDict(), instance, graph)
create_vars!(model)
create_objective_function!(model)
create_shipping_node_constraints!(model)
create_process_node_constraints!(model)
return model
end
function create_vars!(model::ManufacturingModel)
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
vars.flow = Dict((a, t) => @variable(mip, lower_bound=0)
for a in graph.arcs, t in 1:T)
vars.dispose = Dict((n, t) => @variable(mip,
lower_bound=0,
upper_bound=n.location.disposal_limit[n.product][t])
for n in values(graph.plant_shipping_nodes), t in 1:T)
vars.store = Dict((n, t) => @variable(mip,
lower_bound=0,
upper_bound=n.location.storage_limit)
for n in values(graph.process_nodes), t in 1:T)
vars.process = Dict((n, t) => @variable(mip,
lower_bound = 0)
for n in values(graph.process_nodes), t in 1:T)
vars.open_plant = Dict((n, t) => @variable(mip, binary=true)
for n in values(graph.process_nodes), t in 1:T)
vars.is_open = Dict((n, t) => @variable(mip, binary=true)
for n in values(graph.process_nodes), t in 1:T)
vars.capacity = Dict((n, t) => @variable(mip,
lower_bound = 0,
upper_bound = n.location.sizes[2].capacity)
for n in values(graph.process_nodes), t in 1:T)
vars.expansion = Dict((n, t) => @variable(mip,
lower_bound = 0,
upper_bound = n.location.sizes[2].capacity -
n.location.sizes[1].capacity)
for n in values(graph.process_nodes), t in 1:T)
end
function slope_open(plant, t)
if plant.sizes[2].capacity <= plant.sizes[1].capacity
0.0
else
(plant.sizes[2].opening_cost[t] - plant.sizes[1].opening_cost[t]) /
(plant.sizes[2].capacity - plant.sizes[1].capacity)
end
end
function slope_fix_oper_cost(plant, t)
if plant.sizes[2].capacity <= plant.sizes[1].capacity
0.0
else
(plant.sizes[2].fixed_operating_cost[t] - plant.sizes[1].fixed_operating_cost[t]) /
(plant.sizes[2].capacity - plant.sizes[1].capacity)
end
end
function create_objective_function!(model::ManufacturingModel)
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
obj = AffExpr(0.0)
# Process node costs
for n in values(graph.process_nodes), t in 1:T
# Transportation and variable operating costs
for a in n.incoming_arcs
c = n.location.input.transportation_cost[t] * a.values["distance"]
add_to_expression!(obj, c, vars.flow[a, t])
end
# Opening costs
add_to_expression!(obj,
n.location.sizes[1].opening_cost[t],
vars.open_plant[n, t])
# Fixed operating costs (base)
add_to_expression!(obj,
n.location.sizes[1].fixed_operating_cost[t],
vars.is_open[n, t])
# Fixed operating costs (expansion)
add_to_expression!(obj,
slope_fix_oper_cost(n.location, t),
vars.expansion[n, t])
# Processing costs
add_to_expression!(obj,
n.location.sizes[1].variable_operating_cost[t],
vars.process[n, t])
# Storage costs
add_to_expression!(obj,
n.location.storage_cost[t],
vars.store[n, t])
# Expansion costs
if t < T
add_to_expression!(obj,
slope_open(n.location, t) - slope_open(n.location, t + 1),
vars.expansion[n, t])
else
add_to_expression!(obj,
slope_open(n.location, t),
vars.expansion[n, t])
end
end
# Shipping node costs
for n in values(graph.plant_shipping_nodes), t in 1:T
# Disposal costs
add_to_expression!(obj,
n.location.disposal_cost[n.product][t],
vars.dispose[n, t])
end
@objective(mip, Min, obj)
end
function create_shipping_node_constraints!(model::ManufacturingModel)
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
eqs = model.eqs
eqs.balance = OrderedDict()
for t in 1:T
# Collection centers
for n in graph.collection_shipping_nodes
eqs.balance[n, t] = @constraint(mip,
sum(vars.flow[a, t] for a in n.outgoing_arcs)
== n.location.amount[t])
end
# Plants
for n in graph.plant_shipping_nodes
@constraint(mip,
sum(vars.flow[a, t] for a in n.incoming_arcs) ==
sum(vars.flow[a, t] for a in n.outgoing_arcs) + vars.dispose[n, t])
end
end
end
function create_process_node_constraints!(model::ManufacturingModel)
mip, vars, graph, T = model.mip, model.vars, model.graph, model.instance.time
for t in 1:T, n in graph.process_nodes
input_sum = AffExpr(0.0)
for a in n.incoming_arcs
add_to_expression!(input_sum, 1.0, vars.flow[a, t])
end
# Output amount is implied by amount processed
for a in n.outgoing_arcs
@constraint(mip, vars.flow[a, t] == a.values["weight"] * vars.process[n, t])
end
# If plant is closed, capacity is zero
@constraint(mip, vars.capacity[n, t] <= n.location.sizes[2].capacity * vars.is_open[n, t])
# If plant is open, capacity is greater than base
@constraint(mip, vars.capacity[n, t] >= n.location.sizes[1].capacity * vars.is_open[n, t])
# Capacity is linked to expansion
@constraint(mip, vars.capacity[n, t] <= n.location.sizes[1].capacity + vars.expansion[n, t])
# Can only process up to capacity
@constraint(mip, vars.process[n, t] <= vars.capacity[n, t])
if t > 1
# Plant capacity can only increase over time
@constraint(mip, vars.capacity[n, t] >= vars.capacity[n, t-1])
@constraint(mip, vars.expansion[n, t] >= vars.expansion[n, t-1])
end
# Amount received equals amount processed plus stored
store_in = 0
if t > 1
store_in = vars.store[n, t-1]
end
if t == T
@constraint(mip, vars.store[n, t] == 0)
end
@constraint(mip,
input_sum + store_in == vars.store[n, t] + vars.process[n, t])
# Plant is currently open if it was already open in the previous time period or
# if it was built just now
if t > 1
@constraint(mip, vars.is_open[n, t] == vars.is_open[n, t-1] + vars.open_plant[n, t])
else
@constraint(mip, vars.is_open[n, t] == vars.open_plant[n, t])
end
# Plant can only be opened during building period
if t ∉ model.instance.building_period
@constraint(mip, vars.open_plant[n, t] == 0)
end
end
end
default_milp_optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
default_lp_optimizer = optimizer_with_attributes(Clp.Optimizer, "LogLevel" => 0)
function solve(instance::Instance;
optimizer=nothing,
output=nothing,
marginal_costs=true,
)
milp_optimizer = lp_optimizer = optimizer
if optimizer == nothing
milp_optimizer = default_milp_optimizer
lp_optimizer = default_lp_optimizer
end
@info "Building graph..."
graph = RELOG.build_graph(instance)
@info @sprintf(" %12d time periods", instance.time)
@info @sprintf(" %12d process nodes", length(graph.process_nodes))
@info @sprintf(" %12d shipping nodes (plant)", length(graph.plant_shipping_nodes))
@info @sprintf(" %12d shipping nodes (collection)", length(graph.collection_shipping_nodes))
@info @sprintf(" %12d arcs", length(graph.arcs))
@info "Building optimization model..."
model = RELOG.build_model(instance, graph, milp_optimizer)
@info "Optimizing MILP..."
JuMP.optimize!(model.mip)
if !has_values(model.mip)
@warn "No solution available"
return OrderedDict()
end
if marginal_costs
@info "Re-optimizing with integer variables fixed..."
all_vars = JuMP.all_variables(model.mip)
vals = OrderedDict(var => JuMP.value(var) for var in all_vars)
JuMP.set_optimizer(model.mip, lp_optimizer)
for var in all_vars
if JuMP.is_binary(var)
JuMP.unset_binary(var)
JuMP.fix(var, vals[var])
end
end
JuMP.optimize!(model.mip)
end
@info "Extracting solution..."
solution = get_solution(model, marginal_costs=marginal_costs)
if output != nothing
write(solution, output)
end
return solution
end
function solve(filename::AbstractString;
heuristic=false,
kwargs...,
)
@info "Reading $filename..."
instance = RELOG.parsefile(filename)
if heuristic && instance.time > 1
@info "Solving single-period version..."
compressed = _compress(instance)
csol = solve(compressed;
output=nothing,
marginal_costs=false,
kwargs...)
@info "Filtering candidate locations..."
selected_pairs = []
for (plant_name, plant_dict) in csol["Plants"]
for (location_name, location_dict) in plant_dict
push!(selected_pairs, (plant_name, location_name))
end
end
filtered_plants = []
for p in instance.plants
if (p.plant_name, p.location_name) in selected_pairs
push!(filtered_plants, p)
end
end
instance.plants = filtered_plants
@info "Solving original version..."
end
sol = solve(instance; kwargs...)
return sol
end
function get_solution(model::ManufacturingModel;
marginal_costs=true,
)
mip, vars, eqs, graph, instance = model.mip, model.vars, model.eqs, model.graph, model.instance
T = instance.time
output = OrderedDict(
"Plants" => OrderedDict(),
"Products" => OrderedDict(),
"Costs" => OrderedDict(
"Fixed operating (\$)" => zeros(T),
"Variable operating (\$)" => zeros(T),
"Opening (\$)" => zeros(T),
"Transportation (\$)" => zeros(T),
"Disposal (\$)" => zeros(T),
"Expansion (\$)" => zeros(T),
"Storage (\$)" => zeros(T),
"Total (\$)" => zeros(T),
),
"Energy" => OrderedDict(
"Plants (GJ)" => zeros(T),
"Transportation (GJ)" => zeros(T),
),
"Emissions" => OrderedDict(
"Plants (tonne)" => OrderedDict(),
"Transportation (tonne)" => OrderedDict(),
),
)
plant_to_process_node = OrderedDict(n.location => n for n in graph.process_nodes)
plant_to_shipping_nodes = OrderedDict()
for p in instance.plants
plant_to_shipping_nodes[p] = []
for a in plant_to_process_node[p].outgoing_arcs
push!(plant_to_shipping_nodes[p], a.dest)
end
end
# Products
if marginal_costs
for n in graph.collection_shipping_nodes
location_dict = OrderedDict{Any, Any}(
"Marginal cost (\$/tonne)" => [round(abs(JuMP.shadow_price(eqs.balance[n, t])), digits=2)
for t in 1:T]
)
if n.product.name ∉ keys(output["Products"])
output["Products"][n.product.name] = OrderedDict()
end
output["Products"][n.product.name][n.location.name] = location_dict
end
end
# Plants
for plant in instance.plants
skip_plant = true
process_node = plant_to_process_node[plant]
plant_dict = OrderedDict{Any, Any}(
"Input" => OrderedDict(),
"Output" => OrderedDict(
"Send" => OrderedDict(),
"Dispose" => OrderedDict(),
),
"Input product" => plant.input.name,
"Total input (tonne)" => [0.0 for t in 1:T],
"Total output" => OrderedDict(),
"Latitude (deg)" => plant.latitude,
"Longitude (deg)" => plant.longitude,
"Capacity (tonne)" => [JuMP.value(vars.capacity[process_node, t])
for t in 1:T],
"Opening cost (\$)" => [JuMP.value(vars.open_plant[process_node, t]) *
plant.sizes[1].opening_cost[t]
for t in 1:T],
"Fixed operating cost (\$)" => [JuMP.value(vars.is_open[process_node, t]) *
plant.sizes[1].fixed_operating_cost[t] +
JuMP.value(vars.expansion[process_node, t]) *
slope_fix_oper_cost(plant, t)
for t in 1:T],
"Expansion cost (\$)" => [(if t == 1
slope_open(plant, t) * JuMP.value(vars.expansion[process_node, t])
else
slope_open(plant, t) * (
JuMP.value(vars.expansion[process_node, t]) -
JuMP.value(vars.expansion[process_node, t - 1])
)
end)
for t in 1:T],
"Process (tonne)" => [JuMP.value(vars.process[process_node, t])
for t in 1:T],
"Variable operating cost (\$)" => [JuMP.value(vars.process[process_node, t]) *
plant.sizes[1].variable_operating_cost[t]
for t in 1:T],
"Storage (tonne)" => [JuMP.value(vars.store[process_node, t])
for t in 1:T],
"Storage cost (\$)" => [JuMP.value(vars.store[process_node, t]) *
plant.storage_cost[t]
for t in 1:T],
)
output["Costs"]["Fixed operating (\$)"] += plant_dict["Fixed operating cost (\$)"]
output["Costs"]["Variable operating (\$)"] += plant_dict["Variable operating cost (\$)"]
output["Costs"]["Opening (\$)"] += plant_dict["Opening cost (\$)"]
output["Costs"]["Expansion (\$)"] += plant_dict["Expansion cost (\$)"]
output["Costs"]["Storage (\$)"] += plant_dict["Storage cost (\$)"]
# Inputs
for a in process_node.incoming_arcs
vals = [JuMP.value(vars.flow[a, t]) for t in 1:T]
if sum(vals) <= 1e-3
continue
end
skip_plant = false
dict = OrderedDict{Any, Any}(
"Amount (tonne)" => vals,
"Distance (km)" => a.values["distance"],
"Latitude (deg)" => a.source.location.latitude,
"Longitude (deg)" => a.source.location.longitude,
"Transportation cost (\$)" => a.source.product.transportation_cost .*
vals .*
a.values["distance"],
"Transportation energy (J)" => vals .*
a.values["distance"] .*
a.source.product.transportation_energy,
"Emissions (tonne)" => OrderedDict(),
)
emissions_dict = output["Emissions"]["Transportation (tonne)"]
for (em_name, em_values) in a.source.product.transportation_emissions
dict["Emissions (tonne)"][em_name] = em_values .*
dict["Amount (tonne)"] .*
a.values["distance"]
if em_name ∉ keys(emissions_dict)
emissions_dict[em_name] = zeros(T)
end
emissions_dict[em_name] += dict["Emissions (tonne)"][em_name]
end
if a.source.location isa CollectionCenter
plant_name = "Origin"
location_name = a.source.location.name
else
plant_name = a.source.location.plant_name
location_name = a.source.location.location_name
end
if plant_name ∉ keys(plant_dict["Input"])
plant_dict["Input"][plant_name] = OrderedDict()
end
plant_dict["Input"][plant_name][location_name] = dict
plant_dict["Total input (tonne)"] += vals
output["Costs"]["Transportation (\$)"] += dict["Transportation cost (\$)"]
output["Energy"]["Transportation (GJ)"] += dict["Transportation energy (J)"] / 1e9
end
plant_dict["Energy (GJ)"] = plant_dict["Total input (tonne)"] .* plant.energy
output["Energy"]["Plants (GJ)"] += plant_dict["Energy (GJ)"]
plant_dict["Emissions (tonne)"] = OrderedDict()
emissions_dict = output["Emissions"]["Plants (tonne)"]
for (em_name, em_values) in plant.emissions
plant_dict["Emissions (tonne)"][em_name] = em_values .* plant_dict["Total input (tonne)"]
if em_name ∉ keys(emissions_dict)
emissions_dict[em_name] = zeros(T)
end
emissions_dict[em_name] += plant_dict["Emissions (tonne)"][em_name]
end
# Outputs
for shipping_node in plant_to_shipping_nodes[plant]
product_name = shipping_node.product.name
plant_dict["Total output"][product_name] = zeros(T)
plant_dict["Output"]["Send"][product_name] = product_dict = OrderedDict()
disposal_amount = [JuMP.value(vars.dispose[shipping_node, t]) for t in 1:T]
if sum(disposal_amount) > 1e-5
skip_plant = false
plant_dict["Output"]["Dispose"][product_name] = disposal_dict = OrderedDict()
disposal_dict["Amount (tonne)"] = [JuMP.value(model.vars.dispose[shipping_node, t])
for t in 1:T]
disposal_dict["Cost (\$)"] = [disposal_dict["Amount (tonne)"][t] *
plant.disposal_cost[shipping_node.product][t]
for t in 1:T]
plant_dict["Total output"][product_name] += disposal_amount
output["Costs"]["Disposal (\$)"] += disposal_dict["Cost (\$)"]
end
for a in shipping_node.outgoing_arcs
vals = [JuMP.value(vars.flow[a, t]) for t in 1:T]
if sum(vals) <= 1e-3
continue
end
skip_plant = false
dict = OrderedDict(
"Amount (tonne)" => vals,
"Distance (km)" => a.values["distance"],
"Latitude (deg)" => a.dest.location.latitude,
"Longitude (deg)" => a.dest.location.longitude,
)
if a.dest.location.plant_name ∉ keys(product_dict)
product_dict[a.dest.location.plant_name] = OrderedDict()
end
product_dict[a.dest.location.plant_name][a.dest.location.location_name] = dict
plant_dict["Total output"][product_name] += vals
end
end
if !skip_plant
if plant.plant_name ∉ keys(output["Plants"])
output["Plants"][plant.plant_name] = OrderedDict()
end
output["Plants"][plant.plant_name][plant.location_name] = plant_dict
end
end
output["Costs"]["Total (\$)"] = sum(values(output["Costs"]))
return output
end

277
src/model/build.jl Normal file
View File

@@ -0,0 +1,277 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP, LinearAlgebra, Geodesy, ProgressBars, Printf, DataStructures
function build_model(instance::Instance, graph::Graph, optimizer)::JuMP.Model
model = Model(optimizer)
model[:instance] = instance
model[:graph] = graph
create_vars!(model)
create_objective_function!(model)
create_shipping_node_constraints!(model)
create_process_node_constraints!(model)
return model
end
function create_vars!(model::JuMP.Model)
graph, T = model[:graph], model[:instance].time
model[:flow] =
Dict((a, t) => @variable(model, lower_bound = 0) for a in graph.arcs, t = 1:T)
model[:plant_dispose] = Dict(
(n, t) => @variable(
model,
lower_bound = 0,
upper_bound = n.location.disposal_limit[n.product][t]
) for n in values(graph.plant_shipping_nodes), t = 1:T
)
model[:collection_dispose] = Dict(
(n, t) => @variable(model, lower_bound = 0,) for
n in values(graph.collection_shipping_nodes), t = 1:T
)
model[:store] = Dict(
(n, t) =>
@variable(model, lower_bound = 0, upper_bound = n.location.storage_limit)
for n in values(graph.process_nodes), t = 1:T
)
model[:process] = Dict(
(n, t) => @variable(model, lower_bound = 0) for
n in values(graph.process_nodes), t = 1:T
)
model[:open_plant] = Dict(
(n, t) => @variable(model, binary = true) for n in values(graph.process_nodes),
t = 1:T
)
model[:is_open] = Dict(
(n, t) => @variable(model, binary = true) for n in values(graph.process_nodes),
t = 1:T
)
model[:capacity] = Dict(
(n, t) => @variable(
model,
lower_bound = 0,
upper_bound = n.location.sizes[2].capacity
) for n in values(graph.process_nodes), t = 1:T
)
model[:expansion] = Dict(
(n, t) => @variable(
model,
lower_bound = 0,
upper_bound = n.location.sizes[2].capacity - n.location.sizes[1].capacity
) for n in values(graph.process_nodes), t = 1:T
)
end
function slope_open(plant, t)
if plant.sizes[2].capacity <= plant.sizes[1].capacity
0.0
else
(plant.sizes[2].opening_cost[t] - plant.sizes[1].opening_cost[t]) /
(plant.sizes[2].capacity - plant.sizes[1].capacity)
end
end
function slope_fix_oper_cost(plant, t)
if plant.sizes[2].capacity <= plant.sizes[1].capacity
0.0
else
(plant.sizes[2].fixed_operating_cost[t] - plant.sizes[1].fixed_operating_cost[t]) /
(plant.sizes[2].capacity - plant.sizes[1].capacity)
end
end
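# Illustrative arithmetic (numbers made up): if sizes[1] has capacity 100 and
# opening cost 1000 at time t, and sizes[2] has capacity 500 and opening cost
# 3000, then slope_open(plant, t) = (3000 - 1000) / (500 - 100) = 5.0, so an
# expansion of 200 tonnes contributes 200 * 5.0 = 1000 to the objective's
# opening-cost term. slope_fix_oper_cost works the same way for the fixed
# operating cost.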
function create_objective_function!(model::JuMP.Model)
graph, T = model[:graph], model[:instance].time
obj = AffExpr(0.0)
# Process node costs
for n in values(graph.process_nodes), t = 1:T
# Transportation and variable operating costs
for a in n.incoming_arcs
c = n.location.input.transportation_cost[t] * a.values["distance"]
add_to_expression!(obj, c, model[:flow][a, t])
end
# Opening costs
add_to_expression!(
obj,
n.location.sizes[1].opening_cost[t],
model[:open_plant][n, t],
)
# Fixed operating costs (base)
add_to_expression!(
obj,
n.location.sizes[1].fixed_operating_cost[t],
model[:is_open][n, t],
)
# Fixed operating costs (expansion)
add_to_expression!(obj, slope_fix_oper_cost(n.location, t), model[:expansion][n, t])
# Processing costs
add_to_expression!(
obj,
n.location.sizes[1].variable_operating_cost[t],
model[:process][n, t],
)
# Storage costs
add_to_expression!(obj, n.location.storage_cost[t], model[:store][n, t])
# Expansion costs
if t < T
add_to_expression!(
obj,
slope_open(n.location, t) - slope_open(n.location, t + 1),
model[:expansion][n, t],
)
else
add_to_expression!(obj, slope_open(n.location, t), model[:expansion][n, t])
end
end
# Plant shipping node costs
for n in values(graph.plant_shipping_nodes), t = 1:T
# Disposal costs
add_to_expression!(
obj,
n.location.disposal_cost[n.product][t],
model[:plant_dispose][n, t],
)
end
# Collection shipping node costs
for n in values(graph.collection_shipping_nodes), t = 1:T
# Disposal costs
add_to_expression!(
obj,
n.location.product.disposal_cost[t],
model[:collection_dispose][n, t],
)
end
@objective(model, Min, obj)
end
function create_shipping_node_constraints!(model::JuMP.Model)
graph, T = model[:graph], model[:instance].time
model[:eq_balance] = OrderedDict()
for t = 1:T
# Collection centers
for n in graph.collection_shipping_nodes
model[:eq_balance][n, t] = @constraint(
model,
sum(model[:flow][a, t] for a in n.outgoing_arcs) ==
n.location.amount[t] + model[:collection_dispose][n, t]
)
end
for prod in model[:instance].products
if isempty(prod.collection_centers)
continue
end
expr = AffExpr()
for center in prod.collection_centers
n = graph.collection_center_to_node[center]
add_to_expression!(expr, model[:collection_dispose][n, t])
end
@constraint(model, expr <= prod.disposal_limit[t])
end
# Plants
for n in graph.plant_shipping_nodes
@constraint(
model,
sum(model[:flow][a, t] for a in n.incoming_arcs) ==
sum(model[:flow][a, t] for a in n.outgoing_arcs) +
model[:plant_dispose][n, t]
)
end
end
end
function create_process_node_constraints!(model::JuMP.Model)
graph, T = model[:graph], model[:instance].time
for t = 1:T, n in graph.process_nodes
input_sum = AffExpr(0.0)
for a in n.incoming_arcs
add_to_expression!(input_sum, 1.0, model[:flow][a, t])
end
# Output amount is implied by amount processed
for a in n.outgoing_arcs
@constraint(
model,
model[:flow][a, t] == a.values["weight"] * model[:process][n, t]
)
end
# If plant is closed, capacity is zero
@constraint(
model,
model[:capacity][n, t] <= n.location.sizes[2].capacity * model[:is_open][n, t]
)
# If plant is open, capacity is greater than base
@constraint(
model,
model[:capacity][n, t] >= n.location.sizes[1].capacity * model[:is_open][n, t]
)
# Capacity is linked to expansion
@constraint(
model,
model[:capacity][n, t] <=
n.location.sizes[1].capacity + model[:expansion][n, t]
)
# Can only process up to capacity
@constraint(model, model[:process][n, t] <= model[:capacity][n, t])
if t > 1
# Plant capacity can only increase over time
@constraint(model, model[:capacity][n, t] >= model[:capacity][n, t-1])
@constraint(model, model[:expansion][n, t] >= model[:expansion][n, t-1])
end
# Amount received equals amount processed plus stored
store_in = 0
if t > 1
store_in = model[:store][n, t-1]
end
if t == T
@constraint(model, model[:store][n, t] == 0)
end
@constraint(
model,
input_sum + store_in == model[:store][n, t] + model[:process][n, t]
)
# Plant is currently open if it was already open in the previous time period or
# if it was built just now
if t > 1
@constraint(
model,
model[:is_open][n, t] == model[:is_open][n, t-1] + model[:open_plant][n, t]
)
else
@constraint(model, model[:is_open][n, t] == model[:open_plant][n, t])
end
# Plant can only be opened during building period
if t ∉ model[:instance].building_period
@constraint(model, model[:open_plant][n, t] == 0)
end
end
end

231
src/model/getsol.jl Normal file
View File

@@ -0,0 +1,231 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP, LinearAlgebra, Geodesy, ProgressBars, Printf, DataStructures
function get_solution(model::JuMP.Model; marginal_costs = true)
graph, instance = model[:graph], model[:instance]
T = instance.time
output = OrderedDict(
"Plants" => OrderedDict(),
"Products" => OrderedDict(),
"Costs" => OrderedDict(
"Fixed operating (\$)" => zeros(T),
"Variable operating (\$)" => zeros(T),
"Opening (\$)" => zeros(T),
"Transportation (\$)" => zeros(T),
"Disposal (\$)" => zeros(T),
"Expansion (\$)" => zeros(T),
"Storage (\$)" => zeros(T),
"Total (\$)" => zeros(T),
),
"Energy" =>
OrderedDict("Plants (GJ)" => zeros(T), "Transportation (GJ)" => zeros(T)),
"Emissions" => OrderedDict(
"Plants (tonne)" => OrderedDict(),
"Transportation (tonne)" => OrderedDict(),
),
)
plant_to_process_node = OrderedDict(n.location => n for n in graph.process_nodes)
plant_to_shipping_nodes = OrderedDict()
for p in instance.plants
plant_to_shipping_nodes[p] = []
for a in plant_to_process_node[p].outgoing_arcs
push!(plant_to_shipping_nodes[p], a.dest)
end
end
# Products
for n in graph.collection_shipping_nodes
location_dict = OrderedDict{Any,Any}(
"Latitude (deg)" => n.location.latitude,
"Longitude (deg)" => n.location.longitude,
"Amount (tonne)" => n.location.amount,
"Dispose (tonne)" =>
[JuMP.value(model[:collection_dispose][n, t]) for t = 1:T],
)
if marginal_costs
location_dict["Marginal cost (\$/tonne)"] = [
round(abs(JuMP.shadow_price(model[:eq_balance][n, t])), digits = 2) for
t = 1:T
]
end
if n.product.name ∉ keys(output["Products"])
output["Products"][n.product.name] = OrderedDict()
end
output["Products"][n.product.name][n.location.name] = location_dict
end
# Plants
for plant in instance.plants
skip_plant = true
process_node = plant_to_process_node[plant]
plant_dict = OrderedDict{Any,Any}(
"Input" => OrderedDict(),
"Output" =>
OrderedDict("Send" => OrderedDict(), "Dispose" => OrderedDict()),
"Input product" => plant.input.name,
"Total input (tonne)" => [0.0 for t = 1:T],
"Total output" => OrderedDict(),
"Latitude (deg)" => plant.latitude,
"Longitude (deg)" => plant.longitude,
"Capacity (tonne)" =>
[JuMP.value(model[:capacity][process_node, t]) for t = 1:T],
"Opening cost (\$)" => [
JuMP.value(model[:open_plant][process_node, t]) *
plant.sizes[1].opening_cost[t] for t = 1:T
],
"Fixed operating cost (\$)" => [
JuMP.value(model[:is_open][process_node, t]) *
plant.sizes[1].fixed_operating_cost[t] +
JuMP.value(model[:expansion][process_node, t]) *
slope_fix_oper_cost(plant, t) for t = 1:T
],
"Expansion cost (\$)" => [
(
if t == 1
slope_open(plant, t) * JuMP.value(model[:expansion][process_node, t])
else
slope_open(plant, t) * (
JuMP.value(model[:expansion][process_node, t]) -
JuMP.value(model[:expansion][process_node, t-1])
)
end
) for t = 1:T
],
"Process (tonne)" =>
[JuMP.value(model[:process][process_node, t]) for t = 1:T],
"Variable operating cost (\$)" => [
JuMP.value(model[:process][process_node, t]) *
plant.sizes[1].variable_operating_cost[t] for t = 1:T
],
"Storage (tonne)" =>
[JuMP.value(model[:store][process_node, t]) for t = 1:T],
"Storage cost (\$)" => [
JuMP.value(model[:store][process_node, t]) * plant.storage_cost[t]
for t = 1:T
],
)
output["Costs"]["Fixed operating (\$)"] += plant_dict["Fixed operating cost (\$)"]
output["Costs"]["Variable operating (\$)"] +=
plant_dict["Variable operating cost (\$)"]
output["Costs"]["Opening (\$)"] += plant_dict["Opening cost (\$)"]
output["Costs"]["Expansion (\$)"] += plant_dict["Expansion cost (\$)"]
output["Costs"]["Storage (\$)"] += plant_dict["Storage cost (\$)"]
# Inputs
for a in process_node.incoming_arcs
vals = [JuMP.value(model[:flow][a, t]) for t = 1:T]
if sum(vals) <= 1e-3
continue
end
skip_plant = false
dict = OrderedDict{Any,Any}(
"Amount (tonne)" => vals,
"Distance (km)" => a.values["distance"],
"Latitude (deg)" => a.source.location.latitude,
"Longitude (deg)" => a.source.location.longitude,
"Transportation cost (\$)" =>
a.source.product.transportation_cost .* vals .* a.values["distance"],
"Transportation energy (J)" =>
vals .* a.values["distance"] .* a.source.product.transportation_energy,
"Emissions (tonne)" => OrderedDict(),
)
emissions_dict = output["Emissions"]["Transportation (tonne)"]
for (em_name, em_values) in a.source.product.transportation_emissions
dict["Emissions (tonne)"][em_name] =
em_values .* dict["Amount (tonne)"] .* a.values["distance"]
if em_name ∉ keys(emissions_dict)
emissions_dict[em_name] = zeros(T)
end
emissions_dict[em_name] += dict["Emissions (tonne)"][em_name]
end
if a.source.location isa CollectionCenter
plant_name = "Origin"
location_name = a.source.location.name
else
plant_name = a.source.location.plant_name
location_name = a.source.location.location_name
end
if plant_name ∉ keys(plant_dict["Input"])
plant_dict["Input"][plant_name] = OrderedDict()
end
plant_dict["Input"][plant_name][location_name] = dict
plant_dict["Total input (tonne)"] += vals
output["Costs"]["Transportation (\$)"] += dict["Transportation cost (\$)"]
output["Energy"]["Transportation (GJ)"] +=
dict["Transportation energy (J)"] / 1e9
end
plant_dict["Energy (GJ)"] = plant_dict["Total input (tonne)"] .* plant.energy
output["Energy"]["Plants (GJ)"] += plant_dict["Energy (GJ)"]
plant_dict["Emissions (tonne)"] = OrderedDict()
emissions_dict = output["Emissions"]["Plants (tonne)"]
for (em_name, em_values) in plant.emissions
plant_dict["Emissions (tonne)"][em_name] =
em_values .* plant_dict["Total input (tonne)"]
if em_name ∉ keys(emissions_dict)
emissions_dict[em_name] = zeros(T)
end
emissions_dict[em_name] += plant_dict["Emissions (tonne)"][em_name]
end
# Outputs
for shipping_node in plant_to_shipping_nodes[plant]
product_name = shipping_node.product.name
plant_dict["Total output"][product_name] = zeros(T)
plant_dict["Output"]["Send"][product_name] = product_dict = OrderedDict()
disposal_amount =
[JuMP.value(model[:plant_dispose][shipping_node, t]) for t = 1:T]
if sum(disposal_amount) > 1e-5
skip_plant = false
plant_dict["Output"]["Dispose"][product_name] =
disposal_dict = OrderedDict()
disposal_dict["Amount (tonne)"] =
[JuMP.value(model[:plant_dispose][shipping_node, t]) for t = 1:T]
disposal_dict["Cost (\$)"] = [
disposal_dict["Amount (tonne)"][t] *
plant.disposal_cost[shipping_node.product][t] for t = 1:T
]
plant_dict["Total output"][product_name] += disposal_amount
output["Costs"]["Disposal (\$)"] += disposal_dict["Cost (\$)"]
end
for a in shipping_node.outgoing_arcs
vals = [JuMP.value(model[:flow][a, t]) for t = 1:T]
if sum(vals) <= 1e-3
continue
end
skip_plant = false
dict = OrderedDict(
"Amount (tonne)" => vals,
"Distance (km)" => a.values["distance"],
"Latitude (deg)" => a.dest.location.latitude,
"Longitude (deg)" => a.dest.location.longitude,
)
if a.dest.location.plant_name ∉ keys(product_dict)
product_dict[a.dest.location.plant_name] = OrderedDict()
end
product_dict[a.dest.location.plant_name][a.dest.location.location_name] =
dict
plant_dict["Total output"][product_name] += vals
end
end
if !skip_plant
if plant.plant_name ∉ keys(output["Plants"])
output["Plants"][plant.plant_name] = OrderedDict()
end
output["Plants"][plant.plant_name][plant.location_name] = plant_dict
end
end
output["Costs"]["Total (\$)"] = sum(values(output["Costs"]))
return output
end

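A hedged usage sketch of how the solution dictionary assembled above can be inspected; RELOG.solve is defined later in this diff, and the instance file name and plant/location keys are illustrative, not part of the package.
using RELOG
solution = RELOG.solve("instance.json")
solution["Costs"]["Total (\$)"]                   # total cost per year
solution["Energy"]["Plants (GJ)"]                 # plant energy use per year
solution["Emissions"]["Transportation (tonne)"]   # per-emission time series
solution["Plants"]["Recycling Plant"]["Chicago"]  # per-location details (keys depend on the instance)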
src/model/resolve.jl (new file, 97 lines)

@@ -0,0 +1,97 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP
function resolve(model_old, filename::AbstractString; kwargs...)::OrderedDict
@info "Reading $filename..."
instance = RELOG.parsefile(filename)
return resolve(model_old, instance; kwargs...)
end
function resolve(model_old, instance::Instance; optimizer = nothing)::OrderedDict
milp_optimizer = lp_optimizer = optimizer
if optimizer === nothing
milp_optimizer = _get_default_milp_optimizer()
lp_optimizer = _get_default_lp_optimizer()
end
@info "Building new graph..."
graph = build_graph(instance)
_print_graph_stats(instance, graph)
@info "Building new optimization model..."
model_new = RELOG.build_model(instance, graph, milp_optimizer)
@info "Fixing decision variables..."
_fix_plants!(model_old, model_new)
JuMP.set_optimizer(model_new, lp_optimizer)
@info "Optimizing MILP..."
JuMP.optimize!(model_new)
if !has_values(model_new)
@warn("No solution available")
return OrderedDict()
end
@info "Extracting solution..."
solution = get_solution(model_new, marginal_costs = true)
return solution
end
function _fix_plants!(model_old, model_new)::Nothing
T = model_new[:instance].time
# Fix open_plant variables
for ((node_old, t), var_old) in model_old[:open_plant]
value_old = JuMP.value(var_old)
node_new = model_new[:graph].name_to_process_node_map[(
node_old.location.plant_name,
node_old.location.location_name,
)]
var_new = model_new[:open_plant][node_new, t]
JuMP.unset_binary(var_new)
JuMP.fix(var_new, value_old)
end
# Fix is_open variables
for ((node_old, t), var_old) in model_old[:is_open]
value_old = JuMP.value(var_old)
node_new = model_new[:graph].name_to_process_node_map[(
node_old.location.plant_name,
node_old.location.location_name,
)]
var_new = model_new[:is_open][node_new, t]
JuMP.unset_binary(var_new)
JuMP.fix(var_new, value_old)
end
# Fix plant capacities
for ((node_old, t), var_old) in model_old[:capacity]
value_old = JuMP.value(var_old)
node_new = model_new[:graph].name_to_process_node_map[(
node_old.location.plant_name,
node_old.location.location_name,
)]
var_new = model_new[:capacity][node_new, t]
JuMP.delete_lower_bound(var_new)
JuMP.delete_upper_bound(var_new)
JuMP.fix(var_new, value_old)
end
# Fix plant expansion
for ((node_old, t), var_old) in model_old[:expansion]
value_old = JuMP.value(var_old)
node_new = model_new[:graph].name_to_process_node_map[(
node_old.location.plant_name,
node_old.location.location_name,
)]
var_new = model_new[:expansion][node_new, t]
JuMP.delete_lower_bound(var_new)
JuMP.delete_upper_bound(var_new)
JuMP.fix(var_new, value_old)
end
end

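A hedged sketch of the resolve workflow defined above: solve a baseline instance, keep the model, then re-optimize a what-if scenario with the plant decisions fixed to the baseline values. File names are illustrative.
using RELOG
baseline_solution, baseline_model =
    RELOG.solve("baseline.json", return_model = true, marginal_costs = false)
scenario_solution = RELOG.resolve(baseline_model, "scenario.json")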
src/model/solve.jl (new file, 126 lines)

@@ -0,0 +1,126 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP, LinearAlgebra, Geodesy, ProgressBars, Printf, DataStructures, HiGHS
function _get_default_milp_optimizer()
return optimizer_with_attributes(HiGHS.Optimizer)
end
function _get_default_lp_optimizer()
return optimizer_with_attributes(HiGHS.Optimizer)
end
function _print_graph_stats(instance::Instance, graph::Graph)::Nothing
@info @sprintf("%12d time periods", instance.time)
@info @sprintf("%12d process nodes", length(graph.process_nodes))
@info @sprintf("%12d shipping nodes (plant)", length(graph.plant_shipping_nodes))
@info @sprintf(
"%12d shipping nodes (collection)",
length(graph.collection_shipping_nodes)
)
@info @sprintf("%12d arcs", length(graph.arcs))
return
end
function solve(
instance::Instance;
optimizer = nothing,
lp_optimizer = nothing,
output = nothing,
marginal_costs = true,
return_model = false,
)
if lp_optimizer == nothing
if optimizer == nothing
# If neither is provided, use default LP optimizer.
lp_optimizer = _get_default_lp_optimizer()
else
# If only MIP optimizer is provided, use it as
# LP solver too.
lp_optimizer = optimizer
end
end
if optimizer == nothing
optimizer = _get_default_milp_optimizer()
end
@info "Building graph..."
graph = RELOG.build_graph(instance)
_print_graph_stats(instance, graph)
@info "Building optimization model..."
model = RELOG.build_model(instance, graph, optimizer)
@info "Optimizing MILP..."
JuMP.optimize!(model)
if !has_values(model)
error("No solution available")
end
if marginal_costs
@info "Re-optimizing with integer variables fixed..."
all_vars = JuMP.all_variables(model)
vals = OrderedDict(var => JuMP.value(var) for var in all_vars)
JuMP.set_optimizer(model, lp_optimizer)
for var in all_vars
if JuMP.is_binary(var)
JuMP.unset_binary(var)
JuMP.fix(var, vals[var])
end
end
JuMP.optimize!(model)
end
@info "Extracting solution..."
solution = get_solution(model, marginal_costs = marginal_costs)
if output != nothing
write(solution, output)
end
if return_model
return solution, model
else
return solution
end
end
function solve(filename::AbstractString; heuristic = false, kwargs...)
@info "Reading $filename..."
instance = RELOG.parsefile(filename)
if heuristic && instance.time > 1
@info "Solving single-period version..."
compressed = _compress(instance)
csol, _ = solve(
compressed;
return_model = true,
output = nothing,
marginal_costs = false,
kwargs...,
)
@info "Filtering candidate locations..."
selected_pairs = []
for (plant_name, plant_dict) in csol["Plants"]
for (location_name, location_dict) in plant_dict
push!(selected_pairs, (plant_name, location_name))
end
end
filtered_plants = []
for p in instance.plants
if (p.plant_name, p.location_name) in selected_pairs
push!(filtered_plants, p)
end
end
instance.plants = filtered_plants
@info "Solving original version..."
end
sol = solve(instance; kwargs...)
return sol
end

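A sketch of calling solve with a time-limited HiGHS optimizer and the single-period location heuristic enabled; the time limit and file name are assumptions, while the keyword names follow the function above.
using RELOG, JuMP, HiGHS
optimizer = optimizer_with_attributes(HiGHS.Optimizer, "time_limit" => 3600.0)
solution = RELOG.solve("instance.json", optimizer = optimizer, heuristic = true)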

@@ -1,278 +0,0 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function plants_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."latitude (deg)" = Float64[]
df."longitude (deg)" = Float64[]
df."capacity (tonne)" = Float64[]
df."amount processed (tonne)" = Float64[]
df."amount received (tonne)" = Float64[]
df."amount in storage (tonne)" = Float64[]
df."utilization factor (%)" = Float64[]
df."energy (GJ)" = Float64[]
df."opening cost (\$)" = Float64[]
df."expansion cost (\$)" = Float64[]
df."fixed operating cost (\$)" = Float64[]
df."variable operating cost (\$)" = Float64[]
df."storage cost (\$)" = Float64[]
df."total cost (\$)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for year in 1:T
capacity = round(location_dict["Capacity (tonne)"][year], digits=2)
received = round(location_dict["Total input (tonne)"][year], digits=2)
processed = round(location_dict["Process (tonne)"][year], digits=2)
in_storage = round(location_dict["Storage (tonne)"][year], digits=2)
utilization_factor = round(processed / capacity * 100.0, digits=2)
energy = round(location_dict["Energy (GJ)"][year], digits=2)
latitude = round(location_dict["Latitude (deg)"], digits=6)
longitude = round(location_dict["Longitude (deg)"], digits=6)
opening_cost = round(location_dict["Opening cost (\$)"][year], digits=2)
expansion_cost = round(location_dict["Expansion cost (\$)"][year], digits=2)
fixed_cost = round(location_dict["Fixed operating cost (\$)"][year], digits=2)
var_cost = round(location_dict["Variable operating cost (\$)"][year], digits=2)
storage_cost = round(location_dict["Storage cost (\$)"][year], digits=2)
total_cost = round(opening_cost + expansion_cost + fixed_cost +
var_cost + storage_cost, digits=2)
push!(df, [
plant_name,
location_name,
year,
latitude,
longitude,
capacity,
processed,
received,
in_storage,
utilization_factor,
energy,
opening_cost,
expansion_cost,
fixed_cost,
var_cost,
storage_cost,
total_cost,
])
end
end
end
return df
end
function plant_outputs_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."product name" = String[]
df."amount produced (tonne)" = Float64[]
df."amount sent (tonne)" = Float64[]
df."amount disposed (tonne)" = Float64[]
df."disposal cost (\$)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for (product_name, amount_produced) in location_dict["Total output"]
send_dict = location_dict["Output"]["Send"]
disposal_dict = location_dict["Output"]["Dispose"]
sent = zeros(T)
if product_name in keys(send_dict)
for (dst_plant_name, dst_plant_dict) in send_dict[product_name]
for (dst_location_name, dst_location_dict) in dst_plant_dict
sent += dst_location_dict["Amount (tonne)"]
end
end
end
sent = round.(sent, digits=2)
disposal_amount = zeros(T)
disposal_cost = zeros(T)
if product_name in keys(disposal_dict)
disposal_amount += disposal_dict[product_name]["Amount (tonne)"]
disposal_cost += disposal_dict[product_name]["Cost (\$)"]
end
disposal_amount = round.(disposal_amount, digits=2)
disposal_cost = round.(disposal_cost, digits=2)
for year in 1:T
push!(df, [
plant_name,
location_name,
year,
product_name,
round(amount_produced[year], digits=2),
sent[year],
disposal_amount[year],
disposal_cost[year],
])
end
end
end
end
return df
end
function plant_emissions_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."emission type" = String[]
df."emission amount (tonne)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for (emission_name, emission_amount) in location_dict["Emissions (tonne)"]
for year in 1:T
push!(df, [
plant_name,
location_name,
year,
emission_name,
round(emission_amount[year], digits=2),
])
end
end
end
end
return df
end
function transportation_report(solution)::DataFrame
df = DataFrame()
df."source type" = String[]
df."source location name" = String[]
df."source latitude (deg)" = Float64[]
df."source longitude (deg)" = Float64[]
df."destination type" = String[]
df."destination location name" = String[]
df."destination latitude (deg)" = Float64[]
df."destination longitude (deg)" = Float64[]
df."product" = String[]
df."year" = Int[]
df."distance (km)" = Float64[]
df."amount (tonne)" = Float64[]
df."amount-distance (tonne-km)" = Float64[]
df."transportation cost (\$)" = Float64[]
df."transportation energy (GJ)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (dst_plant_name, dst_plant_dict) in solution["Plants"]
for (dst_location_name, dst_location_dict) in dst_plant_dict
for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
for (src_location_name, src_location_dict) in src_plant_dict
for year in 1:T
push!(df, [
src_plant_name,
src_location_name,
round(src_location_dict["Latitude (deg)"], digits=6),
round(src_location_dict["Longitude (deg)"], digits=6),
dst_plant_name,
dst_location_name,
round(dst_location_dict["Latitude (deg)"], digits=6),
round(dst_location_dict["Longitude (deg)"], digits=6),
dst_location_dict["Input product"],
year,
round(src_location_dict["Distance (km)"], digits=2),
round(src_location_dict["Amount (tonne)"][year], digits=2),
round(src_location_dict["Amount (tonne)"][year] *
src_location_dict["Distance (km)"],
digits=2),
round(src_location_dict["Transportation cost (\$)"][year], digits=2),
round(src_location_dict["Transportation energy (J)"][year] / 1e9, digits=2),
])
end
end
end
end
end
return df
end
function transportation_emissions_report(solution)::DataFrame
df = DataFrame()
df."source type" = String[]
df."source location name" = String[]
df."source latitude (deg)" = Float64[]
df."source longitude (deg)" = Float64[]
df."destination type" = String[]
df."destination location name" = String[]
df."destination latitude (deg)" = Float64[]
df."destination longitude (deg)" = Float64[]
df."product" = String[]
df."year" = Int[]
df."distance (km)" = Float64[]
df."shipped amount (tonne)" = Float64[]
df."shipped amount-distance (tonne-km)" = Float64[]
df."emission type" = String[]
df."emission amount (tonne)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (dst_plant_name, dst_plant_dict) in solution["Plants"]
for (dst_location_name, dst_location_dict) in dst_plant_dict
for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
for (src_location_name, src_location_dict) in src_plant_dict
for (emission_name, emission_amount) in src_location_dict["Emissions (tonne)"]
for year in 1:T
push!(df, [
src_plant_name,
src_location_name,
round(src_location_dict["Latitude (deg)"], digits=6),
round(src_location_dict["Longitude (deg)"], digits=6),
dst_plant_name,
dst_location_name,
round(dst_location_dict["Latitude (deg)"], digits=6),
round(dst_location_dict["Longitude (deg)"], digits=6),
dst_location_dict["Input product"],
year,
round(src_location_dict["Distance (km)"], digits=2),
round(src_location_dict["Amount (tonne)"][year], digits=2),
round(src_location_dict["Amount (tonne)"][year] *
src_location_dict["Distance (km)"],
digits=2),
emission_name,
round(emission_amount[year], digits=2),
])
end
end
end
end
end
end
return df
end
function write(solution::AbstractDict, filename::AbstractString)
@info "Writing solution: $filename"
open(filename, "w") do file
JSON.print(file, solution, 2)
end
end
write_plants_report(solution, filename) =
CSV.write(filename, plants_report(solution))
write_plant_outputs_report(solution, filename) =
CSV.write(filename, plant_outputs_report(solution))
write_plant_emissions_report(solution, filename) =
CSV.write(filename, plant_emissions_report(solution))
write_transportation_report(solution, filename) =
CSV.write(filename, transportation_report(solution))
write_transportation_emissions_report(solution, filename) =
CSV.write(filename, transportation_emissions_report(solution))


@@ -0,0 +1,38 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function plant_emissions_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."emission type" = String[]
df."emission amount (tonne)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for (emission_name, emission_amount) in location_dict["Emissions (tonne)"]
for year = 1:T
push!(
df,
[
plant_name,
location_name,
year,
emission_name,
round(emission_amount[year], digits = 2),
],
)
end
end
end
end
return df
end
write_plant_emissions_report(solution, filename) =
CSV.write(filename, plant_emissions_report(solution))


@@ -0,0 +1,66 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function plant_outputs_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."product name" = String[]
df."amount produced (tonne)" = Float64[]
df."amount sent (tonne)" = Float64[]
df."amount disposed (tonne)" = Float64[]
df."disposal cost (\$)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for (product_name, amount_produced) in location_dict["Total output"]
send_dict = location_dict["Output"]["Send"]
disposal_dict = location_dict["Output"]["Dispose"]
sent = zeros(T)
if product_name in keys(send_dict)
for (dst_plant_name, dst_plant_dict) in send_dict[product_name]
for (dst_location_name, dst_location_dict) in dst_plant_dict
sent += dst_location_dict["Amount (tonne)"]
end
end
end
sent = round.(sent, digits = 2)
disposal_amount = zeros(T)
disposal_cost = zeros(T)
if product_name in keys(disposal_dict)
disposal_amount += disposal_dict[product_name]["Amount (tonne)"]
disposal_cost += disposal_dict[product_name]["Cost (\$)"]
end
disposal_amount = round.(disposal_amount, digits = 2)
disposal_cost = round.(disposal_cost, digits = 2)
for year = 1:T
push!(
df,
[
plant_name,
location_name,
year,
product_name,
round(amount_produced[year], digits = 2),
sent[year],
disposal_amount[year],
disposal_cost[year],
],
)
end
end
end
end
return df
end
write_plant_outputs_report(solution, filename) =
CSV.write(filename, plant_outputs_report(solution))

src/reports/plants.jl (new file, 79 lines)

@@ -0,0 +1,79 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function plants_report(solution)::DataFrame
df = DataFrame()
df."plant type" = String[]
df."location name" = String[]
df."year" = Int[]
df."latitude (deg)" = Float64[]
df."longitude (deg)" = Float64[]
df."capacity (tonne)" = Float64[]
df."amount processed (tonne)" = Float64[]
df."amount received (tonne)" = Float64[]
df."amount in storage (tonne)" = Float64[]
df."utilization factor (%)" = Float64[]
df."energy (GJ)" = Float64[]
df."opening cost (\$)" = Float64[]
df."expansion cost (\$)" = Float64[]
df."fixed operating cost (\$)" = Float64[]
df."variable operating cost (\$)" = Float64[]
df."storage cost (\$)" = Float64[]
df."total cost (\$)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for year = 1:T
capacity = round(location_dict["Capacity (tonne)"][year], digits = 2)
received = round(location_dict["Total input (tonne)"][year], digits = 2)
processed = round(location_dict["Process (tonne)"][year], digits = 2)
in_storage = round(location_dict["Storage (tonne)"][year], digits = 2)
utilization_factor = round(processed / capacity * 100.0, digits = 2)
energy = round(location_dict["Energy (GJ)"][year], digits = 2)
latitude = round(location_dict["Latitude (deg)"], digits = 6)
longitude = round(location_dict["Longitude (deg)"], digits = 6)
opening_cost = round(location_dict["Opening cost (\$)"][year], digits = 2)
expansion_cost =
round(location_dict["Expansion cost (\$)"][year], digits = 2)
fixed_cost =
round(location_dict["Fixed operating cost (\$)"][year], digits = 2)
var_cost =
round(location_dict["Variable operating cost (\$)"][year], digits = 2)
storage_cost = round(location_dict["Storage cost (\$)"][year], digits = 2)
total_cost = round(
opening_cost + expansion_cost + fixed_cost + var_cost + storage_cost,
digits = 2,
)
push!(
df,
[
plant_name,
location_name,
year,
latitude,
longitude,
capacity,
processed,
received,
in_storage,
utilization_factor,
energy,
opening_cost,
expansion_cost,
fixed_cost,
var_cost,
storage_cost,
total_cost,
],
)
end
end
end
return df
end
write_plants_report(solution, filename) = CSV.write(filename, plants_report(solution))

src/reports/products.jl (new file, 46 lines)

@@ -0,0 +1,46 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function products_report(solution; marginal_costs = true)::DataFrame
df = DataFrame()
df."product name" = String[]
df."location name" = String[]
df."latitude (deg)" = Float64[]
df."longitude (deg)" = Float64[]
df."year" = Int[]
df."amount (tonne)" = Float64[]
df."amount disposed (tonne)" = Float64[]
df."marginal cost (\$/tonne)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (prod_name, prod_dict) in solution["Products"]
for (location_name, location_dict) in prod_dict
for year = 1:T
marginal_cost = location_dict["Marginal cost (\$/tonne)"][year]
latitude = round(location_dict["Latitude (deg)"], digits = 6)
longitude = round(location_dict["Longitude (deg)"], digits = 6)
amount = location_dict["Amount (tonne)"][year]
amount_disposed = location_dict["Dispose (tonne)"][year]
push!(
df,
[
prod_name,
location_name,
latitude,
longitude,
year,
amount,
amount_disposed,
marginal_cost,
],
)
end
end
end
return df
end
write_products_report(solution, filename) = CSV.write(filename, products_report(solution))

src/reports/tr.jl (new file, 75 lines)

@@ -0,0 +1,75 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function transportation_report(solution)::DataFrame
df = DataFrame()
df."source type" = String[]
df."source location name" = String[]
df."source latitude (deg)" = Float64[]
df."source longitude (deg)" = Float64[]
df."destination type" = String[]
df."destination location name" = String[]
df."destination latitude (deg)" = Float64[]
df."destination longitude (deg)" = Float64[]
df."product" = String[]
df."year" = Int[]
df."distance (km)" = Float64[]
df."amount (tonne)" = Float64[]
df."amount-distance (tonne-km)" = Float64[]
df."transportation cost (\$)" = Float64[]
df."transportation energy (GJ)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (dst_plant_name, dst_plant_dict) in solution["Plants"]
for (dst_location_name, dst_location_dict) in dst_plant_dict
for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
for (src_location_name, src_location_dict) in src_plant_dict
for year = 1:T
push!(
df,
[
src_plant_name,
src_location_name,
round(src_location_dict["Latitude (deg)"], digits = 6),
round(src_location_dict["Longitude (deg)"], digits = 6),
dst_plant_name,
dst_location_name,
round(dst_location_dict["Latitude (deg)"], digits = 6),
round(dst_location_dict["Longitude (deg)"], digits = 6),
dst_location_dict["Input product"],
year,
round(src_location_dict["Distance (km)"], digits = 2),
round(
src_location_dict["Amount (tonne)"][year],
digits = 2,
),
round(
src_location_dict["Amount (tonne)"][year] *
src_location_dict["Distance (km)"],
digits = 2,
),
round(
src_location_dict["Transportation cost (\$)"][year],
digits = 2,
),
round(
src_location_dict["Transportation energy (J)"][year] /
1e9,
digits = 2,
),
],
)
end
end
end
end
end
return df
end
write_transportation_report(solution, filename) =
CSV.write(filename, transportation_report(solution))


@@ -0,0 +1,71 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
function transportation_emissions_report(solution)::DataFrame
df = DataFrame()
df."source type" = String[]
df."source location name" = String[]
df."source latitude (deg)" = Float64[]
df."source longitude (deg)" = Float64[]
df."destination type" = String[]
df."destination location name" = String[]
df."destination latitude (deg)" = Float64[]
df."destination longitude (deg)" = Float64[]
df."product" = String[]
df."year" = Int[]
df."distance (km)" = Float64[]
df."shipped amount (tonne)" = Float64[]
df."shipped amount-distance (tonne-km)" = Float64[]
df."emission type" = String[]
df."emission amount (tonne)" = Float64[]
T = length(solution["Energy"]["Plants (GJ)"])
for (dst_plant_name, dst_plant_dict) in solution["Plants"]
for (dst_location_name, dst_location_dict) in dst_plant_dict
for (src_plant_name, src_plant_dict) in dst_location_dict["Input"]
for (src_location_name, src_location_dict) in src_plant_dict
for (emission_name, emission_amount) in
src_location_dict["Emissions (tonne)"]
for year = 1:T
push!(
df,
[
src_plant_name,
src_location_name,
round(src_location_dict["Latitude (deg)"], digits = 6),
round(src_location_dict["Longitude (deg)"], digits = 6),
dst_plant_name,
dst_location_name,
round(dst_location_dict["Latitude (deg)"], digits = 6),
round(dst_location_dict["Longitude (deg)"], digits = 6),
dst_location_dict["Input product"],
year,
round(src_location_dict["Distance (km)"], digits = 2),
round(
src_location_dict["Amount (tonne)"][year],
digits = 2,
),
round(
src_location_dict["Amount (tonne)"][year] *
src_location_dict["Distance (km)"],
digits = 2,
),
emission_name,
round(emission_amount[year], digits = 2),
],
)
end
end
end
end
end
end
return df
end
write_transportation_emissions_report(solution, filename) =
CSV.write(filename, transportation_emissions_report(solution))

src/reports/write.jl (new file, 14 lines)

@@ -0,0 +1,14 @@
# RELOG: Reverse Logistics Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using DataFrames
using CSV
import Base: write
function write(solution::AbstractDict, filename::AbstractString)
@info "Writing solution: $filename"
open(filename, "w") do file
JSON.print(file, solution, 2)
end
end

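A sketch of exporting results with the report writers defined in this diff; output file names are illustrative.
using RELOG
solution = RELOG.solve("instance.json")
RELOG.write(solution, "solution.json")                           # full solution as JSON
RELOG.write_plants_report(solution, "plants.csv")
RELOG.write_plant_outputs_report(solution, "plant_outputs.csv")
RELOG.write_transportation_report(solution, "transportation.csv")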

@@ -12,7 +12,12 @@
    "Parameters": {
      "type": "object",
      "properties": {
        "time horizon (years)": {
          "type": "number"
        },
        "distance metric": {
          "type": "string"
        }
      },
      "required": [
        "time horizon (years)"
@@ -23,17 +28,27 @@
      "additionalProperties": {
        "type": "object",
        "properties": {
          "input": {
            "type": "string"
          },
          "outputs (tonne/tonne)": {
            "type": "object",
            "additionalProperties": {
              "type": "number"
            }
          },
          "energy (GJ/tonne)": {
            "$ref": "#/definitions/TimeSeries"
          },
          "emissions (tonne/tonne)": {
            "type": "object",
            "additionalProperties": {
              "$ref": "#/definitions/TimeSeries"
            }
          },
          "locations": {
            "$ref": "#/definitions/PlantLocation"
          }
        },
        "required": [
          "input",
@@ -46,15 +61,26 @@
      "additionalProperties": {
        "type": "object",
        "properties": {
          "location": {
            "type": "string"
          },
          "latitude (deg)": {
            "type": "number"
          },
          "longitude (deg)": {
            "type": "number"
          },
          "disposal": {
            "type": "object",
            "additionalProperties": {
              "type": "object",
              "properties": {
                "cost ($/tonne)": {
                  "$ref": "#/definitions/TimeSeries"
                },
                "limit (tonne)": {
                  "$ref": "#/definitions/TimeSeries"
                }
              },
              "required": [
                "cost ($/tonne)"
@@ -64,22 +90,32 @@
          "storage": {
            "type": "object",
            "properties": {
              "cost ($/tonne)": {
                "$ref": "#/definitions/TimeSeries"
              },
              "limit (tonne)": {
                "type": "number"
              }
            },
            "required": [
              "cost ($/tonne)",
              "limit (tonne)"
            ]
          },
          "capacities (tonne)": {
            "type": "object",
            "additionalProperties": {
              "type": "object",
              "properties": {
                "variable operating cost ($/tonne)": {
                  "$ref": "#/definitions/TimeSeries"
                },
                "fixed operating cost ($)": {
                  "$ref": "#/definitions/TimeSeries"
                },
                "opening cost ($)": {
                  "$ref": "#/definitions/TimeSeries"
                }
              },
              "required": [
                "variable operating cost ($/tonne)",
@@ -87,11 +123,9 @@
                "opening cost ($)"
              ]
            }
          }
        },
        "required": [
          "capacities (tonne)"
        ]
      }
@@ -101,13 +135,20 @@
      "additionalProperties": {
        "type": "object",
        "properties": {
          "location": {
            "type": "string"
          },
          "latitude (deg)": {
            "type": "number"
          },
          "longitude (deg)": {
            "type": "number"
          },
          "amount (tonne)": {
            "$ref": "#/definitions/TimeSeries"
          }
        },
        "required": [
          "amount (tonne)"
        ]
      }
@@ -117,25 +158,45 @@
      "additionalProperties": {
        "type": "object",
        "properties": {
          "transportation cost ($/km/tonne)": {
            "$ref": "#/definitions/TimeSeries"
          },
          "transportation energy (J/km/tonne)": {
            "$ref": "#/definitions/TimeSeries"
          },
          "transportation emissions (tonne/km/tonne)": {
            "type": "object",
            "additionalProperties": {
              "$ref": "#/definitions/TimeSeries"
            }
          },
          "initial amounts": {
            "$ref": "#/definitions/InitialAmount"
          },
          "disposal limit (tonne)": {
            "$ref": "#/definitions/TimeSeries"
          },
          "disposal cost ($/tonne)": {
            "$ref": "#/definitions/TimeSeries"
          }
        },
        "required": [
          "transportation cost ($/km/tonne)"
        ]
      }
    }
  },
  "type": "object",
  "properties": {
    "parameters": {
      "$ref": "#/definitions/Parameters"
    },
    "plants": {
      "$ref": "#/definitions/Plant"
    },
    "products": {
      "$ref": "#/definitions/Product"
    }
  },
  "required": [
    "parameters",

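A hedged sketch of an input fragment matching the updated schema, written as a Julia Dict: "time horizon (years)" remains required, while "distance metric" is a new optional string. The concrete values shown are assumptions, not mandated by the schema.
case = Dict(
    "parameters" => Dict(
        "time horizon (years)" => 2,      # required by the schema
        "distance metric" => "driving",   # optional; value is an assumption
    ),
)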

@@ -1,22 +0,0 @@
using PackageCompiler
using Cbc
using Clp
using Geodesy
using JSON
using JSONSchema
using JuMP
using MathOptInterface
using ProgressBars
pkg = [:Cbc,
:Clp,
:Geodesy,
:JSON,
:JSONSchema,
:JuMP,
:MathOptInterface,
:ProgressBars]
@info "Building system image..."
create_sysimage(pkg, sysimage_path="build/sysimage.so")

src/web/run.jl (new file, 116 lines)

@@ -0,0 +1,116 @@
println("Initializing...")
using Logging
using JSON
using JuMP
using HiGHS
using RELOG
function solve(root, filename)
ref_file = "$root/$filename"
optimizer = optimizer_with_attributes(
HiGHS.Optimizer,
"time_limit" => parse(Float64, ENV["RELOG_TIME_LIMIT_SEC"]),
)
ref_solution, ref_model = RELOG.solve(
ref_file,
heuristic = true,
optimizer = optimizer,
lp_optimizer = HiGHS.Optimizer,
return_model = true,
marginal_costs = true,
)
Libc.flush_cstdio()
flush(stdout)
sleep(1)
if length(ref_solution) == 0
return
end
RELOG.write_products_report(ref_solution, replace(ref_file, ".json" => "_products.csv"))
RELOG.write_plants_report(ref_solution, replace(ref_file, ".json" => "_plants.csv"))
RELOG.write_plant_outputs_report(
ref_solution,
replace(ref_file, ".json" => "_plant_outputs.csv"),
)
RELOG.write_plant_emissions_report(
ref_solution,
replace(ref_file, ".json" => "_plant_emissions.csv"),
)
RELOG.write_transportation_report(ref_solution, replace(ref_file, ".json" => "_tr.csv"))
RELOG.write_transportation_emissions_report(
ref_solution,
replace(ref_file, ".json" => "_tr_emissions.csv"),
)
isdir("$root/scenarios") || return
for filename in readdir("$root/scenarios")
scenario = "$root/scenarios/$filename"
endswith(filename, ".json") || continue
sc_solution = RELOG.resolve(
ref_model,
scenario,
optimizer = optimizer,
lp_optimizer = HiGHS.Optimizer,
)
if length(sc_solution) == 0
return
end
RELOG.write_plants_report(sc_solution, replace(scenario, ".json" => "_plants.csv"))
RELOG.write_products_report(
sc_solution,
replace(scenario, ".json" => "_products.csv"),
)
RELOG.write_plant_outputs_report(
sc_solution,
replace(scenario, ".json" => "_plant_outputs.csv"),
)
RELOG.write_plant_emissions_report(
sc_solution,
replace(scenario, ".json" => "_plant_emissions.csv"),
)
RELOG.write_transportation_report(
sc_solution,
replace(scenario, ".json" => "_tr.csv"),
)
RELOG.write_transportation_emissions_report(
sc_solution,
replace(scenario, ".json" => "_tr_emissions.csv"),
)
end
end
function solve_recursive(path)
cd(path)
# Solve instances
for (root, dirs, files) in walkdir(".")
if occursin(r"scenarios"i, root)
continue
end
for filename in files
endswith(filename, ".json") || continue
solve(root, filename)
end
end
# Collect results
results = []
for (root, dirs, files) in walkdir(".")
for filename in files
endswith(filename, "_plants.csv") || continue
push!(
results,
joinpath(replace(root, path => ""), replace(filename, "_plants.csv" => "")),
)
end
end
open("output.json", "w") do file
JSON.print(file, results)
end
run(`zip -r output.zip .`)
end
solve_recursive(ARGS[1])

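A hypothetical invocation of the job runner above from Julia; the time limit and job directory are illustrative, and the environment variable name is the one read in run.jl.
ENV["RELOG_TIME_LIMIT_SEC"] = "3600"
run(`julia --project=. src/web/run.jl jobs/0123abcd4567`)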
src/web/web.jl (new file, 65 lines)

@@ -0,0 +1,65 @@
import HTTP
import JSON
using Random
const ROUTER = HTTP.Router()
const PROJECT_DIR = joinpath(dirname(@__FILE__), "..", "..")
const STATIC_DIR = joinpath(PROJECT_DIR, "relog-web", "build", "static")
const JOBS_DIR = joinpath(PROJECT_DIR, "jobs")
function serve_file(req::HTTP.Request, filename)
if isfile(filename)
open(filename) do file
return HTTP.Response(200, read(file))
end
else
return HTTP.Response(404)
end
end
function submit(req::HTTP.Request)
# Generate random job id
job_id = lowercase(randstring(12))
# Create job folder
job_path = joinpath(JOBS_DIR, job_id)
mkpath(job_path)
# Write JSON file
case = JSON.parse(String(req.body))
open(joinpath(job_path, "case.json"), "w") do file
JSON.print(file, case)
end
# Run job
run(
`bash -c "(julia --project=$PROJECT_DIR $PROJECT_DIR/src/web/run.jl $job_path 2>&1 | tee $job_path/solve.log) >/dev/null 2>&1 &"`,
)
response = Dict("job_id" => job_id)
return HTTP.Response(200, body = JSON.json(response))
end
function get_index(req::HTTP.Request)
return serve_file(req, joinpath(STATIC_DIR, "..", "index.html"))
end
function get_static(req::HTTP.Request)
return serve_file(req, joinpath(STATIC_DIR, req.target[9:end]))
end
function get_jobs(req::HTTP.Request)
return serve_file(req, joinpath(JOBS_DIR, req.target[7:end]))
end
HTTP.@register(ROUTER, "GET", "/static", get_static)
HTTP.@register(ROUTER, "GET", "/jobs", get_jobs)
HTTP.@register(ROUTER, "POST", "/submit", submit)
HTTP.@register(ROUTER, "GET", "/", get_index)
function web(host = "127.0.0.1", port = 8080)
@info "Launching web interface: http://$(host):$(port)/"
Base.exit_on_sigint(false)
HTTP.serve(ROUTER, host, port)
Base.exit_on_sigint(true)
end

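A minimal sketch of launching the web interface, assuming src/web/web.jl is included in the RELOG package (the include wiring is not shown in this diff); host and port are illustrative.
using RELOG
RELOG.web("0.0.0.0", 8080)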
test/Project.toml (new file, 19 lines)

@@ -0,0 +1,19 @@
name = "RELOGT"
uuid = "a6dae211-05d8-42ed-9081-b88c982fc90a"
authors = ["Alinson S. Xavier <git@axavier.org>"]
version = "0.1.0"
[deps]
Cbc = "9961bab8-2fa3-5c5a-9d89-47fab24efd76"
GZip = "92fee26a-97fe-5a0c-ad85-20a5f3185b63"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
RELOG = "a2afcdf7-cf04-4913-85f9-c0d81ddf2008"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[compat]
JuliaFormatter = "1"

Some files were not shown because too many files have changed in this diff.