mirror of
https://github.com/ANL-CEEESA/RELOG.git
synced 2025-12-05 23:38:52 -06:00
Start implementation of circular model
This commit is contained in:
30
src/RELOG.jl
30
src/RELOG.jl
@@ -1,33 +1,7 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
module RELOG
|
||||
|
||||
using Pkg
|
||||
|
||||
version() = Pkg.dependencies()[Base.UUID("a2afcdf7-cf04-4913-85f9-c0d81ddf2008")].version
|
||||
|
||||
include("instance/structs.jl")
|
||||
include("graph/structs.jl")
|
||||
|
||||
include("instance/geodb.jl")
|
||||
include("graph/dist.jl")
|
||||
include("graph/build.jl")
|
||||
include("graph/csv.jl")
|
||||
include("instance/compress.jl")
|
||||
include("instance/parse.jl")
|
||||
include("instance/validate.jl")
|
||||
include("model/build.jl")
|
||||
include("model/getsol.jl")
|
||||
include("model/resolve.jl")
|
||||
include("model/solve.jl")
|
||||
include("reports/plant_emissions.jl")
|
||||
include("reports/plant_outputs.jl")
|
||||
include("reports/plants.jl")
|
||||
include("reports/products.jl")
|
||||
include("reports/tr_emissions.jl")
|
||||
include("reports/tr.jl")
|
||||
include("reports/write.jl")
|
||||
include("web/web.jl")
|
||||
end
|
||||
|
||||
end # module RELOG
|
||||
|
||||
@@ -1,97 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
function build_graph(instance::Instance)::Graph
|
||||
arcs = []
|
||||
next_index = 0
|
||||
process_nodes = ProcessNode[]
|
||||
plant_shipping_nodes = ShippingNode[]
|
||||
collection_shipping_nodes = ShippingNode[]
|
||||
|
||||
name_to_process_node_map = Dict{Tuple{AbstractString,AbstractString},ProcessNode}()
|
||||
collection_center_to_node = Dict()
|
||||
|
||||
process_nodes_by_input_product =
|
||||
Dict(product => ProcessNode[] for product in instance.products)
|
||||
shipping_nodes_by_plant = Dict(plant => [] for plant in instance.plants)
|
||||
|
||||
# Build collection center shipping nodes
|
||||
for center in instance.collection_centers
|
||||
node = ShippingNode(next_index, center, center.product, [], [])
|
||||
next_index += 1
|
||||
collection_center_to_node[center] = node
|
||||
push!(collection_shipping_nodes, node)
|
||||
end
|
||||
|
||||
# Build process and shipping nodes for plants
|
||||
for plant in instance.plants
|
||||
pn = ProcessNode(next_index, plant, [], [])
|
||||
next_index += 1
|
||||
push!(process_nodes, pn)
|
||||
push!(process_nodes_by_input_product[plant.input], pn)
|
||||
|
||||
name_to_process_node_map[(plant.plant_name, plant.location_name)] = pn
|
||||
|
||||
for product in keys(plant.output)
|
||||
sn = ShippingNode(next_index, plant, product, [], [])
|
||||
next_index += 1
|
||||
push!(plant_shipping_nodes, sn)
|
||||
push!(shipping_nodes_by_plant[plant], sn)
|
||||
end
|
||||
end
|
||||
|
||||
# Build arcs from collection centers to plants, and from one plant to another
|
||||
for source in [collection_shipping_nodes; plant_shipping_nodes]
|
||||
for dest in process_nodes_by_input_product[source.product]
|
||||
source.location != dest.location || continue
|
||||
distance = _calculate_distance(
|
||||
source.location.latitude,
|
||||
source.location.longitude,
|
||||
dest.location.latitude,
|
||||
dest.location.longitude,
|
||||
instance.distance_metric,
|
||||
)
|
||||
values = Dict("distance" => distance)
|
||||
arc = Arc(source, dest, values)
|
||||
push!(source.outgoing_arcs, arc)
|
||||
push!(dest.incoming_arcs, arc)
|
||||
push!(arcs, arc)
|
||||
end
|
||||
end
|
||||
|
||||
# Build arcs from process nodes to shipping nodes within a plant
|
||||
for source in process_nodes
|
||||
plant = source.location
|
||||
for dest in shipping_nodes_by_plant[plant]
|
||||
weight = plant.output[dest.product]
|
||||
values = Dict("weight" => weight)
|
||||
arc = Arc(source, dest, values)
|
||||
push!(source.outgoing_arcs, arc)
|
||||
push!(dest.incoming_arcs, arc)
|
||||
push!(arcs, arc)
|
||||
end
|
||||
end
|
||||
|
||||
return Graph(
|
||||
process_nodes,
|
||||
plant_shipping_nodes,
|
||||
collection_shipping_nodes,
|
||||
arcs,
|
||||
name_to_process_node_map,
|
||||
collection_center_to_node,
|
||||
)
|
||||
end
|
||||
|
||||
|
||||
function print_graph_stats(instance::Instance, graph::Graph)::Nothing
|
||||
@info @sprintf(" %12d time periods", instance.time)
|
||||
@info @sprintf(" %12d process nodes", length(graph.process_nodes))
|
||||
@info @sprintf(" %12d shipping nodes (plant)", length(graph.plant_shipping_nodes))
|
||||
@info @sprintf(
|
||||
" %12d shipping nodes (collection)",
|
||||
length(graph.collection_shipping_nodes)
|
||||
)
|
||||
@info @sprintf(" %12d arcs", length(graph.arcs))
|
||||
return
|
||||
end
|
||||
@@ -1,11 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
function to_csv(graph::Graph)
|
||||
result = ""
|
||||
for a in graph.arcs
|
||||
result *= "$(a.source.index),$(a.dest.index)\n"
|
||||
end
|
||||
return result
|
||||
end
|
||||
@@ -1,60 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using Geodesy
|
||||
using NearestNeighbors
|
||||
using DataFrames
|
||||
|
||||
function _calculate_distance(
|
||||
source_lat,
|
||||
source_lon,
|
||||
dest_lat,
|
||||
dest_lon,
|
||||
::EuclideanDistance,
|
||||
)::Float64
|
||||
x = LLA(source_lat, source_lon, 0.0)
|
||||
y = LLA(dest_lat, dest_lon, 0.0)
|
||||
return round(euclidean_distance(x, y) / 1000.0, digits = 3)
|
||||
end
|
||||
|
||||
function _calculate_distance(
|
||||
source_lat,
|
||||
source_lon,
|
||||
dest_lat,
|
||||
dest_lon,
|
||||
metric::KnnDrivingDistance,
|
||||
)::Float64
|
||||
if metric.tree === nothing
|
||||
basedir = joinpath(dirname(@__FILE__), "..", "..", "data")
|
||||
csv_filename = joinpath(basedir, "dist_driving.csv")
|
||||
|
||||
# Download pre-computed driving data
|
||||
if !isfile(csv_filename)
|
||||
_download_zip(
|
||||
"https://axavier.org/RELOG/0.6/data/dist_driving_0b9a6ad6.zip",
|
||||
basedir,
|
||||
csv_filename,
|
||||
0x0b9a6ad6,
|
||||
)
|
||||
end
|
||||
|
||||
# Fit kNN model
|
||||
df = DataFrame(CSV.File(csv_filename, missingstring = "NaN"))
|
||||
dropmissing!(df)
|
||||
coords = Matrix(df[!, [:source_lat, :source_lon, :dest_lat, :dest_lon]])'
|
||||
metric.ratios = Matrix(df[!, [:ratio]])
|
||||
metric.tree = KDTree(coords)
|
||||
end
|
||||
|
||||
# Compute Euclidean distance
|
||||
dist_euclidean =
|
||||
_calculate_distance(source_lat, source_lon, dest_lat, dest_lon, EuclideanDistance())
|
||||
|
||||
# Predict ratio
|
||||
idxs, _ = knn(metric.tree, [source_lat, source_lon, dest_lat, dest_lon], 5)
|
||||
ratio_pred = mean(metric.ratios[idxs])
|
||||
dist_pred = round(dist_euclidean * ratio_pred, digits = 3)
|
||||
isfinite(dist_pred) || error("non-finite distance detected: $dist_pred")
|
||||
return dist_pred
|
||||
end
|
||||
@@ -1,45 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using Geodesy
|
||||
|
||||
abstract type Node end
|
||||
|
||||
mutable struct Arc
|
||||
source::Node
|
||||
dest::Node
|
||||
values::Dict{String,Float64}
|
||||
end
|
||||
|
||||
mutable struct ProcessNode <: Node
|
||||
index::Int
|
||||
location::Plant
|
||||
incoming_arcs::Vector{Arc}
|
||||
outgoing_arcs::Vector{Arc}
|
||||
end
|
||||
|
||||
mutable struct ShippingNode <: Node
|
||||
index::Int
|
||||
location::Union{Plant,CollectionCenter}
|
||||
product::Product
|
||||
incoming_arcs::Vector{Arc}
|
||||
outgoing_arcs::Vector{Arc}
|
||||
end
|
||||
|
||||
mutable struct Graph
|
||||
process_nodes::Vector{ProcessNode}
|
||||
plant_shipping_nodes::Vector{ShippingNode}
|
||||
collection_shipping_nodes::Vector{ShippingNode}
|
||||
arcs::Vector{Arc}
|
||||
name_to_process_node_map::Dict{Tuple{AbstractString,AbstractString},ProcessNode}
|
||||
collection_center_to_node::Dict{CollectionCenter,ShippingNode}
|
||||
end
|
||||
|
||||
function Base.show(io::IO, instance::Graph)
|
||||
print(io, "RELOG graph with ")
|
||||
print(io, "$(length(instance.process_nodes)) process nodes, ")
|
||||
print(io, "$(length(instance.plant_shipping_nodes)) plant shipping nodes, ")
|
||||
print(io, "$(length(instance.collection_shipping_nodes)) collection shipping nodes, ")
|
||||
print(io, "$(length(instance.arcs)) arcs")
|
||||
end
|
||||
@@ -1,63 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataStructures
|
||||
using JSON
|
||||
using JSONSchema
|
||||
using Printf
|
||||
using Statistics
|
||||
|
||||
"""
|
||||
_compress(instance::Instance)
|
||||
|
||||
Create a single-period instance from a multi-period one. Specifically,
|
||||
replaces every time-dependent attribute, such as initial_amounts,
|
||||
by a list with a single element, which is either a sum, an average,
|
||||
or something else that makes sense to that specific attribute.
|
||||
"""
|
||||
function _compress(instance::Instance)::Instance
|
||||
T = instance.time
|
||||
compressed = deepcopy(instance)
|
||||
compressed.time = 1
|
||||
compressed.building_period = [1]
|
||||
|
||||
# Compress products
|
||||
for p in compressed.products
|
||||
p.acquisition_cost = [mean(p.acquisition_cost)]
|
||||
p.disposal_cost = [mean(p.disposal_cost)]
|
||||
p.disposal_limit = [sum(p.disposal_limit)]
|
||||
p.transportation_cost = [mean(p.transportation_cost)]
|
||||
p.transportation_energy = [mean(p.transportation_energy)]
|
||||
for (emission_name, emission_value) in p.transportation_emissions
|
||||
p.transportation_emissions[emission_name] = [mean(emission_value)]
|
||||
end
|
||||
end
|
||||
|
||||
# Compress collection centers
|
||||
for c in compressed.collection_centers
|
||||
c.amount = [maximum(c.amount) * T]
|
||||
end
|
||||
|
||||
# Compress plants
|
||||
for plant in compressed.plants
|
||||
plant.energy = [mean(plant.energy)]
|
||||
for (emission_name, emission_value) in plant.emissions
|
||||
plant.emissions[emission_name] = [mean(emission_value)]
|
||||
end
|
||||
for s in plant.sizes
|
||||
s.capacity *= T
|
||||
s.variable_operating_cost = [mean(s.variable_operating_cost)]
|
||||
s.opening_cost = [s.opening_cost[1]]
|
||||
s.fixed_operating_cost = [sum(s.fixed_operating_cost)]
|
||||
end
|
||||
for (prod_name, disp_limit) in plant.disposal_limit
|
||||
plant.disposal_limit[prod_name] = [sum(disp_limit)]
|
||||
end
|
||||
for (prod_name, disp_cost) in plant.disposal_cost
|
||||
plant.disposal_cost[prod_name] = [mean(disp_cost)]
|
||||
end
|
||||
end
|
||||
|
||||
return compressed
|
||||
end
|
||||
@@ -1,212 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using CRC
|
||||
using CSV
|
||||
using DataFrames
|
||||
using Shapefile
|
||||
using Statistics
|
||||
using ZipFile
|
||||
using ProgressBars
|
||||
using OrderedCollections
|
||||
|
||||
import Downloads: download
|
||||
import Base: parse
|
||||
|
||||
crc32 = crc(CRC_32)
|
||||
|
||||
struct GeoPoint
|
||||
lat::Float64
|
||||
lon::Float64
|
||||
end
|
||||
|
||||
struct GeoRegion
|
||||
centroid::GeoPoint
|
||||
population::Int
|
||||
GeoRegion(; centroid, population) = new(centroid, population)
|
||||
end
|
||||
|
||||
DB_CACHE = Dict{String,Dict{String,GeoRegion}}()
|
||||
|
||||
function centroid(geom::Shapefile.Polygon)::GeoPoint
|
||||
x_max, x_min, y_max, y_min = -Inf, Inf, -Inf, Inf
|
||||
for p in geom.points
|
||||
x_max = max(x_max, p.x)
|
||||
x_min = min(x_min, p.x)
|
||||
y_max = max(y_max, p.y)
|
||||
y_min = min(y_min, p.y)
|
||||
end
|
||||
x_center = (x_max + x_min) / 2.0
|
||||
y_center = (y_max + y_min) / 2.0
|
||||
return GeoPoint(round(y_center, digits = 5), round(x_center, digits = 5))
|
||||
end
|
||||
|
||||
function _download_file(url, output, expected_crc32)::Nothing
|
||||
if isfile(output)
|
||||
return
|
||||
end
|
||||
mkpath(dirname(output))
|
||||
@info "Downloading: $url"
|
||||
fname = download(url)
|
||||
actual_crc32 = open(crc32, fname)
|
||||
expected_crc32 == actual_crc32 || error("CRC32 mismatch")
|
||||
cp(fname, output)
|
||||
return
|
||||
end
|
||||
|
||||
function _download_zip(url, outputdir, expected_output_file, expected_crc32)::Nothing
|
||||
if isfile(expected_output_file)
|
||||
return
|
||||
end
|
||||
mkpath(outputdir)
|
||||
@info "Downloading: $url"
|
||||
zip_filename = download(url)
|
||||
actual_crc32 = open(crc32, zip_filename)
|
||||
expected_crc32 == actual_crc32 || error("CRC32 mismatch")
|
||||
open(zip_filename) do zip_file
|
||||
zr = ZipFile.Reader(zip_file)
|
||||
for file in zr.files
|
||||
open(joinpath(outputdir, file.name), "w") do output_file
|
||||
write(output_file, read(file))
|
||||
end
|
||||
end
|
||||
end
|
||||
return
|
||||
end
|
||||
|
||||
function _geodb_load_gov_census(;
|
||||
db_name,
|
||||
extract_cols,
|
||||
shp_crc32,
|
||||
shp_filename,
|
||||
shp_url,
|
||||
population_url,
|
||||
population_crc32,
|
||||
population_col,
|
||||
population_preprocess,
|
||||
population_join,
|
||||
)::Dict{String,GeoRegion}
|
||||
basedir = joinpath(dirname(@__FILE__), "..", "..", "data", db_name)
|
||||
csv_filename = "$basedir/locations.csv"
|
||||
if !isfile(csv_filename)
|
||||
# Download required files
|
||||
_download_zip(shp_url, basedir, joinpath(basedir, shp_filename), shp_crc32)
|
||||
_download_file(population_url, "$basedir/population.csv", population_crc32)
|
||||
|
||||
# Read shapefile
|
||||
@info "Processing: $shp_filename"
|
||||
table = Shapefile.Table(joinpath(basedir, shp_filename))
|
||||
geoms = Shapefile.shapes(table)
|
||||
|
||||
# Build empty dataframe
|
||||
df = DataFrame()
|
||||
cols = extract_cols(table, 1)
|
||||
for k in keys(cols)
|
||||
df[!, k] = []
|
||||
end
|
||||
df[!, "latitude"] = Float64[]
|
||||
df[!, "longitude"] = Float64[]
|
||||
|
||||
# Add regions to dataframe
|
||||
for (i, geom) in tqdm(enumerate(geoms))
|
||||
c = centroid(geom)
|
||||
cols = extract_cols(table, i)
|
||||
push!(df, [values(cols)..., c.lat, c.lon])
|
||||
end
|
||||
sort!(df)
|
||||
|
||||
# Join with population data
|
||||
population = DataFrame(CSV.File("$basedir/population.csv"))
|
||||
population_preprocess(population)
|
||||
population = population[:, [population_join, population_col]]
|
||||
rename!(population, population_col => "population")
|
||||
df = leftjoin(df, population, on = population_join)
|
||||
|
||||
# Write output
|
||||
CSV.write(csv_filename, df)
|
||||
end
|
||||
if db_name ∉ keys(DB_CACHE)
|
||||
csv = CSV.File(csv_filename)
|
||||
DB_CACHE[db_name] = Dict(
|
||||
string(row.id) => GeoRegion(
|
||||
centroid = GeoPoint(row.latitude, row.longitude),
|
||||
population = (row.population === missing ? 0 : row.population),
|
||||
) for row in csv
|
||||
)
|
||||
end
|
||||
return DB_CACHE[db_name]
|
||||
end
|
||||
|
||||
# 2018 US counties
|
||||
# -----------------------------------------------------------------------------
|
||||
function _extract_cols_2018_us_county(
|
||||
table::Shapefile.Table,
|
||||
i::Int,
|
||||
)::OrderedDict{String,Any}
|
||||
return OrderedDict(
|
||||
"id" => table.STATEFP[i] * table.COUNTYFP[i],
|
||||
"statefp" => table.STATEFP[i],
|
||||
"countyfp" => table.COUNTYFP[i],
|
||||
"name" => table.NAME[i],
|
||||
)
|
||||
end
|
||||
|
||||
function _population_preprocess_2018_us_county(df)
|
||||
df[!, "id"] = [@sprintf("%02d%03d", row.STATE, row.COUNTY) for row in eachrow(df)]
|
||||
end
|
||||
|
||||
function _geodb_load_2018_us_county()::Dict{String,GeoRegion}
|
||||
return _geodb_load_gov_census(
|
||||
db_name = "2018-us-county",
|
||||
extract_cols = _extract_cols_2018_us_county,
|
||||
shp_crc32 = 0x83eaec6d,
|
||||
shp_filename = "cb_2018_us_county_500k.shp",
|
||||
shp_url = "https://www2.census.gov/geo/tiger/GENZ2018/shp/cb_2018_us_county_500k.zip",
|
||||
population_url = "https://www2.census.gov/programs-surveys/popest/datasets/2010-2019/counties/totals/co-est2019-alldata.csv",
|
||||
population_crc32 = 0xf85b0405,
|
||||
population_col = "POPESTIMATE2019",
|
||||
population_join = "id",
|
||||
population_preprocess = _population_preprocess_2018_us_county,
|
||||
)
|
||||
end
|
||||
|
||||
# US States
|
||||
# -----------------------------------------------------------------------------
|
||||
function _extract_cols_us_state(table::Shapefile.Table, i::Int)::OrderedDict{String,Any}
|
||||
return OrderedDict(
|
||||
"id" => table.STUSPS[i],
|
||||
"statefp" => parse(Int, table.STATEFP[i]),
|
||||
"name" => table.NAME[i],
|
||||
)
|
||||
end
|
||||
|
||||
function _population_preprocess_us_state(df)
|
||||
rename!(df, "STATE" => "statefp")
|
||||
end
|
||||
|
||||
function _geodb_load_us_state()::Dict{String,GeoRegion}
|
||||
return _geodb_load_gov_census(
|
||||
db_name = "us-state",
|
||||
extract_cols = _extract_cols_us_state,
|
||||
shp_crc32 = 0x9469e5ca,
|
||||
shp_filename = "cb_2018_us_state_500k.shp",
|
||||
shp_url = "https://www2.census.gov/geo/tiger/GENZ2018/shp/cb_2018_us_state_500k.zip",
|
||||
population_url = "http://www2.census.gov/programs-surveys/popest/datasets/2010-2019/national/totals/nst-est2019-alldata.csv",
|
||||
population_crc32 = 0x191cc64c,
|
||||
population_col = "POPESTIMATE2019",
|
||||
population_join = "statefp",
|
||||
population_preprocess = _population_preprocess_us_state,
|
||||
)
|
||||
end
|
||||
|
||||
function geodb_load(db_name::AbstractString)::Dict{String,GeoRegion}
|
||||
db_name == "2018-us-county" && return _geodb_load_2018_us_county()
|
||||
db_name == "us-state" && return _geodb_load_us_state()
|
||||
error("Unknown database: $db_name")
|
||||
end
|
||||
|
||||
function geodb_query(name)::GeoRegion
|
||||
db_name, id = split(name, ":")
|
||||
return geodb_load(db_name)[id]
|
||||
end
|
||||
@@ -1,233 +1,76 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataStructures
|
||||
using JSON
|
||||
using JSONSchema
|
||||
using Printf
|
||||
using Statistics
|
||||
using OrderedCollections
|
||||
|
||||
function parsefile(path::String)::Instance
|
||||
return RELOG.parse(JSON.parsefile(path))
|
||||
return RELOG.parse(JSON.parsefile(path, dicttype = () -> OrderedDict()))
|
||||
end
|
||||
|
||||
function parse(json)::Instance
|
||||
basedir = dirname(@__FILE__)
|
||||
json_schema = JSON.parsefile("$basedir/../schemas/input.json")
|
||||
validate(json, Schema(json_schema))
|
||||
# Read parameters
|
||||
time_horizon = json["parameters"]["time horizon (years)"]
|
||||
building_period = json["parameters"]["building period (years)"]
|
||||
distance_metric = json["parameters"]["distance metric"]
|
||||
|
||||
T = json["parameters"]["time horizon (years)"]
|
||||
json_schema["definitions"]["TimeSeries"]["minItems"] = T
|
||||
json_schema["definitions"]["TimeSeries"]["maxItems"] = T
|
||||
validate(json, Schema(json_schema))
|
||||
|
||||
building_period = [1]
|
||||
if "building period (years)" in keys(json["parameters"])
|
||||
building_period = json["parameters"]["building period (years)"]
|
||||
end
|
||||
|
||||
distance_metric = EuclideanDistance()
|
||||
if "distance metric" in keys(json["parameters"])
|
||||
metric_name = json["parameters"]["distance metric"]
|
||||
if metric_name == "driving"
|
||||
distance_metric = KnnDrivingDistance()
|
||||
elseif metric_name == "Euclidean"
|
||||
# nop
|
||||
else
|
||||
error("Unknown distance metric: $metric_name")
|
||||
end
|
||||
end
|
||||
|
||||
plants = Plant[]
|
||||
# Read products
|
||||
products = Product[]
|
||||
collection_centers = CollectionCenter[]
|
||||
prod_name_to_product = Dict{String,Product}()
|
||||
products_by_name = OrderedDict{String,Product}()
|
||||
for (pname, pdict) in json["products"]
|
||||
tr_cost = pdict["transportation cost (\$/km/tonne)"]
|
||||
tr_energy = pdict["transportation energy (J/km/tonne)"]
|
||||
tr_emissions = pdict["transportation emissions (tonne/km/tonne)"]
|
||||
prod = Product(; name = pname, tr_cost, tr_energy, tr_emissions)
|
||||
push!(products, prod)
|
||||
products_by_name[pname] = prod
|
||||
end
|
||||
|
||||
# Create products
|
||||
for (product_name, product_dict) in json["products"]
|
||||
cost = product_dict["transportation cost (\$/km/tonne)"]
|
||||
energy = zeros(T)
|
||||
emissions = Dict()
|
||||
disposal_limit = zeros(T)
|
||||
disposal_cost = zeros(T)
|
||||
acquisition_cost = zeros(T)
|
||||
|
||||
if "transportation energy (J/km/tonne)" in keys(product_dict)
|
||||
energy = product_dict["transportation energy (J/km/tonne)"]
|
||||
# Read centers
|
||||
centers = Center[]
|
||||
centers_by_name = OrderedDict{String,Center}()
|
||||
for (cname, cdict) in json["centers"]
|
||||
latitude = cdict["latitude (deg)"]
|
||||
longitude = cdict["longitude (deg)"]
|
||||
input = nothing
|
||||
revenue = [0.0 for t = 1:time_horizon]
|
||||
if cdict["input"] !== nothing
|
||||
input = products_by_name[cdict["input"]]
|
||||
revenue = cdict["revenue (\$/tonne)"]
|
||||
end
|
||||
|
||||
if "transportation emissions (tonne/km/tonne)" in keys(product_dict)
|
||||
emissions = product_dict["transportation emissions (tonne/km/tonne)"]
|
||||
end
|
||||
|
||||
if "disposal limit (tonne)" in keys(product_dict)
|
||||
disposal_limit = product_dict["disposal limit (tonne)"]
|
||||
end
|
||||
|
||||
if "disposal cost (\$/tonne)" in keys(product_dict)
|
||||
disposal_cost = product_dict["disposal cost (\$/tonne)"]
|
||||
end
|
||||
|
||||
if "acquisition cost (\$/tonne)" in keys(product_dict)
|
||||
acquisition_cost = product_dict["acquisition cost (\$/tonne)"]
|
||||
end
|
||||
|
||||
prod_centers = []
|
||||
|
||||
product = Product(
|
||||
acquisition_cost = acquisition_cost,
|
||||
collection_centers = prod_centers,
|
||||
disposal_cost = disposal_cost,
|
||||
disposal_limit = disposal_limit,
|
||||
name = product_name,
|
||||
transportation_cost = cost,
|
||||
transportation_emissions = emissions,
|
||||
transportation_energy = energy,
|
||||
outputs = [products_by_name[p] for p in cdict["outputs"]]
|
||||
operating_cost = cdict["operating cost (\$)"]
|
||||
prod_dict(key, null_val) = OrderedDict(
|
||||
p => [v === nothing ? null_val : v for v in cdict[key][p.name]] for
|
||||
p in outputs
|
||||
)
|
||||
push!(products, product)
|
||||
prod_name_to_product[product_name] = product
|
||||
fixed_output = prod_dict("fixed output (tonne)", 0.0)
|
||||
var_output = prod_dict("variable output (tonne/tonne)", 0.0)
|
||||
collection_cost = prod_dict("collection cost (\$/tonne)", 0.0)
|
||||
disposal_limit = prod_dict("disposal limit (tonne)", Inf)
|
||||
disposal_cost = prod_dict("disposal cost (\$/tonne)", 0.0)
|
||||
|
||||
# Create collection centers
|
||||
if "initial amounts" in keys(product_dict)
|
||||
for (center_name, center_dict) in product_dict["initial amounts"]
|
||||
if "location" in keys(center_dict)
|
||||
region = geodb_query(center_dict["location"])
|
||||
center_dict["latitude (deg)"] = region.centroid.lat
|
||||
center_dict["longitude (deg)"] = region.centroid.lon
|
||||
end
|
||||
center = CollectionCenter(
|
||||
amount = center_dict["amount (tonne)"],
|
||||
index = length(collection_centers) + 1,
|
||||
latitude = center_dict["latitude (deg)"],
|
||||
longitude = center_dict["longitude (deg)"],
|
||||
name = center_name,
|
||||
product = product,
|
||||
)
|
||||
push!(prod_centers, center)
|
||||
push!(collection_centers, center)
|
||||
end
|
||||
end
|
||||
center = Center(;
|
||||
latitude,
|
||||
longitude,
|
||||
input,
|
||||
outputs,
|
||||
revenue,
|
||||
operating_cost,
|
||||
fixed_output,
|
||||
var_output,
|
||||
collection_cost,
|
||||
disposal_cost,
|
||||
disposal_limit,
|
||||
)
|
||||
push!(centers, center)
|
||||
centers_by_name[cname] = center
|
||||
end
|
||||
|
||||
# Create plants
|
||||
for (plant_name, plant_dict) in json["plants"]
|
||||
input = prod_name_to_product[plant_dict["input"]]
|
||||
output = Dict()
|
||||
|
||||
# Plant outputs
|
||||
if "outputs (tonne/tonne)" in keys(plant_dict)
|
||||
output = Dict(
|
||||
prod_name_to_product[key] => value for
|
||||
(key, value) in plant_dict["outputs (tonne/tonne)"] if value > 0
|
||||
)
|
||||
end
|
||||
|
||||
energy = zeros(T)
|
||||
emissions = Dict()
|
||||
|
||||
if "energy (GJ/tonne)" in keys(plant_dict)
|
||||
energy = plant_dict["energy (GJ/tonne)"]
|
||||
end
|
||||
|
||||
if "emissions (tonne/tonne)" in keys(plant_dict)
|
||||
emissions = plant_dict["emissions (tonne/tonne)"]
|
||||
end
|
||||
|
||||
for (location_name, location_dict) in plant_dict["locations"]
|
||||
sizes = PlantSize[]
|
||||
disposal_limit = Dict(p => [0.0 for t = 1:T] for p in keys(output))
|
||||
disposal_cost = Dict(p => [0.0 for t = 1:T] for p in keys(output))
|
||||
|
||||
# GeoDB
|
||||
if "location" in keys(location_dict)
|
||||
region = geodb_query(location_dict["location"])
|
||||
location_dict["latitude (deg)"] = region.centroid.lat
|
||||
location_dict["longitude (deg)"] = region.centroid.lon
|
||||
end
|
||||
|
||||
# Disposal
|
||||
if "disposal" in keys(location_dict)
|
||||
for (product_name, disposal_dict) in location_dict["disposal"]
|
||||
limit = [1e8 for t = 1:T]
|
||||
if "limit (tonne)" in keys(disposal_dict)
|
||||
limit = disposal_dict["limit (tonne)"]
|
||||
end
|
||||
disposal_limit[prod_name_to_product[product_name]] = limit
|
||||
disposal_cost[prod_name_to_product[product_name]] =
|
||||
disposal_dict["cost (\$/tonne)"]
|
||||
end
|
||||
end
|
||||
|
||||
# Capacities
|
||||
for (capacity_name, capacity_dict) in location_dict["capacities (tonne)"]
|
||||
push!(
|
||||
sizes,
|
||||
PlantSize(
|
||||
capacity = Base.parse(Float64, capacity_name),
|
||||
fixed_operating_cost = capacity_dict["fixed operating cost (\$)"],
|
||||
opening_cost = capacity_dict["opening cost (\$)"],
|
||||
variable_operating_cost = capacity_dict["variable operating cost (\$/tonne)"],
|
||||
),
|
||||
)
|
||||
end
|
||||
length(sizes) > 1 || push!(sizes, deepcopy(sizes[1]))
|
||||
sort!(sizes, by = x -> x.capacity)
|
||||
|
||||
# Initial capacity
|
||||
initial_capacity = 0
|
||||
if "initial capacity (tonne)" in keys(location_dict)
|
||||
initial_capacity = location_dict["initial capacity (tonne)"]
|
||||
end
|
||||
|
||||
# Storage
|
||||
storage_limit = 0
|
||||
storage_cost = zeros(T)
|
||||
if "storage" in keys(location_dict)
|
||||
storage_dict = location_dict["storage"]
|
||||
storage_limit = storage_dict["limit (tonne)"]
|
||||
storage_cost = storage_dict["cost (\$/tonne)"]
|
||||
end
|
||||
|
||||
# Validation: Capacities
|
||||
if length(sizes) != 2
|
||||
throw("At most two capacities are supported")
|
||||
end
|
||||
if sizes[1].variable_operating_cost != sizes[2].variable_operating_cost
|
||||
throw("Variable operating costs must be the same for all capacities")
|
||||
end
|
||||
|
||||
plant = Plant(
|
||||
disposal_cost = disposal_cost,
|
||||
disposal_limit = disposal_limit,
|
||||
emissions = emissions,
|
||||
energy = energy,
|
||||
index = length(plants) + 1,
|
||||
initial_capacity = initial_capacity,
|
||||
input = input,
|
||||
latitude = location_dict["latitude (deg)"],
|
||||
location_name = location_name,
|
||||
longitude = location_dict["longitude (deg)"],
|
||||
output = output,
|
||||
plant_name = plant_name,
|
||||
sizes = sizes,
|
||||
storage_cost = storage_cost,
|
||||
storage_limit = storage_limit,
|
||||
)
|
||||
|
||||
push!(plants, plant)
|
||||
end
|
||||
end
|
||||
|
||||
@info @sprintf("%12d collection centers", length(collection_centers))
|
||||
@info @sprintf("%12d candidate plant locations", length(plants))
|
||||
|
||||
return Instance(
|
||||
time = T,
|
||||
products = products,
|
||||
collection_centers = collection_centers,
|
||||
plants = plants,
|
||||
building_period = building_period,
|
||||
distance_metric = distance_metric,
|
||||
return Instance(;
|
||||
time_horizon,
|
||||
building_period,
|
||||
distance_metric,
|
||||
products,
|
||||
products_by_name,
|
||||
centers,
|
||||
centers_by_name,
|
||||
)
|
||||
end
|
||||
|
||||
@@ -1,73 +1,32 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
using OrderedCollections
|
||||
|
||||
using DataStructures
|
||||
using JSON
|
||||
using JSONSchema
|
||||
using Printf
|
||||
using Statistics
|
||||
|
||||
Base.@kwdef mutable struct Product
|
||||
acquisition_cost::Vector{Float64}
|
||||
collection_centers::Vector
|
||||
disposal_cost::Vector{Float64}
|
||||
disposal_limit::Vector{Float64}
|
||||
Base.@kwdef struct Product
|
||||
name::String
|
||||
transportation_cost::Vector{Float64}
|
||||
transportation_emissions::Dict{String,Vector{Float64}}
|
||||
transportation_energy::Vector{Float64}
|
||||
tr_cost::Vector{Float64}
|
||||
tr_energy::Vector{Float64}
|
||||
tr_emissions::OrderedDict{String,Vector{Float64}}
|
||||
end
|
||||
|
||||
Base.@kwdef mutable struct CollectionCenter
|
||||
amount::Vector{Float64}
|
||||
index::Int64
|
||||
Base.@kwdef struct Center
|
||||
latitude::Float64
|
||||
longitude::Float64
|
||||
name::String
|
||||
product::Product
|
||||
input::Union{Product,Nothing}
|
||||
outputs::Vector{Product}
|
||||
fixed_output::OrderedDict{Product,Vector{Float64}}
|
||||
var_output::OrderedDict{Product,Vector{Float64}}
|
||||
revenue::Vector{Float64}
|
||||
collection_cost::OrderedDict{Product,Vector{Float64}}
|
||||
operating_cost::Vector{Float64}
|
||||
disposal_limit::OrderedDict{Product,Vector{Float64}}
|
||||
disposal_cost::OrderedDict{Product,Vector{Float64}}
|
||||
end
|
||||
|
||||
Base.@kwdef mutable struct PlantSize
|
||||
capacity::Float64
|
||||
fixed_operating_cost::Vector{Float64}
|
||||
opening_cost::Vector{Float64}
|
||||
variable_operating_cost::Vector{Float64}
|
||||
end
|
||||
|
||||
Base.@kwdef mutable struct Plant
|
||||
disposal_cost::Dict{Product,Vector{Float64}}
|
||||
disposal_limit::Dict{Product,Vector{Float64}}
|
||||
emissions::Dict{String,Vector{Float64}}
|
||||
energy::Vector{Float64}
|
||||
index::Int64
|
||||
initial_capacity::Float64
|
||||
input::Product
|
||||
latitude::Float64
|
||||
location_name::String
|
||||
longitude::Float64
|
||||
output::Dict{Product,Float64}
|
||||
plant_name::String
|
||||
sizes::Vector{PlantSize}
|
||||
storage_cost::Vector{Float64}
|
||||
storage_limit::Float64
|
||||
end
|
||||
|
||||
|
||||
abstract type DistanceMetric end
|
||||
|
||||
Base.@kwdef mutable struct KnnDrivingDistance <: DistanceMetric
|
||||
tree = nothing
|
||||
ratios = nothing
|
||||
end
|
||||
|
||||
mutable struct EuclideanDistance <: DistanceMetric end
|
||||
|
||||
Base.@kwdef mutable struct Instance
|
||||
building_period::Vector{Int64}
|
||||
collection_centers::Vector{CollectionCenter}
|
||||
distance_metric::DistanceMetric
|
||||
plants::Vector{Plant}
|
||||
Base.@kwdef struct Instance
|
||||
building_period::Vector{Int}
|
||||
centers_by_name::OrderedDict{String,Center}
|
||||
centers::Vector{Center}
|
||||
distance_metric::String
|
||||
products_by_name::OrderedDict{String,Product}
|
||||
products::Vector{Product}
|
||||
time::Int64
|
||||
time_horizon::Int
|
||||
end
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataStructures
|
||||
using JSON
|
||||
using JSONSchema
|
||||
using Printf
|
||||
using Statistics
|
||||
|
||||
"""
    validate(json, schema)

Validate `json` against the given JSON `schema`, raising an
`ErrorException` with a human-readable message on failure. Returns
`nothing` when the document is valid.
"""
function validate(json, schema)
    result = JSONSchema.validate(json, schema)
    if result !== nothing
        # A SingleIssue has structured fields; other result types can be
        # converted to a plain message string.
        if result isa JSONSchema.SingleIssue
            msg = "$(result.reason) in $(result.path)"
        else
            msg = convert(String, result)
        end
        # Previously a raw String was thrown; throwing a proper Exception
        # (via `error`) is idiomatic and easier for callers to handle.
        error("Error parsing input file: $(msg)")
    end
end
|
||||
@@ -1,294 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using JuMP, LinearAlgebra, Geodesy, ProgressBars, Printf, DataStructures
|
||||
|
||||
"""
    build_model(instance, graph, optimizer)

Assemble the JuMP optimization model for `instance` over `graph`:
attach the instance and graph to the model, then create variables,
the objective function, and all constraints. Returns the model.
"""
function build_model(instance::Instance, graph::Graph, optimizer)::JuMP.Model
    model = Model(optimizer)
    model[:instance] = instance
    model[:graph] = graph
    # Build the model in stages; each step! mutates `model` in place.
    for step! in [
        create_vars!,
        create_objective_function!,
        create_shipping_node_constraints!,
        create_process_node_constraints!,
    ]
        step!(model)
    end
    return model
end
|
||||
|
||||
|
||||
"""
    create_vars!(model)

Create all decision variables and store them in the model's object
dictionary, each keyed by `(node, t)` for `t = 1:T`:

- `:flow`               — amount shipped along each arc.
- `:plant_dispose`      — amount disposed at plant shipping nodes
                          (bounded by the plant's per-product limit).
- `:collection_dispose` — amount disposed at collection centers
                          (product-level limits are added later).
- `:store`              — amount kept in plant storage.
- `:process`            — amount processed by each plant.
- `:open_plant`         — binary: plant is opened at time t.
- `:is_open`            — binary: plant is open at time t.
- `:capacity`           — installed capacity (up to maximum size).
- `:expansion`          — capacity beyond the minimum size.

Also stores numeric boundary constants at `t = 0` for `:is_open` and
`:expansion`, derived from the plant's initial capacity, so constraints
can reference `t-1` uniformly at `t = 1`.
"""
function create_vars!(model::JuMP.Model)
    graph, T = model[:graph], model[:instance].time
    model[:flow] =
        Dict((a, t) => @variable(model, lower_bound = 0) for a in graph.arcs, t = 1:T)
    model[:plant_dispose] = Dict(
        (n, t) => @variable(
            model,
            lower_bound = 0,
            upper_bound = n.location.disposal_limit[n.product][t]
        ) for n in values(graph.plant_shipping_nodes), t = 1:T
    )
    model[:collection_dispose] = Dict(
        (n, t) => @variable(model, lower_bound = 0,) for
        n in values(graph.collection_shipping_nodes), t = 1:T
    )
    model[:store] = Dict(
        (n, t) =>
            @variable(model, lower_bound = 0, upper_bound = n.location.storage_limit)
        for n in values(graph.process_nodes), t = 1:T
    )
    model[:process] = Dict(
        (n, t) => @variable(model, lower_bound = 0) for
        n in values(graph.process_nodes), t = 1:T
    )
    model[:open_plant] = Dict(
        (n, t) => @variable(model, binary = true) for n in values(graph.process_nodes),
        t = 1:T
    )
    # Dict{Tuple,Any} (not a concrete value type) because t = 0 entries
    # below hold plain numbers rather than JuMP variables.
    model[:is_open] = Dict{Tuple,Any}(
        (n, t) => @variable(model, binary = true) for n in values(graph.process_nodes),
        t = 1:T
    )
    model[:capacity] = Dict(
        (n, t) => @variable(
            model,
            lower_bound = 0,
            upper_bound = n.location.sizes[2].capacity
        ) for n in values(graph.process_nodes), t = 1:T
    )
    model[:expansion] = Dict{Tuple,Any}(
        (n, t) => @variable(
            model,
            lower_bound = 0,
            upper_bound = n.location.sizes[2].capacity - n.location.sizes[1].capacity
        ) for n in values(graph.process_nodes), t = 1:T
    )

    # Boundary constants
    for n in values(graph.process_nodes)
        m_init = n.location.initial_capacity
        m_min = n.location.sizes[1].capacity
        # A plant with any pre-existing capacity counts as open at t = 0.
        model[:is_open][n, 0] = m_init == 0 ? 0 : 1
        # Pre-existing expansion beyond the minimum size (never negative).
        model[:expansion][n, 0] = max(0, m_init - m_min)
    end
end
|
||||
|
||||
|
||||
# Marginal opening cost ($ per tonne of capacity) at time t: the slope of
# the line through the (capacity, opening_cost) points of the smallest and
# largest plant sizes. Returns 0.0 when there is no room to expand
# (maximum capacity <= minimum capacity).
function slope_open(plant, t)
    small, large = plant.sizes[1], plant.sizes[2]
    extra_capacity = large.capacity - small.capacity
    extra_capacity > 0 || return 0.0
    return (large.opening_cost[t] - small.opening_cost[t]) / extra_capacity
end
|
||||
|
||||
# Marginal fixed operating cost ($ per tonne of expanded capacity) at time
# t: the slope between the smallest and largest plant sizes' fixed
# operating costs. Returns 0.0 when the plant cannot be expanded
# (maximum capacity <= minimum capacity).
function slope_fix_oper_cost(plant, t)
    small, large = plant.sizes[1], plant.sizes[2]
    extra_capacity = large.capacity - small.capacity
    extra_capacity > 0 || return 0.0
    return (large.fixed_operating_cost[t] - small.fixed_operating_cost[t]) /
           extra_capacity
end
|
||||
|
||||
"""
    create_objective_function!(model)

Build and set the (minimization) objective: transportation, opening,
fixed/variable operating, storage, expansion, disposal and acquisition
costs summed over all nodes and time periods. Assumes `create_vars!`
has already populated the model's variable dictionaries.
"""
function create_objective_function!(model::JuMP.Model)
    graph, T = model[:graph], model[:instance].time
    obj = AffExpr(0.0)

    # Process node costs
    for n in values(graph.process_nodes), t = 1:T

        # Transportation and variable operating costs
        for a in n.incoming_arcs
            c = n.location.input.transportation_cost[t] * a.values["distance"]
            add_to_expression!(obj, c, model[:flow][a, t])
        end

        # Opening costs
        add_to_expression!(
            obj,
            n.location.sizes[1].opening_cost[t],
            model[:open_plant][n, t],
        )

        # Fixed operating costs (base)
        add_to_expression!(
            obj,
            n.location.sizes[1].fixed_operating_cost[t],
            model[:is_open][n, t],
        )

        # Fixed operating costs (expansion)
        add_to_expression!(obj, slope_fix_oper_cost(n.location, t), model[:expansion][n, t])

        # Processing costs
        add_to_expression!(
            obj,
            n.location.sizes[1].variable_operating_cost[t],
            model[:process][n, t],
        )

        # Storage costs
        add_to_expression!(obj, n.location.storage_cost[t], model[:store][n, t])

        # Expansion costs. Written as a telescoping sum over expansion
        # levels: each period t < T contributes (slope_t - slope_{t+1}),
        # and the final period contributes slope_T, so total expansion is
        # charged at the slope of the period in which it occurred. The
        # t = 0 boundary constant (pre-existing expansion) is credited
        # back at the first period's slope.
        if t < T
            add_to_expression!(
                obj,
                slope_open(n.location, t) - slope_open(n.location, t + 1),
                model[:expansion][n, t],
            )
        else
            add_to_expression!(obj, slope_open(n.location, t), model[:expansion][n, t])
            # expansion[n, 0] is a plain number, so this term is a constant.
            add_to_expression!(obj, -slope_open(n.location, 1) * model[:expansion][n, 0])
        end
    end

    # Plant shipping node costs
    for n in values(graph.plant_shipping_nodes), t = 1:T

        # Disposal costs
        add_to_expression!(
            obj,
            n.location.disposal_cost[n.product][t],
            model[:plant_dispose][n, t],
        )
    end

    # Collection shipping node costs
    for n in values(graph.collection_shipping_nodes), t = 1:T

        # Acquisition costs (constant: the full available amount is paid for)
        add_to_expression!(
            obj,
            n.location.product.acquisition_cost[t] * n.location.amount[t],
        )

        # Disposal costs -- in this case, we recover the acquisition cost.
        add_to_expression!(
            obj,
            (n.location.product.disposal_cost[t] - n.location.product.acquisition_cost[t]),
            model[:collection_dispose][n, t],
        )
    end

    @objective(model, Min, obj)
end
|
||||
|
||||
|
||||
"""
    create_shipping_node_constraints!(model)

Add flow-balance constraints for shipping nodes:

- Collection centers: outgoing flow plus disposal equals the available
  amount; these balance constraints are stored in `model[:eq_balance]`
  so their shadow prices (marginal costs) can be reported later.
- Per-product disposal limits summed across a product's collection centers.
- Plant shipping nodes: incoming flow equals outgoing flow plus disposal.
"""
function create_shipping_node_constraints!(model::JuMP.Model)
    graph, T = model[:graph], model[:instance].time
    model[:eq_balance] = OrderedDict()
    for t = 1:T
        # Collection centers
        for n in graph.collection_shipping_nodes
            model[:eq_balance][n, t] = @constraint(
                model,
                sum(model[:flow][a, t] for a in n.outgoing_arcs) +
                model[:collection_dispose][n, t] == n.location.amount[t]
            )
        end
        # Product-level disposal limits across all of a product's centers.
        for prod in model[:instance].products
            if isempty(prod.collection_centers)
                continue
            end
            expr = AffExpr()
            for center in prod.collection_centers
                n = graph.collection_center_to_node[center]
                add_to_expression!(expr, model[:collection_dispose][n, t])
            end
            @constraint(model, expr <= prod.disposal_limit[t])
        end

        # Plants
        for n in graph.plant_shipping_nodes
            @constraint(
                model,
                sum(model[:flow][a, t] for a in n.incoming_arcs) ==
                sum(model[:flow][a, t] for a in n.outgoing_arcs) +
                model[:plant_dispose][n, t]
            )
        end
    end

end
|
||||
|
||||
|
||||
"""
    create_process_node_constraints!(model)

Add per-plant constraints linking flow, processing, storage, capacity,
expansion, and open/closed status for every time period. References to
`t-1` at `t = 1` resolve to the numeric boundary constants stored at
`t = 0` by `create_vars!`.
"""
function create_process_node_constraints!(model::JuMP.Model)
    graph, T = model[:graph], model[:instance].time

    for t = 1:T, n in graph.process_nodes
        # Total material arriving at this plant in period t.
        input_sum = AffExpr(0.0)
        for a in n.incoming_arcs
            add_to_expression!(input_sum, 1.0, model[:flow][a, t])
        end

        # Output amount is implied by amount processed
        for a in n.outgoing_arcs
            @constraint(
                model,
                model[:flow][a, t] == a.values["weight"] * model[:process][n, t]
            )
        end

        # If plant is closed, capacity is zero
        @constraint(
            model,
            model[:capacity][n, t] <= n.location.sizes[2].capacity * model[:is_open][n, t]
        )

        # If plant is closed, storage cannot be used
        @constraint(
            model,
            model[:store][n, t] <= n.location.storage_limit * model[:is_open][n, t]
        )

        # If plant is open, capacity is greater than base
        @constraint(
            model,
            model[:capacity][n, t] >= n.location.sizes[1].capacity * model[:is_open][n, t]
        )

        # Capacity is linked to expansion
        @constraint(
            model,
            model[:capacity][n, t] <=
            n.location.sizes[1].capacity + model[:expansion][n, t]
        )

        # Can only process up to capacity
        @constraint(model, model[:process][n, t] <= model[:capacity][n, t])

        # Plant capacity can only increase over time
        if t > 1
            @constraint(model, model[:capacity][n, t] >= model[:capacity][n, t-1])
        end
        # Expansion is monotone as well; at t = 1 the right-hand side is
        # the numeric boundary constant expansion[n, 0].
        @constraint(model, model[:expansion][n, t] >= model[:expansion][n, t-1])

        # Amount received equals amount processed plus stored
        store_in = 0
        if t > 1
            store_in = model[:store][n, t-1]
        end
        # No material may be left in storage at the end of the horizon.
        if t == T
            @constraint(model, model[:store][n, t] == 0)
        end
        @constraint(
            model,
            input_sum + store_in == model[:store][n, t] + model[:process][n, t]
        )


        # Plant is currently open if it was already open in the previous time period or
        # if it was built just now
        @constraint(
            model,
            model[:is_open][n, t] == model[:is_open][n, t-1] + model[:open_plant][n, t]
        )

        # Plant can only be opened during building period
        if t ∉ model[:instance].building_period
            @constraint(model, model[:open_plant][n, t] == 0)
        end
    end
end
|
||||
@@ -1,243 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using JuMP, LinearAlgebra, Geodesy, ProgressBars, Printf, DataStructures
|
||||
|
||||
"""
    get_solution(model; marginal_costs = true)

Extract the solved model into a nested `OrderedDict` with sections
"Plants", "Products", "Costs", "Energy" and "Emissions"; every
per-period quantity is a vector indexed by `t = 1:T`. Plants with no
meaningful flow, disposal or output are omitted from the "Plants"
section. When `marginal_costs` is true, shadow prices of the
collection-center balance constraints are reported (the model must have
been solved as an LP with duals available).
"""
function get_solution(model::JuMP.Model; marginal_costs = true)
    graph, instance = model[:graph], model[:instance]
    T = instance.time

    # Aggregate accumulators; per-plant/per-product entries are added below.
    output = OrderedDict(
        "Plants" => OrderedDict(),
        "Products" => OrderedDict(),
        "Costs" => OrderedDict(
            "Fixed operating (\$)" => zeros(T),
            "Variable operating (\$)" => zeros(T),
            "Opening (\$)" => zeros(T),
            "Transportation (\$)" => zeros(T),
            "Disposal (\$)" => zeros(T),
            "Expansion (\$)" => zeros(T),
            "Storage (\$)" => zeros(T),
            "Total (\$)" => zeros(T),
        ),
        "Energy" =>
            OrderedDict("Plants (GJ)" => zeros(T), "Transportation (GJ)" => zeros(T)),
        "Emissions" => OrderedDict(
            "Plants (tonne)" => OrderedDict(),
            "Transportation (tonne)" => OrderedDict(),
        ),
    )

    # Map each plant to its process node and to the shipping nodes fed by it.
    plant_to_process_node = OrderedDict(n.location => n for n in graph.process_nodes)
    plant_to_shipping_nodes = OrderedDict()
    for p in instance.plants
        plant_to_shipping_nodes[p] = []
        for a in plant_to_process_node[p].outgoing_arcs
            push!(plant_to_shipping_nodes[p], a.dest)
        end
    end

    # Products
    for n in graph.collection_shipping_nodes
        location_dict = OrderedDict{Any,Any}(
            "Latitude (deg)" => n.location.latitude,
            "Longitude (deg)" => n.location.longitude,
            "Amount (tonne)" => n.location.amount,
            "Dispose (tonne)" =>
                [JuMP.value(model[:collection_dispose][n, t]) for t = 1:T],
            # Acquisition is paid only on the amount actually collected
            # (available minus disposed).
            "Acquisition cost (\$)" => [
                (n.location.amount[t] - JuMP.value(model[:collection_dispose][n, t])) * n.location.product.acquisition_cost[t] for t = 1:T
            ],
            "Disposal cost (\$)" => [
                (
                    JuMP.value(model[:collection_dispose][n, t]) *
                    n.location.product.disposal_cost[t]
                ) for t = 1:T
            ],
        )
        if marginal_costs
            # Shadow price of the balance constraint = marginal cost of one
            # additional tonne at this collection center.
            location_dict["Marginal cost (\$/tonne)"] = [
                round(abs(JuMP.shadow_price(model[:eq_balance][n, t])), digits = 2) for
                t = 1:T
            ]
        end
        if n.product.name ∉ keys(output["Products"])
            output["Products"][n.product.name] = OrderedDict()
        end
        output["Products"][n.product.name][n.location.name] = location_dict
    end

    # Plants
    for plant in instance.plants
        # A plant with no input flow, output flow, or disposal anywhere in
        # the horizon is left out of the report entirely.
        skip_plant = true
        process_node = plant_to_process_node[plant]
        plant_dict = OrderedDict{Any,Any}(
            "Input" => OrderedDict(),
            "Output" =>
                OrderedDict("Send" => OrderedDict(), "Dispose" => OrderedDict()),
            "Input product" => plant.input.name,
            "Total input (tonne)" => [0.0 for t = 1:T],
            "Total output" => OrderedDict(),
            "Latitude (deg)" => plant.latitude,
            "Longitude (deg)" => plant.longitude,
            "Capacity (tonne)" =>
                [JuMP.value(model[:capacity][process_node, t]) for t = 1:T],
            "Opening cost (\$)" => [
                JuMP.value(model[:open_plant][process_node, t]) *
                plant.sizes[1].opening_cost[t] for t = 1:T
            ],
            "Fixed operating cost (\$)" => [
                JuMP.value(model[:is_open][process_node, t]) *
                plant.sizes[1].fixed_operating_cost[t] +
                JuMP.value(model[:expansion][process_node, t]) *
                slope_fix_oper_cost(plant, t) for t = 1:T
            ],
            # Expansion cost charged in the period the expansion happened;
            # at t = 1 the baseline is the numeric t = 0 boundary constant.
            "Expansion cost (\$)" => [
                (
                    if t == 1
                        slope_open(plant, t) * (
                            JuMP.value(model[:expansion][process_node, t]) -
                            model[:expansion][process_node, 0]
                        )
                    else
                        slope_open(plant, t) * (
                            JuMP.value(model[:expansion][process_node, t]) -
                            JuMP.value(model[:expansion][process_node, t-1])
                        )
                    end
                ) for t = 1:T
            ],
            "Process (tonne)" =>
                [JuMP.value(model[:process][process_node, t]) for t = 1:T],
            "Variable operating cost (\$)" => [
                JuMP.value(model[:process][process_node, t]) *
                plant.sizes[1].variable_operating_cost[t] for t = 1:T
            ],
            "Storage (tonne)" =>
                [JuMP.value(model[:store][process_node, t]) for t = 1:T],
            "Storage cost (\$)" => [
                JuMP.value(model[:store][process_node, t]) * plant.storage_cost[t]
                for t = 1:T
            ],
        )
        output["Costs"]["Fixed operating (\$)"] += plant_dict["Fixed operating cost (\$)"]
        output["Costs"]["Variable operating (\$)"] +=
            plant_dict["Variable operating cost (\$)"]
        output["Costs"]["Opening (\$)"] += plant_dict["Opening cost (\$)"]
        output["Costs"]["Expansion (\$)"] += plant_dict["Expansion cost (\$)"]
        output["Costs"]["Storage (\$)"] += plant_dict["Storage cost (\$)"]

        # Inputs
        for a in process_node.incoming_arcs
            vals = [JuMP.value(model[:flow][a, t]) for t = 1:T]
            # Ignore numerically-zero flows.
            if sum(vals) <= 1e-3
                continue
            end
            skip_plant = false
            dict = OrderedDict{Any,Any}(
                "Amount (tonne)" => vals,
                "Distance (km)" => a.values["distance"],
                "Latitude (deg)" => a.source.location.latitude,
                "Longitude (deg)" => a.source.location.longitude,
                "Transportation cost (\$)" =>
                    a.source.product.transportation_cost .* vals .* a.values["distance"],
                "Transportation energy (J)" =>
                    vals .* a.values["distance"] .* a.source.product.transportation_energy,
                "Emissions (tonne)" => OrderedDict(),
            )
            emissions_dict = output["Emissions"]["Transportation (tonne)"]
            for (em_name, em_values) in a.source.product.transportation_emissions
                dict["Emissions (tonne)"][em_name] =
                    em_values .* dict["Amount (tonne)"] .* a.values["distance"]
                if em_name ∉ keys(emissions_dict)
                    emissions_dict[em_name] = zeros(T)
                end
                emissions_dict[em_name] += dict["Emissions (tonne)"][em_name]
            end
            # Shipments from collection centers are reported under the
            # pseudo plant name "Origin".
            if a.source.location isa CollectionCenter
                plant_name = "Origin"
                location_name = a.source.location.name
            else
                plant_name = a.source.location.plant_name
                location_name = a.source.location.location_name
            end

            if plant_name ∉ keys(plant_dict["Input"])
                plant_dict["Input"][plant_name] = OrderedDict()
            end
            plant_dict["Input"][plant_name][location_name] = dict
            plant_dict["Total input (tonne)"] += vals
            output["Costs"]["Transportation (\$)"] += dict["Transportation cost (\$)"]
            output["Energy"]["Transportation (GJ)"] +=
                dict["Transportation energy (J)"] / 1e9
        end

        plant_dict["Energy (GJ)"] = plant_dict["Total input (tonne)"] .* plant.energy
        output["Energy"]["Plants (GJ)"] += plant_dict["Energy (GJ)"]

        plant_dict["Emissions (tonne)"] = OrderedDict()
        emissions_dict = output["Emissions"]["Plants (tonne)"]
        for (em_name, em_values) in plant.emissions
            plant_dict["Emissions (tonne)"][em_name] =
                em_values .* plant_dict["Total input (tonne)"]
            if em_name ∉ keys(emissions_dict)
                emissions_dict[em_name] = zeros(T)
            end
            emissions_dict[em_name] += plant_dict["Emissions (tonne)"][em_name]
        end

        # Outputs
        for shipping_node in plant_to_shipping_nodes[plant]
            product_name = shipping_node.product.name
            plant_dict["Total output"][product_name] = zeros(T)
            plant_dict["Output"]["Send"][product_name] = product_dict = OrderedDict()

            disposal_amount =
                [JuMP.value(model[:plant_dispose][shipping_node, t]) for t = 1:T]
            if sum(disposal_amount) > 1e-5
                skip_plant = false
                plant_dict["Output"]["Dispose"][product_name] =
                    disposal_dict = OrderedDict()
                disposal_dict["Amount (tonne)"] =
                    [JuMP.value(model[:plant_dispose][shipping_node, t]) for t = 1:T]
                disposal_dict["Cost (\$)"] = [
                    disposal_dict["Amount (tonne)"][t] *
                    plant.disposal_cost[shipping_node.product][t] for t = 1:T
                ]
                plant_dict["Total output"][product_name] += disposal_amount
                output["Costs"]["Disposal (\$)"] += disposal_dict["Cost (\$)"]
            end

            for a in shipping_node.outgoing_arcs
                vals = [JuMP.value(model[:flow][a, t]) for t = 1:T]
                if sum(vals) <= 1e-3
                    continue
                end
                skip_plant = false
                dict = OrderedDict(
                    "Amount (tonne)" => vals,
                    "Distance (km)" => a.values["distance"],
                    "Latitude (deg)" => a.dest.location.latitude,
                    "Longitude (deg)" => a.dest.location.longitude,
                )
                if a.dest.location.plant_name ∉ keys(product_dict)
                    product_dict[a.dest.location.plant_name] = OrderedDict()
                end
                product_dict[a.dest.location.plant_name][a.dest.location.location_name] =
                    dict
                plant_dict["Total output"][product_name] += vals
            end
        end

        if !skip_plant
            if plant.plant_name ∉ keys(output["Plants"])
                output["Plants"][plant.plant_name] = OrderedDict()
            end
            output["Plants"][plant.plant_name][plant.location_name] = plant_dict
        end
    end

    # Grand total across all cost categories (element-wise over periods).
    output["Costs"]["Total (\$)"] = sum(values(output["Costs"]))
    return output
end
|
||||
@@ -1,99 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using JuMP
|
||||
|
||||
"""
    resolve(model_old, filename; kwargs...)

Read a problem instance from `filename` and re-solve it with the plant
decisions from the previously solved `model_old` held fixed.
"""
function resolve(model_old, filename::AbstractString; kwargs...)::OrderedDict
    @info "Reading $filename..."
    return resolve(model_old, RELOG.parsefile(filename); kwargs...)
end
|
||||
|
||||
"""
    resolve(model_old, instance; optimizer = nothing)

Re-solve `instance` with all plant decisions (open/close status,
capacity, expansion) fixed to their values in the previously solved
`model_old`. With the integer variables fixed the remaining problem is a
linear program, so dual values (marginal costs) are available. Returns
the solution dict, or an empty `OrderedDict` if no solution was found.
"""
function resolve(model_old, instance::Instance; optimizer = nothing)::OrderedDict
    milp_optimizer = lp_optimizer = optimizer
    if optimizer === nothing
        milp_optimizer = _get_default_milp_optimizer()
        lp_optimizer = _get_default_lp_optimizer()
    end

    @info "Building new graph..."
    graph = build_graph(instance)
    _print_graph_stats(instance, graph)

    @info "Building new optimization model..."
    model_new = RELOG.build_model(instance, graph, milp_optimizer)

    @info "Fixing decision variables..."
    _fix_plants!(model_old, model_new)
    JuMP.set_optimizer(model_new, lp_optimizer)

    # With the binaries fixed above, this is an LP solve (the previous
    # message incorrectly said "MILP").
    @info "Optimizing LP..."
    JuMP.optimize!(model_new)

    if !has_values(model_new)
        @warn("No solution available")
        return OrderedDict()
    end

    @info "Extracting solution..."
    solution = get_solution(model_new, marginal_costs = true)

    return solution
end
|
||||
|
||||
# Look up the process node in `model_new`'s graph that corresponds to
# `node_old` (matched by plant name and location name).
function _corresponding_process_node(model_new, node_old)
    return model_new[:graph].name_to_process_node_map[(
        node_old.location.plant_name,
        node_old.location.location_name,
    )]
end

"""
    _fix_plants!(model_old, model_new)

Copy the plant decisions from the solved `model_old` into `model_new` by
fixing the corresponding variables: `:open_plant`, `:is_open`,
`:capacity` and `:expansion`. Binary variables are relaxed before being
fixed; bounded continuous variables have their bounds removed so that
`JuMP.fix` does not conflict with them. Entries at `t = 0` are numeric
boundary constants, not variables, and are skipped.
"""
function _fix_plants!(model_old, model_new)::Nothing
    # (variable key, binary?) pairs; the same fixing procedure applies to
    # all four families, so iterate instead of repeating the loop body.
    for (key, is_binary) in
        [(:open_plant, true), (:is_open, true), (:capacity, false), (:expansion, false)]
        for ((node_old, t), var_old) in model_old[key]
            t > 0 || continue  # skip t = 0 boundary constants
            node_new = _corresponding_process_node(model_new, node_old)
            var_new = model_new[key][node_new, t]
            if is_binary
                JuMP.unset_binary(var_new)
            else
                JuMP.delete_lower_bound(var_new)
                JuMP.delete_upper_bound(var_new)
            end
            JuMP.fix(var_new, JuMP.value(var_old))
        end
    end
    return nothing
end
|
||||
@@ -1,126 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using JuMP, LinearAlgebra, Geodesy, ProgressBars, Printf, DataStructures, HiGHS
|
||||
|
||||
# Default MILP solver: HiGHS with default attributes.
_get_default_milp_optimizer() = optimizer_with_attributes(HiGHS.Optimizer)
|
||||
|
||||
# Default LP solver: HiGHS with default attributes.
_get_default_lp_optimizer() = optimizer_with_attributes(HiGHS.Optimizer)
|
||||
|
||||
|
||||
# Log a one-line summary for each dimension of the instance/graph:
# time periods, node counts by kind, and arc count.
function _print_graph_stats(instance::Instance, graph::Graph)::Nothing
    stats = [
        (instance.time, "time periods"),
        (length(graph.process_nodes), "process nodes"),
        (length(graph.plant_shipping_nodes), "shipping nodes (plant)"),
        (length(graph.collection_shipping_nodes), "shipping nodes (collection)"),
        (length(graph.arcs), "arcs"),
    ]
    for (count, label) in stats
        @info @sprintf("%12d %s", count, label)
    end
    return
end
|
||||
|
||||
"""
    solve(instance::Instance; optimizer = nothing, lp_optimizer = nothing,
          output = nothing, marginal_costs = true, return_model = false)

Build the graph and optimization model for `instance`, solve the MILP,
and extract the solution. When `marginal_costs` is true the model is
re-solved as an LP with the integer variables fixed so that duals are
available. When `output` is given, the solution is also written to that
file. Returns the solution dict, or `(solution, model)` when
`return_model` is true. Throws an error when no solution is found.
"""
function solve(
    instance::Instance;
    optimizer = nothing,
    lp_optimizer = nothing,
    output = nothing,
    marginal_costs = true,
    return_model = false,
)

    # Use `===`/`!==` for `nothing` comparisons (idiomatic; `==` was used
    # previously).
    if lp_optimizer === nothing
        if optimizer === nothing
            # If neither is provided, use default LP optimizer.
            lp_optimizer = _get_default_lp_optimizer()
        else
            # If only MIP optimizer is provided, use it as
            # LP solver too.
            lp_optimizer = optimizer
        end
    end

    if optimizer === nothing
        optimizer = _get_default_milp_optimizer()
    end

    @info "Building graph..."
    graph = RELOG.build_graph(instance)
    _print_graph_stats(instance, graph)

    @info "Building optimization model..."
    model = RELOG.build_model(instance, graph, optimizer)

    @info "Optimizing MILP..."
    JuMP.optimize!(model)

    if !has_values(model)
        error("No solution available")
    end

    if marginal_costs
        # Fix all binaries at their MILP values and re-solve the resulting
        # LP so that shadow prices are well-defined.
        @info "Re-optimizing with integer variables fixed..."
        all_vars = JuMP.all_variables(model)
        vals = OrderedDict(var => JuMP.value(var) for var in all_vars)
        JuMP.set_optimizer(model, lp_optimizer)
        for var in all_vars
            if JuMP.is_binary(var)
                JuMP.unset_binary(var)
                JuMP.fix(var, vals[var])
            end
        end
        JuMP.optimize!(model)
    end

    @info "Extracting solution..."
    solution = get_solution(model, marginal_costs = marginal_costs)

    if output !== nothing
        write(solution, output)
    end

    if return_model
        return solution, model
    else
        return solution
    end
end
|
||||
|
||||
"""
    solve(filename; heuristic = false, kwargs...)

Read a problem instance from `filename` and solve it. When `heuristic`
is true and the instance has more than one time period, first solve a
compressed single-period version, keep only the plant locations used in
that solution, and then solve the full multi-period instance restricted
to those candidates.
"""
function solve(filename::AbstractString; heuristic = false, kwargs...)
    @info "Reading $filename..."
    instance = RELOG.parsefile(filename)
    if heuristic && instance.time > 1
        @info "Solving single-period version..."
        compressed = _compress(instance)
        csol, _ = solve(
            compressed;
            return_model = true,
            output = nothing,
            marginal_costs = false,
            kwargs...,
        )
        @info "Filtering candidate locations..."
        # (plant type, location) pairs actually used in the single-period
        # solution. (The location dict values are unused here.)
        selected_pairs = [
            (plant_name, location_name) for
            (plant_name, plant_dict) in csol["Plants"] for
            (location_name, _) in plant_dict
        ]
        instance.plants = [
            p for p in instance.plants if (p.plant_name, p.location_name) in selected_pairs
        ]
        @info "Solving original version..."
    end
    sol = solve(instance; kwargs...)
    return sol
end
|
||||
@@ -1,38 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataFrames
|
||||
using CSV
|
||||
|
||||
"""
    plant_emissions_report(solution)::DataFrame

Tabulate emissions from a solution dict: one row per
(plant type, location, emission type, year), with amounts rounded to
six digits.
"""
function plant_emissions_report(solution)::DataFrame
    df = DataFrame(
        "plant type" => String[],
        "location name" => String[],
        "year" => Int[],
        "emission type" => String[],
        "emission amount (tonne)" => Float64[],
    )
    # Number of time periods, inferred from any per-period vector.
    T = length(solution["Energy"]["Plants (GJ)"])
    for (plant, locations) in solution["Plants"],
        (location, location_dict) in locations,
        (emission, amounts) in location_dict["Emissions (tonne)"],
        year in 1:T

        push!(df, [plant, location, year, emission, round(amounts[year], digits = 6)])
    end
    return df
end
|
||||
|
||||
# Write the plant emissions report for `solution` to `filename` as CSV.
write_plant_emissions_report(solution, filename) =
    CSV.write(filename, plant_emissions_report(solution))
|
||||
@@ -1,66 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataFrames
|
||||
using CSV
|
||||
|
||||
"""
    plant_outputs_report(solution)::DataFrame

Tabulate plant outputs from a solution dict: one row per
(plant type, location, year, product), summarizing the amount produced,
the amount sent to other plants, the amount disposed, and the disposal
cost. All amounts are rounded to six digits.
"""
function plant_outputs_report(solution)::DataFrame
    df = DataFrame()
    df."plant type" = String[]
    df."location name" = String[]
    df."year" = Int[]
    df."product name" = String[]
    df."amount produced (tonne)" = Float64[]
    df."amount sent (tonne)" = Float64[]
    df."amount disposed (tonne)" = Float64[]
    df."disposal cost (\$)" = Float64[]
    # Number of time periods, inferred from any per-period vector.
    T = length(solution["Energy"]["Plants (GJ)"])
    for (plant_name, plant_dict) in solution["Plants"]
        for (location_name, location_dict) in plant_dict
            for (product_name, amount_produced) in location_dict["Total output"]
                send_dict = location_dict["Output"]["Send"]
                disposal_dict = location_dict["Output"]["Dispose"]

                # Amount sent: sum over all destination plants/locations.
                sent = zeros(T)
                if product_name in keys(send_dict)
                    for (dst_plant_name, dst_plant_dict) in send_dict[product_name]
                        for (dst_location_name, dst_location_dict) in dst_plant_dict
                            sent += dst_location_dict["Amount (tonne)"]
                        end
                    end
                end
                sent = round.(sent, digits = 6)

                # Disposal amount/cost; zero when nothing was disposed.
                disposal_amount = zeros(T)
                disposal_cost = zeros(T)
                if product_name in keys(disposal_dict)
                    disposal_amount += disposal_dict[product_name]["Amount (tonne)"]
                    disposal_cost += disposal_dict[product_name]["Cost (\$)"]
                end
                disposal_amount = round.(disposal_amount, digits = 6)
                disposal_cost = round.(disposal_cost, digits = 6)

                for year = 1:T
                    push!(
                        df,
                        [
                            plant_name,
                            location_name,
                            year,
                            product_name,
                            round(amount_produced[year], digits = 6),
                            sent[year],
                            disposal_amount[year],
                            disposal_cost[year],
                        ],
                    )
                end
            end
        end
    end
    return df
end
|
||||
|
||||
# Write the plant outputs report for `solution` to `filename` as CSV.
write_plant_outputs_report(solution, filename) =
    CSV.write(filename, plant_outputs_report(solution))
|
||||
@@ -1,79 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataFrames
|
||||
using CSV
|
||||
|
||||
"""
    plants_report(solution)::DataFrame

Build a table with one row per (plant type, location, year) summarizing
capacities, material flows, energy usage and costs, read from the `solution`
dictionary produced by the solver. All numeric values are rounded to
6 digits.
"""
function plants_report(solution)::DataFrame
    df = DataFrame()
    df."plant type" = String[]
    df."location name" = String[]
    df."year" = Int[]
    df."latitude (deg)" = Float64[]
    df."longitude (deg)" = Float64[]
    df."capacity (tonne)" = Float64[]
    df."amount processed (tonne)" = Float64[]
    df."amount received (tonne)" = Float64[]
    df."amount in storage (tonne)" = Float64[]
    df."utilization factor (%)" = Float64[]
    df."energy (GJ)" = Float64[]
    df."opening cost (\$)" = Float64[]
    df."expansion cost (\$)" = Float64[]
    df."fixed operating cost (\$)" = Float64[]
    df."variable operating cost (\$)" = Float64[]
    df."storage cost (\$)" = Float64[]
    df."total cost (\$)" = Float64[]
    # Number of years in the planning horizon.
    T = length(solution["Energy"]["Plants (GJ)"])
    for (plant_name, plant_dict) in solution["Plants"]
        for (location_name, location_dict) in plant_dict
            for year = 1:T
                capacity = round(location_dict["Capacity (tonne)"][year], digits = 6)
                received = round(location_dict["Total input (tonne)"][year], digits = 6)
                processed = round(location_dict["Process (tonne)"][year], digits = 6)
                in_storage = round(location_dict["Storage (tonne)"][year], digits = 6)
                # BUGFIX: guard against division by zero. A plant with zero
                # capacity in a given year previously produced NaN (or Inf)
                # in the utilization column; report 0% utilization instead.
                utilization_factor =
                    capacity > 0 ? round(processed / capacity * 100.0, digits = 6) : 0.0
                energy = round(location_dict["Energy (GJ)"][year], digits = 6)
                latitude = round(location_dict["Latitude (deg)"], digits = 6)
                longitude = round(location_dict["Longitude (deg)"], digits = 6)
                opening_cost = round(location_dict["Opening cost (\$)"][year], digits = 6)
                expansion_cost =
                    round(location_dict["Expansion cost (\$)"][year], digits = 6)
                fixed_cost =
                    round(location_dict["Fixed operating cost (\$)"][year], digits = 6)
                var_cost =
                    round(location_dict["Variable operating cost (\$)"][year], digits = 6)
                storage_cost = round(location_dict["Storage cost (\$)"][year], digits = 6)
                # Total is the sum of the already-rounded components, rounded
                # again to suppress floating-point noise.
                total_cost = round(
                    opening_cost + expansion_cost + fixed_cost + var_cost + storage_cost,
                    digits = 6,
                )
                push!(
                    df,
                    [
                        plant_name,
                        location_name,
                        year,
                        latitude,
                        longitude,
                        capacity,
                        processed,
                        received,
                        in_storage,
                        utilization_factor,
                        energy,
                        opening_cost,
                        expansion_cost,
                        fixed_cost,
                        var_cost,
                        storage_cost,
                        total_cost,
                    ],
                )
            end
        end
    end
    return df
end
|
||||
|
||||
# Write the plants report for `solution` to `filename` as a CSV file.
function write_plants_report(solution, filename)
    return CSV.write(filename, plants_report(solution))
end
|
||||
@@ -1,56 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataFrames
|
||||
using CSV
|
||||
|
||||
"""
    products_report(solution; marginal_costs = true)::DataFrame

Build a table with one row per (product, location, year) listing amounts,
disposal quantities and the associated costs from the `solution` dictionary.
When `marginal_costs` is false, the marginal cost column is filled with
zeros instead of being read from `solution` (useful when the LP relaxation
was not solved and no duals are available).
"""
function products_report(solution; marginal_costs = true)::DataFrame
    # NOTE: `marginal_costs` previously had no default, making it a required
    # keyword; defaulting it to `true` matches `write_products_report` and is
    # backward compatible.
    df = DataFrame()
    df."product name" = String[]
    df."location name" = String[]
    df."latitude (deg)" = Float64[]
    df."longitude (deg)" = Float64[]
    df."year" = Int[]
    df."amount (tonne)" = Float64[]
    df."amount disposed (tonne)" = Float64[]
    df."marginal cost (\$/tonne)" = Float64[]
    df."acquisition cost (\$)" = Float64[]
    df."disposal cost (\$)" = Float64[]
    # Number of years in the planning horizon.
    T = length(solution["Energy"]["Plants (GJ)"])
    for (prod_name, prod_dict) in solution["Products"]
        for (location_name, location_dict) in prod_dict
            for year = 1:T
                marginal_cost =
                    marginal_costs ? location_dict["Marginal cost (\$/tonne)"][year] : 0.0
                latitude = round(location_dict["Latitude (deg)"], digits = 6)
                longitude = round(location_dict["Longitude (deg)"], digits = 6)
                amount = location_dict["Amount (tonne)"][year]
                amount_disposed = location_dict["Dispose (tonne)"][year]
                acquisition_cost = location_dict["Acquisition cost (\$)"][year]
                disposal_cost = location_dict["Disposal cost (\$)"][year]
                push!(
                    df,
                    [
                        prod_name,
                        location_name,
                        latitude,
                        longitude,
                        year,
                        amount,
                        amount_disposed,
                        marginal_cost,
                        acquisition_cost,
                        disposal_cost,
                    ],
                )
            end
        end
    end
    return df
end
|
||||
|
||||
# Write the products report for `solution` to `filename` as a CSV file.
# `marginal_costs` is forwarded to `products_report`.
function write_products_report(solution, filename; marginal_costs = true)
    return CSV.write(filename, products_report(solution; marginal_costs = marginal_costs))
end
|
||||
@@ -1,75 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataFrames
|
||||
using CSV
|
||||
|
||||
"""
    transportation_report(solution)::DataFrame

Build a table with one row per transportation link and year: source and
destination coordinates, product, distance, shipped amount, tonne-km, cost
and energy. Values are rounded to 6 digits; energy is converted from J
to GJ.
"""
function transportation_report(solution)::DataFrame
    df = DataFrame()
    df."source type" = String[]
    df."source location name" = String[]
    df."source latitude (deg)" = Float64[]
    df."source longitude (deg)" = Float64[]
    df."destination type" = String[]
    df."destination location name" = String[]
    df."destination latitude (deg)" = Float64[]
    df."destination longitude (deg)" = Float64[]
    df."product" = String[]
    df."year" = Int[]
    df."distance (km)" = Float64[]
    df."amount (tonne)" = Float64[]
    df."amount-distance (tonne-km)" = Float64[]
    df."transportation cost (\$)" = Float64[]
    df."transportation energy (GJ)" = Float64[]

    # Number of years in the planning horizon.
    T = length(solution["Energy"]["Plants (GJ)"])
    for (dst_type, dst_locations) in solution["Plants"]
        for (dst_name, dst) in dst_locations
            # Destination attributes are invariant over sources and years.
            dst_lat = round(dst["Latitude (deg)"], digits = 6)
            dst_lon = round(dst["Longitude (deg)"], digits = 6)
            product = dst["Input product"]
            for (src_type, src_locations) in dst["Input"]
                for (src_name, src) in src_locations
                    src_lat = round(src["Latitude (deg)"], digits = 6)
                    src_lon = round(src["Longitude (deg)"], digits = 6)
                    distance = src["Distance (km)"]
                    for year = 1:T
                        amount = src["Amount (tonne)"][year]
                        push!(
                            df,
                            [
                                src_type,
                                src_name,
                                src_lat,
                                src_lon,
                                dst_type,
                                dst_name,
                                dst_lat,
                                dst_lon,
                                product,
                                year,
                                round(distance, digits = 6),
                                round(amount, digits = 6),
                                # tonne-km computed from the unrounded values.
                                round(amount * distance, digits = 6),
                                round(src["Transportation cost (\$)"][year], digits = 6),
                                round(
                                    src["Transportation energy (J)"][year] / 1e9,
                                    digits = 6,
                                ),
                            ],
                        )
                    end
                end
            end
        end
    end
    return df
end
|
||||
|
||||
# Write the transportation report for `solution` to `filename` as a CSV file.
function write_transportation_report(solution, filename)
    return CSV.write(filename, transportation_report(solution))
end
|
||||
@@ -1,71 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataFrames
|
||||
using CSV
|
||||
|
||||
"""
    transportation_emissions_report(solution)::DataFrame

Build a table with one row per transportation link, emission type and year:
source/destination coordinates, product, distance, shipped amount, tonne-km
and the emission amount. Values are rounded to 6 digits.
"""
function transportation_emissions_report(solution)::DataFrame
    df = DataFrame()
    df."source type" = String[]
    df."source location name" = String[]
    df."source latitude (deg)" = Float64[]
    df."source longitude (deg)" = Float64[]
    df."destination type" = String[]
    df."destination location name" = String[]
    df."destination latitude (deg)" = Float64[]
    df."destination longitude (deg)" = Float64[]
    df."product" = String[]
    df."year" = Int[]
    df."distance (km)" = Float64[]
    df."shipped amount (tonne)" = Float64[]
    df."shipped amount-distance (tonne-km)" = Float64[]
    df."emission type" = String[]
    df."emission amount (tonne)" = Float64[]

    # Number of years in the planning horizon.
    T = length(solution["Energy"]["Plants (GJ)"])
    for (dst_type, dst_locations) in solution["Plants"]
        for (dst_name, dst) in dst_locations
            # Destination attributes are invariant over sources and years.
            dst_lat = round(dst["Latitude (deg)"], digits = 6)
            dst_lon = round(dst["Longitude (deg)"], digits = 6)
            product = dst["Input product"]
            for (src_type, src_locations) in dst["Input"]
                for (src_name, src) in src_locations
                    src_lat = round(src["Latitude (deg)"], digits = 6)
                    src_lon = round(src["Longitude (deg)"], digits = 6)
                    distance = src["Distance (km)"]
                    for (emission_name, emission_amount) in src["Emissions (tonne)"]
                        for year = 1:T
                            amount = src["Amount (tonne)"][year]
                            push!(
                                df,
                                [
                                    src_type,
                                    src_name,
                                    src_lat,
                                    src_lon,
                                    dst_type,
                                    dst_name,
                                    dst_lat,
                                    dst_lon,
                                    product,
                                    year,
                                    round(distance, digits = 6),
                                    round(amount, digits = 6),
                                    # tonne-km computed from unrounded values.
                                    round(amount * distance, digits = 6),
                                    emission_name,
                                    round(emission_amount[year], digits = 6),
                                ],
                            )
                        end
                    end
                end
            end
        end
    end
    return df
end
|
||||
|
||||
# Write the transportation emissions report for `solution` to `filename`
# as a CSV file.
function write_transportation_emissions_report(solution, filename)
    return CSV.write(filename, transportation_emissions_report(solution))
end
|
||||
@@ -1,24 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataFrames
|
||||
using CSV
|
||||
import Base: write
|
||||
|
||||
"""
    write(solution::AbstractDict, filename::AbstractString)

Serialize `solution` to `filename` as JSON, indented with 2 spaces.
"""
function write(solution::AbstractDict, filename::AbstractString)
    @info "Writing solution: $filename"
    file = open(filename, "w")
    try
        JSON.print(file, solution, 2)
    finally
        # Ensure the file handle is released even if serialization fails.
        close(file)
    end
end
|
||||
|
||||
"""
    write_reports(solution, basename; marginal_costs = true)

Write the full set of CSV reports for `solution`, each named
`basename` plus a report-specific suffix. `marginal_costs` is forwarded
to the products report.
"""
function write_reports(
    solution::AbstractDict,
    basename::AbstractString;
    marginal_costs = true,
)
    # The products report is the only one taking a keyword argument.
    RELOG.write_products_report(solution, "$(basename)_products.csv"; marginal_costs)
    writers = [
        (RELOG.write_plants_report, "_plants.csv"),
        (RELOG.write_plant_outputs_report, "_plant_outputs.csv"),
        (RELOG.write_plant_emissions_report, "_plant_emissions.csv"),
        (RELOG.write_transportation_report, "_tr.csv"),
        (RELOG.write_transportation_emissions_report, "_tr_emissions.csv"),
    ]
    for (writer, suffix) in writers
        writer(solution, "$(basename)$(suffix)")
    end
    return
end
|
||||
@@ -1,192 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"$id": "https://anl-ceeesa.github.io/RELOG/input",
|
||||
"title": "Schema for RELOG Input File",
|
||||
"definitions": {
|
||||
"TimeSeries": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"Parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"time horizon (years)": {
|
||||
"type": "number"
|
||||
},
|
||||
"distance metric": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": ["time horizon (years)"]
|
||||
},
|
||||
"Plant": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"input": {
|
||||
"type": "string"
|
||||
},
|
||||
"outputs (tonne/tonne)": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"energy (GJ/tonne)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
},
|
||||
"emissions (tonne/tonne)": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
}
|
||||
},
|
||||
"locations": {
|
||||
"$ref": "#/definitions/PlantLocation"
|
||||
}
|
||||
},
|
||||
"required": ["input", "locations"]
|
||||
}
|
||||
},
|
||||
"PlantLocation": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "string"
|
||||
},
|
||||
"latitude (deg)": {
|
||||
"type": "number"
|
||||
},
|
||||
"longitude (deg)": {
|
||||
"type": "number"
|
||||
},
|
||||
"initial capacity (tonne)": {
|
||||
"type": "number"
|
||||
},
|
||||
"disposal": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"cost ($/tonne)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
},
|
||||
"limit (tonne)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
}
|
||||
},
|
||||
"required": ["cost ($/tonne)"]
|
||||
}
|
||||
},
|
||||
"storage": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"cost ($/tonne)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
},
|
||||
"limit (tonne)": {
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"required": ["cost ($/tonne)", "limit (tonne)"]
|
||||
},
|
||||
"capacities (tonne)": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"variable operating cost ($/tonne)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
},
|
||||
"fixed operating cost ($)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
},
|
||||
"opening cost ($)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"variable operating cost ($/tonne)",
|
||||
"fixed operating cost ($)",
|
||||
"opening cost ($)"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["capacities (tonne)"]
|
||||
}
|
||||
},
|
||||
"InitialAmount": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "string"
|
||||
},
|
||||
"latitude (deg)": {
|
||||
"type": "number"
|
||||
},
|
||||
"longitude (deg)": {
|
||||
"type": "number"
|
||||
},
|
||||
"amount (tonne)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
}
|
||||
},
|
||||
"required": ["amount (tonne)"]
|
||||
}
|
||||
},
|
||||
"Product": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"transportation cost ($/km/tonne)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
},
|
||||
"transportation energy (J/km/tonne)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
},
|
||||
"transportation emissions (tonne/km/tonne)": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
}
|
||||
},
|
||||
"initial amounts": {
|
||||
"$ref": "#/definitions/InitialAmount"
|
||||
},
|
||||
"disposal limit (tonne)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
},
|
||||
"disposal cost ($/tonne)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
},
|
||||
"acquisition cost ($/tonne)": {
|
||||
"$ref": "#/definitions/TimeSeries"
|
||||
}
|
||||
},
|
||||
"required": ["transportation cost ($/km/tonne)"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"parameters": {
|
||||
"$ref": "#/definitions/Parameters"
|
||||
},
|
||||
"plants": {
|
||||
"$ref": "#/definitions/Plant"
|
||||
},
|
||||
"products": {
|
||||
"$ref": "#/definitions/Product"
|
||||
}
|
||||
},
|
||||
"required": ["parameters", "plants", "products"]
|
||||
}
|
||||
115
src/web/run.jl
115
src/web/run.jl
@@ -1,115 +0,0 @@
|
||||
println("Initializing...")
|
||||
|
||||
using Logging
|
||||
using JSON
|
||||
using JuMP
|
||||
using HiGHS
|
||||
using RELOG
|
||||
|
||||
"""
    solve(root, filename)

Solve the reference instance `root/filename`, write its CSV reports next to
it, then re-solve the model for each scenario file found in
`root/scenarios/*.json` and write the corresponding reports.

The solver time limit is read from the `RELOG_TIME_LIMIT_SEC` environment
variable.
"""
function solve(root, filename)
    ref_file = "$root/$filename"
    optimizer = optimizer_with_attributes(
        HiGHS.Optimizer,
        "time_limit" => parse(Float64, ENV["RELOG_TIME_LIMIT_SEC"]),
    )
    ref_solution, ref_model = RELOG.solve(
        ref_file,
        optimizer = optimizer,
        lp_optimizer = HiGHS.Optimizer,
        return_model = true,
        marginal_costs = true,
    )
    # Flush solver output (written via C stdio) before generating reports.
    Libc.flush_cstdio()
    flush(stdout)
    sleep(1)

    # An empty solution dictionary indicates the instance was not solved.
    if length(ref_solution) == 0
        return
    end
    RELOG.write_products_report(ref_solution, replace(ref_file, ".json" => "_products.csv"))
    RELOG.write_plants_report(ref_solution, replace(ref_file, ".json" => "_plants.csv"))
    RELOG.write_plant_outputs_report(
        ref_solution,
        replace(ref_file, ".json" => "_plant_outputs.csv"),
    )
    RELOG.write_plant_emissions_report(
        ref_solution,
        replace(ref_file, ".json" => "_plant_emissions.csv"),
    )
    RELOG.write_transportation_report(ref_solution, replace(ref_file, ".json" => "_tr.csv"))
    RELOG.write_transportation_emissions_report(
        ref_solution,
        replace(ref_file, ".json" => "_tr_emissions.csv"),
    )

    isdir("$root/scenarios") || return
    # Loop variable renamed: it previously shadowed the `filename` argument.
    for sc_filename in readdir("$root/scenarios")
        endswith(sc_filename, ".json") || continue
        scenario = "$root/scenarios/$sc_filename"

        sc_solution = RELOG.resolve(
            ref_model,
            scenario,
            optimizer = optimizer,
            lp_optimizer = HiGHS.Optimizer,
        )
        if length(sc_solution) == 0
            # BUGFIX: this was `return`, which silently aborted ALL remaining
            # scenarios as soon as one scenario could not be solved. Skip
            # only the failing scenario instead.
            continue
        end
        RELOG.write_plants_report(sc_solution, replace(scenario, ".json" => "_plants.csv"))
        RELOG.write_products_report(
            sc_solution,
            replace(scenario, ".json" => "_products.csv"),
        )
        RELOG.write_plant_outputs_report(
            sc_solution,
            replace(scenario, ".json" => "_plant_outputs.csv"),
        )
        RELOG.write_plant_emissions_report(
            sc_solution,
            replace(scenario, ".json" => "_plant_emissions.csv"),
        )
        RELOG.write_transportation_report(
            sc_solution,
            replace(scenario, ".json" => "_tr.csv"),
        )
        RELOG.write_transportation_emissions_report(
            sc_solution,
            replace(scenario, ".json" => "_tr_emissions.csv"),
        )
    end
end
|
||||
|
||||
"""
    solve_recursive(path)

Change into `path`, solve every instance JSON file found under it (skipping
`scenarios` folders, which are handled by `solve` itself), write an index of
all solved cases to `output.json`, and bundle the directory into
`output.zip`.
"""
function solve_recursive(path)
    cd(path)

    # Solve all instances.
    for (root, dirs, files) in walkdir(".")
        occursin(r"scenarios"i, root) && continue
        for fname in files
            endswith(fname, ".json") && solve(root, fname)
        end
    end

    # Collect results: each solved case is identified by its "_plants.csv"
    # report; strip the suffix to recover the case's base name.
    results = []
    for (root, dirs, files) in walkdir(".")
        for fname in files
            endswith(fname, "_plants.csv") || continue
            push!(
                results,
                joinpath(replace(root, path => ""), replace(fname, "_plants.csv" => "")),
            )
        end
    end
    open("output.json", "w") do file
        JSON.print(file, results)
    end

    run(`zip -r output.zip .`)
end
|
||||
|
||||
solve_recursive(ARGS[1])
|
||||
@@ -1,65 +0,0 @@
|
||||
import HTTP
|
||||
import JSON
|
||||
using Random
|
||||
|
||||
const ROUTER = HTTP.Router()
|
||||
const PROJECT_DIR = joinpath(dirname(@__FILE__), "..", "..")
|
||||
const STATIC_DIR = joinpath(PROJECT_DIR, "relog-web", "build", "static")
|
||||
const JOBS_DIR = joinpath(PROJECT_DIR, "jobs")
|
||||
|
||||
# Respond with the contents of `filename` (HTTP 200), or 404 if the file
# does not exist.
function serve_file(req::HTTP.Request, filename)
    isfile(filename) || return HTTP.Response(404)
    return open(io -> HTTP.Response(200, read(io)), filename)
end
|
||||
|
||||
# Handle POST /submit: persist the uploaded case as a new job and launch the
# solver for it in a detached background process. Returns the job id as JSON.
function submit(req::HTTP.Request)
    # Random 12-character job identifier; also used as the folder name.
    job_id = lowercase(randstring(12))
    job_path = joinpath(JOBS_DIR, job_id)
    mkpath(job_path)

    # Store the uploaded JSON case inside the job folder.
    case = JSON.parse(String(req.body))
    open(joinpath(job_path, "case.json"), "w") do file
        JSON.print(file, case)
    end

    # Launch the solver in the background, teeing its output to solve.log.
    run(
        `bash -c "(julia --project=$PROJECT_DIR $PROJECT_DIR/src/web/run.jl $job_path 2>&1 | tee $job_path/solve.log) >/dev/null 2>&1 &"`,
    )

    return HTTP.Response(200, body = JSON.json(Dict("job_id" => job_id)))
end
|
||||
|
||||
# Serve the single-page application entry point (index.html).
get_index(req::HTTP.Request) = serve_file(req, joinpath(STATIC_DIR, "..", "index.html"))
|
||||
|
||||
# Serve static assets under STATIC_DIR for "/static/..." requests.
function get_static(req::HTTP.Request)
    # Path after the "/static/" prefix. It comes straight from the client,
    # so reject any ".." component to prevent path traversal out of
    # STATIC_DIR (e.g. "/static/../../secret").
    rel = req.target[9:end]
    ".." in splitpath(rel) && return HTTP.Response(403)
    return serve_file(req, joinpath(STATIC_DIR, rel))
end
|
||||
|
||||
# Serve job artifacts under JOBS_DIR for "/jobs/..." requests.
function get_jobs(req::HTTP.Request)
    # Path after the "/jobs/" prefix. It comes straight from the client,
    # so reject any ".." component to prevent path traversal out of
    # JOBS_DIR.
    rel = req.target[7:end]
    ".." in splitpath(rel) && return HTTP.Response(403)
    return serve_file(req, joinpath(JOBS_DIR, rel))
end
|
||||
|
||||
HTTP.@register(ROUTER, "GET", "/static", get_static)
|
||||
HTTP.@register(ROUTER, "GET", "/jobs", get_jobs)
|
||||
HTTP.@register(ROUTER, "POST", "/submit", submit)
|
||||
HTTP.@register(ROUTER, "GET", "/", get_index)
|
||||
|
||||
"""
    web(host = "127.0.0.1", port = 8080)

Start the RELOG web interface, blocking until the HTTP server stops.
"""
function web(host = "127.0.0.1", port = 8080)
    @info "Launching web interface: http://$(host):$(port)/"
    # While the server runs, deliver SIGINT (Ctrl-C) as an InterruptException
    # rather than terminating the process immediately; restore afterwards.
    Base.exit_on_sigint(false)
    HTTP.serve(ROUTER, host, port)
    Base.exit_on_sigint(true)
end
|
||||
Reference in New Issue
Block a user