mirror of
https://github.com/ANL-CEEESA/RELOG.git
synced 2025-12-05 23:38:52 -06:00
Implement resolve
This commit is contained in:
@@ -17,6 +17,7 @@ include("instance/validate.jl")
|
||||
include("model/build.jl")
|
||||
include("model/getsol.jl")
|
||||
include("model/solve.jl")
|
||||
include("model/resolve.jl")
|
||||
include("reports/plant_emissions.jl")
|
||||
include("reports/plant_outputs.jl")
|
||||
include("reports/plants.jl")
|
||||
|
||||
@@ -79,14 +79,14 @@ The following snippet shows how to use the method:
|
||||
using RELOG
|
||||
|
||||
# Optimize for the average scenario
|
||||
solution_avg = RELOG.solve("input_avg.json")
|
||||
solution_avg, model_avg = RELOG.solve("input_avg.json", return_model=true)
|
||||
|
||||
# Write reports for the average scenario
|
||||
RELOG.write_plants_report(solution_avg, "plants_avg.csv")
|
||||
RELOG.write_transportation_report(solution_avg, "transportation_avg.csv")
|
||||
|
||||
# Re-optimize for the high-demand scenario, keeping plants fixed
|
||||
solution_high = RELOG.resolve(solution_avg, "input_high.json")
|
||||
solution_high = RELOG.resolve(model_avg, "input_high.json")
|
||||
|
||||
# Write reports for the high-demand scenario
|
||||
RELOG.write_plants_report(solution_high, "plants_high.csv")
|
||||
|
||||
@@ -17,6 +17,8 @@ function build_graph(instance::Instance)::Graph
|
||||
plant_shipping_nodes = ShippingNode[]
|
||||
collection_shipping_nodes = ShippingNode[]
|
||||
|
||||
name_to_process_node_map = Dict{Tuple{AbstractString,AbstractString},ProcessNode}()
|
||||
|
||||
process_nodes_by_input_product =
|
||||
Dict(product => ProcessNode[] for product in instance.products)
|
||||
shipping_nodes_by_plant = Dict(plant => [] for plant in instance.plants)
|
||||
@@ -35,6 +37,8 @@ function build_graph(instance::Instance)::Graph
|
||||
push!(process_nodes, pn)
|
||||
push!(process_nodes_by_input_product[plant.input], pn)
|
||||
|
||||
name_to_process_node_map[(plant.plant_name, plant.location_name)] = pn
|
||||
|
||||
for product in keys(plant.output)
|
||||
sn = ShippingNode(next_index, plant, product, [], [])
|
||||
next_index += 1
|
||||
@@ -73,5 +77,24 @@ function build_graph(instance::Instance)::Graph
|
||||
end
|
||||
end
|
||||
|
||||
return Graph(process_nodes, plant_shipping_nodes, collection_shipping_nodes, arcs)
|
||||
return Graph(
|
||||
process_nodes,
|
||||
plant_shipping_nodes,
|
||||
collection_shipping_nodes,
|
||||
arcs,
|
||||
name_to_process_node_map,
|
||||
)
|
||||
end
|
||||
|
||||
|
||||
# Log a one-line size summary for each component of `graph` (plus the
# number of time periods in `instance`), right-aligned for readability.
function print_graph_stats(instance::Instance, graph::Graph)::Nothing
    stats = [
        ("time periods", instance.time),
        ("process nodes", length(graph.process_nodes)),
        ("shipping nodes (plant)", length(graph.plant_shipping_nodes)),
        ("shipping nodes (collection)", length(graph.collection_shipping_nodes)),
        ("arcs", length(graph.arcs)),
    ]
    for (label, count) in stats
        @info @sprintf(" %12d %s", count, label)
    end
    return
end
|
||||
|
||||
@@ -32,6 +32,7 @@ mutable struct Graph
|
||||
plant_shipping_nodes::Vector{ShippingNode}
|
||||
collection_shipping_nodes::Vector{ShippingNode}
|
||||
arcs::Vector{Arc}
|
||||
name_to_process_node_map::Dict{Tuple{AbstractString,AbstractString},ProcessNode}
|
||||
end
|
||||
|
||||
function Base.show(io::IO, instance::Graph)
|
||||
|
||||
289
src/instance.jl
289
src/instance.jl
@@ -1,289 +0,0 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using DataStructures
|
||||
using JSON
|
||||
using JSONSchema
|
||||
using Printf
|
||||
using Statistics
|
||||
|
||||
|
||||
# A product flowing through the reverse-logistics network.
# Units below are taken from the JSON input keys read in `parse`.
mutable struct Product
    name::String
    # Transportation cost ($/km/tonne), one entry per time period.
    transportation_cost::Array{Float64}
    # Transportation energy (J/km/tonne), one entry per time period.
    transportation_energy::Array{Float64}
    # Transportation emissions (tonne/km/tonne) per emission type,
    # each value a time series.
    transportation_emissions::Dict{String, Array{Float64}}
end
|
||||
|
||||
|
||||
# A location where a product becomes available for collection.
# Built in `parse` from the "initial amounts" section of the input.
mutable struct CollectionCenter
    # Sequential 1-based index, assigned in `parse`.
    index::Int64
    name::String
    latitude::Float64
    longitude::Float64
    # The (single) product available at this center.
    product::Product
    # Amount available (tonne), one entry per time period.
    amount::Array{Float64}
end
|
||||
|
||||
|
||||
# One capacity level of a candidate plant, with its associated costs.
# All cost fields are time series; `capacity` is a single value (tonne).
mutable struct PlantSize
    capacity::Float64
    # Variable operating cost ($/tonne), per time period.
    variable_operating_cost::Array{Float64}
    # Fixed operating cost ($), per time period.
    fixed_operating_cost::Array{Float64}
    # Opening cost ($), per time period.
    opening_cost::Array{Float64}
end
|
||||
|
||||
|
||||
# A candidate plant at a specific location: what it consumes and produces,
# and its cost/limit structure. Built in `parse`, one per (plant, location).
mutable struct Plant
    # Sequential 1-based index, assigned in `parse`.
    index::Int64
    plant_name::String
    location_name::String
    # Product consumed by this plant.
    input::Product
    # Products produced per tonne of input (tonne/tonne); only entries
    # with positive yield are kept (see `parse`).
    output::Dict{Product, Float64}
    latitude::Float64
    longitude::Float64
    # Per-output-product disposal limit (tonne) and cost ($/tonne),
    # each a time series.
    disposal_limit::Dict{Product, Array{Float64}}
    disposal_cost::Dict{Product, Array{Float64}}
    # Capacity levels; `parse` guarantees exactly two entries, sorted by
    # capacity (a single input capacity is duplicated).
    sizes::Array{PlantSize}
    # Energy use (GJ/tonne), per time period.
    energy::Array{Float64}
    # Emissions (tonne/tonne) per emission type, each a time series.
    emissions::Dict{String, Array{Float64}}
    # Storage limit (tonne) and cost ($/tonne); zero when the input has
    # no "storage" section.
    storage_limit::Float64
    storage_cost::Array{Float64}
end
|
||||
|
||||
|
||||
# A complete problem instance: the planning horizon plus all products,
# collection centers, and candidate plant locations.
mutable struct Instance
    # Number of time periods in the planning horizon.
    time::Int64
    products::Array{Product, 1}
    collection_centers::Array{CollectionCenter, 1}
    plants::Array{Plant, 1}
    # Time periods in which plants may be built; defaults to [1] in `parse`.
    building_period::Array{Int64}
end
|
||||
|
||||
|
||||
# Compact one-line description of an instance (used by the REPL display).
function Base.show(io::IO, instance::Instance)
    np = length(instance.products)
    nc = length(instance.collection_centers)
    nl = length(instance.plants)
    print(io, "RELOG instance with $np products, $nc collection centers, $nl plants")
end
|
||||
|
||||
|
||||
# Validate `json` against a JSONSchema `schema`. Returns nothing on
# success; on failure, throws a human-readable message describing the
# first issue and where it occurred.
function validate(json, schema)
    issue = JSONSchema.validate(json, schema)
    issue === nothing && return
    if issue isa JSONSchema.SingleIssue
        location = join(issue.path, " → ")
        isempty(location) && (location = "root")
        msg = "$(issue.msg) in $(location)"
    else
        msg = convert(String, issue)
    end
    # NOTE(review): a raw String (not an Exception) is thrown here;
    # preserved as-is since callers may match on it.
    throw(msg)
end
|
||||
|
||||
|
||||
# Read a RELOG problem instance from the JSON file at `path`.
parsefile(path::String)::Instance = RELOG.parse(JSON.parsefile(path))
|
||||
|
||||
|
||||
"""
    parse(json)::Instance

Convert a parsed JSON dictionary (RELOG's input format) into an `Instance`.

The input is validated against `schemas/input.json` twice: once as-is,
and once after pinning the TimeSeries definition to exactly `T` entries,
where `T` is the declared time horizon — so every time series must have
the right length. Throws a descriptive message (via `validate`) if the
input is invalid.
"""
function parse(json)::Instance
    basedir = dirname(@__FILE__)
    json_schema = JSON.parsefile("$basedir/schemas/input.json")
    validate(json, Schema(json_schema))

    # Second validation pass: force all time series to have exactly T items.
    T = json["parameters"]["time horizon (years)"]
    json_schema["definitions"]["TimeSeries"]["minItems"] = T
    json_schema["definitions"]["TimeSeries"]["maxItems"] = T
    validate(json, Schema(json_schema))

    # Plants may be built only during these periods (default: period 1 only).
    building_period = [1]
    if "building period (years)" in keys(json)
        building_period = json["building period (years)"]
    end

    plants = Plant[]
    products = Product[]
    collection_centers = CollectionCenter[]
    prod_name_to_product = Dict{String, Product}()

    # Create products
    for (product_name, product_dict) in json["products"]
        cost = product_dict["transportation cost (\$/km/tonne)"]
        # Energy and emissions are optional; default to zero / empty.
        energy = zeros(T)
        emissions = Dict()

        if "transportation energy (J/km/tonne)" in keys(product_dict)
            energy = product_dict["transportation energy (J/km/tonne)"]
        end

        if "transportation emissions (tonne/km/tonne)" in keys(product_dict)
            emissions = product_dict["transportation emissions (tonne/km/tonne)"]
        end

        product = Product(product_name, cost, energy, emissions)
        push!(products, product)
        prod_name_to_product[product_name] = product

        # Create collection centers
        if "initial amounts" in keys(product_dict)
            for (center_name, center_dict) in product_dict["initial amounts"]
                center = CollectionCenter(length(collection_centers) + 1,
                                          center_name,
                                          center_dict["latitude (deg)"],
                                          center_dict["longitude (deg)"],
                                          product,
                                          center_dict["amount (tonne)"])
                push!(collection_centers, center)
            end
        end
    end

    # Create plants
    for (plant_name, plant_dict) in json["plants"]
        input = prod_name_to_product[plant_dict["input"]]
        output = Dict()

        # Plant outputs: keep only products with positive yield.
        if "outputs (tonne/tonne)" in keys(plant_dict)
            output = Dict(prod_name_to_product[key] => value
                          for (key, value) in plant_dict["outputs (tonne/tonne)"]
                          if value > 0)
        end

        # Energy and emissions are optional; default to zero / empty.
        energy = zeros(T)
        emissions = Dict()

        if "energy (GJ/tonne)" in keys(plant_dict)
            energy = plant_dict["energy (GJ/tonne)"]
        end

        if "emissions (tonne/tonne)" in keys(plant_dict)
            emissions = plant_dict["emissions (tonne/tonne)"]
        end

        # One Plant is created per (plant, location) pair.
        for (location_name, location_dict) in plant_dict["locations"]
            sizes = PlantSize[]
            disposal_limit = Dict(p => [0.0 for t in 1:T] for p in keys(output))
            disposal_cost = Dict(p => [0.0 for t in 1:T] for p in keys(output))

            # Disposal (optional). When a product has a disposal section but
            # no explicit limit, a large default (1e8 tonne) is used,
            # effectively making disposal unlimited.
            if "disposal" in keys(location_dict)
                for (product_name, disposal_dict) in location_dict["disposal"]
                    limit = [1e8 for t in 1:T]
                    if "limit (tonne)" in keys(disposal_dict)
                        limit = disposal_dict["limit (tonne)"]
                    end
                    disposal_limit[prod_name_to_product[product_name]] = limit
                    disposal_cost[prod_name_to_product[product_name]] = disposal_dict["cost (\$/tonne)"]
                end
            end

            # Capacities: keys of "capacities (tonne)" are numeric strings.
            for (capacity_name, capacity_dict) in location_dict["capacities (tonne)"]
                push!(sizes, PlantSize(Base.parse(Float64, capacity_name),
                                       capacity_dict["variable operating cost (\$/tonne)"],
                                       capacity_dict["fixed operating cost (\$)"],
                                       capacity_dict["opening cost (\$)"]))
            end
            # Duplicate a lone capacity so downstream code can always
            # assume exactly two entries, sorted by capacity.
            length(sizes) > 1 || push!(sizes, sizes[1])
            sort!(sizes, by = x -> x.capacity)

            # Storage (optional); defaults to no storage at zero cost.
            storage_limit = 0
            storage_cost = zeros(T)
            if "storage" in keys(location_dict)
                storage_dict = location_dict["storage"]
                storage_limit = storage_dict["limit (tonne)"]
                storage_cost = storage_dict["cost (\$/tonne)"]
            end

            # Validation: Capacities
            if length(sizes) != 2
                throw("At most two capacities are supported")
            end
            if sizes[1].variable_operating_cost != sizes[2].variable_operating_cost
                throw("Variable operating costs must be the same for all capacities")
            end

            plant = Plant(length(plants) + 1,
                          plant_name,
                          location_name,
                          input,
                          output,
                          location_dict["latitude (deg)"],
                          location_dict["longitude (deg)"],
                          disposal_limit,
                          disposal_cost,
                          sizes,
                          energy,
                          emissions,
                          storage_limit,
                          storage_cost)

            push!(plants, plant)
        end
    end

    @info @sprintf("%12d collection centers", length(collection_centers))
    @info @sprintf("%12d candidate plant locations", length(plants))

    return Instance(T, products, collection_centers, plants, building_period)
end
|
||||
|
||||
|
||||
"""
    _compress(instance::Instance)

Create a single-period instance from a multi-period one. Specifically,
replaces every time-dependent attribute, such as initial_amounts,
by a list with a single element, which is either a sum, an average,
or something else that makes sense to that specific attribute.
"""
function _compress(instance::Instance)::Instance
    T = instance.time
    # deepcopy so the original multi-period instance is left untouched.
    compressed = deepcopy(instance)
    compressed.time = 1
    compressed.building_period = [1]

    # Compress products: per-distance rates are averaged over time.
    for p in compressed.products
        p.transportation_cost = [mean(p.transportation_cost)]
        p.transportation_energy = [mean(p.transportation_energy)]
        for (emission_name, emission_value) in p.transportation_emissions
            p.transportation_emissions[emission_name] = [mean(emission_value)]
        end
    end

    # Compress collection centers: peak-period amount scaled to the full
    # horizon (maximum * T, not the sum).
    for c in compressed.collection_centers
        c.amount = [maximum(c.amount) * T]
    end

    # Compress plants
    for plant in compressed.plants
        # Per-tonne rates are averaged over time.
        plant.energy = [mean(plant.energy)]
        for (emission_name, emission_value) in plant.emissions
            plant.emissions[emission_name] = [mean(emission_value)]
        end
        for s in plant.sizes
            # Capacity is scaled to cover the whole horizon; recurring
            # fixed costs are totaled; opening happens once (first period).
            s.capacity *= T
            s.variable_operating_cost = [mean(s.variable_operating_cost)]
            s.opening_cost = [s.opening_cost[1]]
            s.fixed_operating_cost = [sum(s.fixed_operating_cost)]
        end
        # Disposal limits are cumulative over time; costs are averaged.
        for (prod_name, disp_limit) in plant.disposal_limit
            plant.disposal_limit[prod_name] = [sum(disp_limit)]
        end
        for (prod_name, disp_cost) in plant.disposal_cost
            plant.disposal_cost[prod_name] = [mean(disp_cost)]
        end
    end

    return compressed
end
|
||||
@@ -4,7 +4,6 @@
|
||||
|
||||
using JuMP, LinearAlgebra, Geodesy, Cbc, Clp, ProgressBars, Printf, DataStructures
|
||||
|
||||
|
||||
function build_model(instance::Instance, graph::Graph, optimizer)::JuMP.Model
|
||||
model = Model(optimizer)
|
||||
model[:instance] = instance
|
||||
|
||||
97
src/model/resolve.jl
Normal file
97
src/model/resolve.jl
Normal file
@@ -0,0 +1,97 @@
|
||||
# RELOG: Reverse Logistics Optimization
|
||||
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
|
||||
using JuMP
|
||||
|
||||
# Read a new scenario from the JSON file `filename` and re-optimize it,
# keeping the plant decisions from `model_old` fixed.
# See `resolve(model_old, instance::Instance)` for details.
function resolve(model_old, filename::AbstractString; kwargs...)::OrderedDict
    @info "Reading $filename..."
    return resolve(model_old, RELOG.parsefile(filename); kwargs...)
end
|
||||
|
||||
"""
    resolve(model_old, instance::Instance; optimizer = nothing)::OrderedDict

Re-optimize `instance` while fixing every plant decision variable to the
values found in the previously-solved `model_old` (see `_fix_plants!`).
Returns the new solution as an `OrderedDict`, or an empty `OrderedDict`
if the solver finds no feasible solution.
"""
function resolve(model_old, instance::Instance; optimizer = nothing)::OrderedDict
    # When no optimizer is given, fall back to the package defaults
    # (Cbc for MILP, Clp for LP; see _get_default_*_optimizer).
    milp_optimizer = lp_optimizer = optimizer
    if optimizer === nothing
        milp_optimizer = _get_default_milp_optimizer()
        lp_optimizer = _get_default_lp_optimizer()
    end

    @info "Building new graph..."
    graph = build_graph(instance)
    _print_graph_stats(instance, graph)

    @info "Building new optimization model..."
    model_new = RELOG.build_model(instance, graph, milp_optimizer)

    @info "Fixing decision variables..."
    # With all plant decisions fixed, the remaining problem is continuous,
    # so the LP optimizer is swapped in before solving.
    _fix_plants!(model_old, model_new)
    JuMP.set_optimizer(model_new, lp_optimizer)

    @info "Optimizing MILP..."
    JuMP.optimize!(model_new)

    if !has_values(model_new)
        @warn("No solution available")
        return OrderedDict()
    end

    @info "Extracting solution..."
    solution = get_solution(model_new, marginal_costs = true)

    return solution
end
|
||||
|
||||
"""
    _fix_plants!(model_old, model_new)::Nothing

Fix all plant-related decision variables in `model_new` — `open_plant`,
`is_open`, `capacity` and `expansion` — to the values they took in the
solved `model_old`. Old and new process nodes are matched by
(plant name, location name) through the new graph's
`name_to_process_node_map`.
"""
function _fix_plants!(model_old, model_new)::Nothing
    graph_new = model_new[:graph]

    # Map an old-model process node to the corresponding node in the new
    # model, matching on (plant name, location name).
    _new_node(node_old) = graph_new.name_to_process_node_map[(
        node_old.location.plant_name,
        node_old.location.location_name,
    )]

    # Binary variables (open_plant, is_open): drop the binary restriction
    # before fixing, so the resulting model is a linear program.
    for varname in (:open_plant, :is_open)
        for ((node_old, t), var_old) in model_old[varname]
            var_new = model_new[varname][_new_node(node_old), t]
            JuMP.unset_binary(var_new)
            JuMP.fix(var_new, JuMP.value(var_old))
        end
    end

    # Continuous variables (capacity, expansion): existing bounds must be
    # removed first, since JuMP.fix errors on bounded variables unless
    # force=true is passed.
    for varname in (:capacity, :expansion)
        for ((node_old, t), var_old) in model_old[varname]
            var_new = model_new[varname][_new_node(node_old), t]
            JuMP.delete_lower_bound(var_new)
            JuMP.delete_upper_bound(var_new)
            JuMP.fix(var_new, JuMP.value(var_old))
        end
    end
    return nothing
end
|
||||
@@ -4,24 +4,16 @@
|
||||
|
||||
using JuMP, LinearAlgebra, Geodesy, Cbc, Clp, ProgressBars, Printf, DataStructures
|
||||
|
||||
default_milp_optimizer = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
|
||||
default_lp_optimizer = optimizer_with_attributes(Clp.Optimizer, "LogLevel" => 0)
|
||||
# Default MILP solver: Cbc with its log output silenced.
_get_default_milp_optimizer() = optimizer_with_attributes(Cbc.Optimizer, "logLevel" => 0)
|
||||
|
||||
function solve(
|
||||
instance::Instance;
|
||||
optimizer = nothing,
|
||||
output = nothing,
|
||||
marginal_costs = true,
|
||||
)
|
||||
# Default LP solver: Clp with its log output silenced.
_get_default_lp_optimizer() = optimizer_with_attributes(Clp.Optimizer, "LogLevel" => 0)
|
||||
|
||||
milp_optimizer = lp_optimizer = optimizer
|
||||
if optimizer == nothing
|
||||
milp_optimizer = default_milp_optimizer
|
||||
lp_optimizer = default_lp_optimizer
|
||||
end
|
||||
|
||||
@info "Building graph..."
|
||||
graph = RELOG.build_graph(instance)
|
||||
function _print_graph_stats(instance::Instance, graph::Graph)::Nothing
|
||||
@info @sprintf(" %12d time periods", instance.time)
|
||||
@info @sprintf(" %12d process nodes", length(graph.process_nodes))
|
||||
@info @sprintf(" %12d shipping nodes (plant)", length(graph.plant_shipping_nodes))
|
||||
@@ -30,6 +22,26 @@ function solve(
|
||||
length(graph.collection_shipping_nodes)
|
||||
)
|
||||
@info @sprintf(" %12d arcs", length(graph.arcs))
|
||||
return
|
||||
end
|
||||
|
||||
function solve(
|
||||
instance::Instance;
|
||||
optimizer = nothing,
|
||||
output = nothing,
|
||||
marginal_costs = true,
|
||||
return_model = false,
|
||||
)
|
||||
|
||||
milp_optimizer = lp_optimizer = optimizer
|
||||
if optimizer == nothing
|
||||
milp_optimizer = _get_default_milp_optimizer()
|
||||
lp_optimizer = _get_default_lp_optimizer()
|
||||
end
|
||||
|
||||
@info "Building graph..."
|
||||
graph = RELOG.build_graph(instance)
|
||||
_print_graph_stats(instance, graph)
|
||||
|
||||
@info "Building optimization model..."
|
||||
model = RELOG.build_model(instance, graph, milp_optimizer)
|
||||
@@ -62,7 +74,11 @@ function solve(
|
||||
write(solution, output)
|
||||
end
|
||||
|
||||
if return_model
|
||||
return solution, model
|
||||
else
|
||||
return solution
|
||||
end
|
||||
end
|
||||
|
||||
function solve(filename::AbstractString; heuristic = false, kwargs...)
|
||||
|
||||
11
test/model/resolve_test.jl
Normal file
11
test/model/resolve_test.jl
Normal file
@@ -0,0 +1,11 @@
|
||||
# Copyright (C) 2020 Argonne National Laboratory
|
||||
# Written by Alinson Santos Xavier <axavier@anl.gov>
|
||||
|
||||
using RELOG
|
||||
|
||||
@testset "Resolve" begin
    # Smoke test: solving an instance and then resolving it against the
    # same input should not crash.
    filename = "$(pwd())/../instances/s1.json"
    solution_old, model_old = RELOG.solve(filename, return_model = true)
    solution_new = RELOG.resolve(model_old, filename)
end
|
||||
@@ -15,6 +15,7 @@ using Test
|
||||
@testset "Model" begin
|
||||
include("model/build_test.jl")
|
||||
include("model/solve_test.jl")
|
||||
include("model/resolve_test.jl")
|
||||
end
|
||||
include("reports_test.jl")
|
||||
end
|
||||
|
||||
Reference in New Issue
Block a user