18 Commits

Author SHA1 Message Date
Kavitha G Menon
2cf97d6bee Capacity expansion model
Add the model file with capacity expansion constraints and subsequent modifications
2024-08-09 15:59:15 -05:00
9e0f8c5796 solve: Allow custom graph 2023-07-27 10:38:53 -05:00
5693ef2aa2 Reformat source code 2023-07-26 10:25:11 -05:00
bc05b49222 Make resolve compatible with solve(heuristic=true) 2023-07-26 10:17:37 -05:00
3e54e767c4 Fix failing test 2023-07-26 10:00:07 -05:00
84bd25b04d Fix: Remove disposal when deleting product 2023-07-07 10:22:57 -05:00
c86dda12cd Make marginal costs optional in write_reports 2023-07-07 10:20:25 -05:00
f3a2d1d616 Fix bug in _compress when plants have fixed size
Capacity was being incorrectly multiplied by T twice. This
happened because there were two references to the same struct
in the plant sizes array. This fix replaces the second reference
by an actual copy of the struct.
2023-07-07 10:05:31 -05:00
029a47a64b compress: Update disposal/acquisition limits/costs 2023-05-16 15:29:08 -05:00
de27a6202d Bump version to 0.7.2 2023-03-10 16:27:32 -06:00
7d4a763910 Fix issue with collection disposal; increase precision in CSV reports 2023-03-10 14:20:21 -06:00
8432c49050 Add .zenodo.json 2023-03-08 10:17:18 -06:00
2d860326fe Bump version to 0.7.1 2023-03-08 10:01:45 -06:00
be37934b87 Web: Do not use heuristics 2023-03-08 09:44:44 -06:00
3c354ec3e4 Add write_reports function 2023-03-08 09:44:27 -06:00
f5a92358d7 Formulation: If plant is closed, storage cannot be used 2023-03-08 09:44:08 -06:00
69f205be77 Formulation: Prevent plants from sending products to themselves 2023-03-08 09:42:53 -06:00
3b3ecbde27 Web: Fix parsing of disposal limit 2023-03-08 09:42:02 -06:00
23 changed files with 1867 additions and 57 deletions

.zenodo.json (new file, 28 lines added)
View File

@@ -0,0 +1,28 @@
{
"creators": [
{
"orcid": "0000-0002-5022-9802",
"affiliation": "Argonne National Laboratory",
"name": "Santos Xavier, Alinson"
},
{
"orcid": "0000-0002-3426-9425",
"affiliation": "Argonne National Laboratory",
"name": "Iloeje, Chukwunwike"
},
{
"affiliation": "Argonne National Laboratory",
"name": "Atkins, John"
},
{
"affiliation": "Argonne National Laboratory",
"name": "Sun, Kyle"
},
{
"affiliation": "Argonne National Laboratory",
"name": "Gallier, Audrey"
}
],
"title": "RELOG: Reverse Logistics Optimization",
"description": "<b>RELOG</b> is a supply chain optimization package focusing on reverse logistics and reverse manufacturing. For example, the package can be used to determine where to build recycling plants, what sizes should they have and which customers should be served by which plants. The package supports customized reverse logistics pipelines, with multiple types of plants, multiple types of product and multiple time periods."
}
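
For orientation, a minimal end-to-end sketch assembled only from calls that appear elsewhere in this changeset; the instance file name is a placeholder:

using RELOG

# Solve a reverse-logistics instance and write the CSV reports
# produced by the write_reports helper added in this changeset.
solution, model = RELOG.solve("instance.json", return_model = true)
RELOG.write_reports(solution, "output")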

View File

@@ -11,6 +11,29 @@ All notable changes to this project will be documented in this file.
[semver]: https://semver.org/spec/v2.0.0.html
[pkjjl]: https://pkgdocs.julialang.org/v1/compatibility/#compat-pre-1.0
## [0.7.2] -- 2023-03-10
### Fixed
- Core: Fixed modeling issue with collection disposal
- Core: Fixed column names in products CSV file
## [0.7.1] -- 2023-03-08
### Added
- Core: Add `write_reports` function
### Changed
- Web UI: Disable usage of heuristic method
### Fixed
- Core: Prevent plants from sending products to themselves
- Core: Enforce constraint that, if plant is closed, storage cannot be used
- Web UI: Fix parsing bug in disposal limit
## [0.7.0] -- 2023-02-23
### Added

View File

@@ -1,7 +1,7 @@
name = "RELOG"
uuid = "a2afcdf7-cf04-4913-85f9-c0d81ddf2008"
authors = ["Alinson S Xavier <axavier@anl.gov>"]
version = "0.7.0"
version = "0.7.2"
[deps]
CRC = "44b605c4-b955-5f2b-9b6d-d2bd01d3d205"

model-3-CapEx.jl (new file, 1699 lines added)

File diff suppressed because it is too large.

View File

@@ -207,6 +207,8 @@ const InputPage = () => {
}
if (outputFound) {
delete plant["outputs (tonne/tonne)"][productName];
delete plant["disposal cost ($/tonne)"][productName];
delete plant["disposal limit (tonne)"][productName];
}
}
save(newData);
@@ -235,7 +237,7 @@ const InputPage = () => {
"disposal limit (tonne)",
].forEach((key) => {
newData.plants[plantName][key] = { ...newData.plants[plantName][key] };
newData.plants[plantName][key][productName] = 0;
newData.plants[plantName][key][productName] = "0";
});
save(newData);
return newData;

View File

@@ -277,7 +277,7 @@ export const exportPlant = (original, parameters) => {
const v = exportValueAcf(dispCost, origDict);
if (v) {
resDict.disposal[dispName] = { "cost ($/tonne)": v };
const limit = original["disposal limit (tonne)"][dispName];
const limit = String(original["disposal limit (tonne)"][dispName]);
if (limit.length > 0) {
resDict.disposal[dispName]["limit (tonne)"] = exportValue(
limit,

View File

@@ -213,7 +213,7 @@ const samplePlantsOriginal = [
},
"disposal limit (tonne)": {
"Hydrogen gas": "10",
"Carbon dioxide": "",
"Carbon dioxide": 0,
Tar: "",
},
"emissions (tonne/tonne)": {
@@ -406,6 +406,7 @@ const samplePlantsExported = [
},
"Carbon dioxide": {
"cost ($/tonne)": [0, 0, 0],
"limit (tonne)": [0, 0, 0],
},
Tar: {
"cost ($/tonne)": [200, 400, 800],
@@ -439,6 +440,7 @@ const samplePlantsExported = [
},
"Carbon dioxide": {
"cost ($/tonne)": [0, 0, 0],
"limit (tonne)": [0, 0, 0],
},
Tar: {
"cost ($/tonne)": [100, 200.0, 400],

View File

@@ -44,6 +44,7 @@ function build_graph(instance::Instance)::Graph
# Build arcs from collection centers to plants, and from one plant to another
for source in [collection_shipping_nodes; plant_shipping_nodes]
for dest in process_nodes_by_input_product[source.product]
source.location != dest.location || continue
distance = _calculate_distance(
source.location.latitude,
source.location.longitude,
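
The new guard uses Julia's short-circuit form: cond || continue skips the iteration unless the condition holds, here preventing an arc from a plant back to itself. The same idiom on a toy list with made-up location names:

locations = ["Springfield", "Shelbyville", "Springfield"]
for src in locations, dst in locations
    src != dst || continue   # skip self-pairs, same guard as in build_graph
    println(src, " -> ", dst)
end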

View File

@@ -24,6 +24,9 @@ function _compress(instance::Instance)::Instance
# Compress products
for p in compressed.products
p.acquisition_cost = [mean(p.acquisition_cost)]
p.disposal_cost = [mean(p.disposal_cost)]
p.disposal_limit = [sum(p.disposal_limit)]
p.transportation_cost = [mean(p.transportation_cost)]
p.transportation_energy = [mean(p.transportation_energy)]
for (emission_name, emission_value) in p.transportation_emissions
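
Here _compress collapses per-period disposal data the same way it already handled acquisition costs: cost vectors become their mean, while the disposal limit becomes its total, so the compressed instance keeps the same overall disposal budget. A sketch with made-up numbers for three periods:

using Statistics

acquisition_cost = [10.0, 12.0, 14.0]    # $/tonne per period (made-up values)
disposal_limit   = [100.0, 100.0, 50.0]  # tonne per period (made-up values)

compressed_cost  = [mean(acquisition_cost)]  # [12.0]: costs are averaged, as in _compress
compressed_limit = [sum(disposal_limit)]     # [250.0]: limits are summed, as in _compress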

View File

@@ -171,7 +171,7 @@ function parse(json)::Instance
),
)
end
length(sizes) > 1 || push!(sizes, sizes[1])
length(sizes) > 1 || push!(sizes, deepcopy(sizes[1]))
sort!(sizes, by = x -> x.capacity)
# Initial capacity
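
The deepcopy here is the fix described in commit f3a2d1d616: pushing sizes[1] itself put two references to one mutable struct into the plant sizes array, so when _compress later scaled capacities by T, the shared object was scaled twice. A small sketch of the aliasing problem, using a hypothetical PlantSize struct that stands in for RELOG's real plant-size type:

mutable struct PlantSize          # hypothetical stand-in, not RELOG's actual type
    capacity::Float64
end

T = 2
sizes = [PlantSize(100.0)]
push!(sizes, sizes[1])            # old behaviour: both slots point to one object
for s in sizes
    s.capacity *= T               # intended: scale each size once
end
sizes[1].capacity                 # 400.0: the shared object was scaled twice

sizes = [PlantSize(100.0)]
push!(sizes, deepcopy(sizes[1]))  # new behaviour: independent copy
for s in sizes
    s.capacity *= T
end
sizes[1].capacity                 # 200.0: each size scaled exactly once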

View File

@@ -184,8 +184,8 @@ function create_shipping_node_constraints!(model::JuMP.Model)
for n in graph.collection_shipping_nodes
model[:eq_balance][n, t] = @constraint(
model,
sum(model[:flow][a, t] for a in n.outgoing_arcs) ==
n.location.amount[t] + model[:collection_dispose][n, t]
sum(model[:flow][a, t] for a in n.outgoing_arcs) +
model[:collection_dispose][n, t] == n.location.amount[t]
)
end
for prod in model[:instance].products
@@ -237,6 +237,12 @@ function create_process_node_constraints!(model::JuMP.Model)
model[:capacity][n, t] <= n.location.sizes[2].capacity * model[:is_open][n, t]
)
# If plant is closed, storage cannot be used
@constraint(
model,
model[:store][n, t] <= n.location.storage_limit * model[:is_open][n, t]
)
# If plant is open, capacity is greater than base
@constraint(
model,
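
The changes here rewrite the collection-center balance so disposal sits on the supply side, and add a linking constraint so that a closed plant cannot use storage: when the binary is_open variable is zero, store is forced to zero. A standalone JuMP sketch of that linking constraint, with a made-up storage limit (HiGHS appears here only because this changeset's tests already use it):

using JuMP, HiGHS

model = Model(HiGHS.Optimizer)
storage_limit = 50.0                       # made-up value
@variable(model, is_open, Bin)
@variable(model, store >= 0)
# If the plant is closed (is_open = 0), store is forced to 0;
# otherwise store may go up to storage_limit.
@constraint(model, store <= storage_limit * is_open)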

View File

@@ -17,6 +17,24 @@ function resolve(model_old, instance::Instance; optimizer = nothing)::OrderedDic
lp_optimizer = _get_default_lp_optimizer()
end
@info "Filtering candidate locations..."
selected_pairs = Set()
for ((node_old, t), var_old) in model_old[:is_open]
if JuMP.value(var_old) > 0.1
push!(
selected_pairs,
(node_old.location.plant_name, node_old.location.location_name),
)
end
end
filtered_plants = []
for p in instance.plants
if (p.plant_name, p.location_name) in selected_pairs
push!(filtered_plants, p)
end
end
instance.plants = filtered_plants
@info "Building new graph..."
graph = build_graph(instance)
_print_graph_stats(instance, graph)
@@ -48,10 +66,9 @@ function _fix_plants!(model_old, model_new)::Nothing
# Fix open_plant variables
for ((node_old, t), var_old) in model_old[:open_plant]
value_old = JuMP.value(var_old)
node_new = model_new[:graph].name_to_process_node_map[(
node_old.location.plant_name,
node_old.location.location_name,
)]
key = (node_old.location.plant_name, node_old.location.location_name)
key ∈ keys(model_new[:graph].name_to_process_node_map) || continue
node_new = model_new[:graph].name_to_process_node_map[key]
var_new = model_new[:open_plant][node_new, t]
JuMP.unset_binary(var_new)
JuMP.fix(var_new, value_old)
@@ -61,10 +78,9 @@ function _fix_plants!(model_old, model_new)::Nothing
for ((node_old, t), var_old) in model_old[:is_open]
t > 0 || continue
value_old = JuMP.value(var_old)
node_new = model_new[:graph].name_to_process_node_map[(
node_old.location.plant_name,
node_old.location.location_name,
)]
key = (node_old.location.plant_name, node_old.location.location_name)
key ∈ keys(model_new[:graph].name_to_process_node_map) || continue
node_new = model_new[:graph].name_to_process_node_map[key]
var_new = model_new[:is_open][node_new, t]
JuMP.unset_binary(var_new)
JuMP.fix(var_new, value_old)
@@ -73,10 +89,9 @@ function _fix_plants!(model_old, model_new)::Nothing
# Fix plant capacities
for ((node_old, t), var_old) in model_old[:capacity]
value_old = JuMP.value(var_old)
node_new = model_new[:graph].name_to_process_node_map[(
node_old.location.plant_name,
node_old.location.location_name,
)]
key = (node_old.location.plant_name, node_old.location.location_name)
key ∈ keys(model_new[:graph].name_to_process_node_map) || continue
node_new = model_new[:graph].name_to_process_node_map[key]
var_new = model_new[:capacity][node_new, t]
JuMP.delete_lower_bound(var_new)
JuMP.delete_upper_bound(var_new)
@@ -87,10 +102,9 @@ function _fix_plants!(model_old, model_new)::Nothing
for ((node_old, t), var_old) in model_old[:expansion]
t > 0 || continue
value_old = JuMP.value(var_old)
node_new = model_new[:graph].name_to_process_node_map[(
node_old.location.plant_name,
node_old.location.location_name,
)]
key = (node_old.location.plant_name, node_old.location.location_name)
key ∈ keys(model_new[:graph].name_to_process_node_map) || continue
node_new = model_new[:graph].name_to_process_node_map[key]
var_new = model_new[:expansion][node_new, t]
JuMP.delete_lower_bound(var_new)
JuMP.delete_upper_bound(var_new)
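
_fix_plants! carries decisions from the previously solved model into the re-solve model by removing the binary restriction and fixing each variable at its old value; the new key ∈ keys(...) guards simply skip plants that were filtered out above. A minimal sketch of the unset/fix pattern on toy models (not RELOG's actual models):

using JuMP, HiGHS

model_old = Model(HiGHS.Optimizer); set_silent(model_old)
@variable(model_old, x, Bin)
@objective(model_old, Max, x)
optimize!(model_old)

model_new = Model(HiGHS.Optimizer)
@variable(model_new, y, Bin)
unset_binary(y)        # drop the binary restriction first...
fix(y, value(x))       # ...then pin the variable at the old solution value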

View File

@@ -32,6 +32,7 @@ function solve(
output = nothing,
marginal_costs = true,
return_model = false,
graph = nothing,
)
if lp_optimizer == nothing
@@ -51,7 +52,9 @@ function solve(
@info "Building graph..."
graph = RELOG.build_graph(instance)
if graph === nothing
graph = RELOG.build_graph(instance)
end
_print_graph_stats(instance, graph)
@info "Building optimization model..."

View File

@@ -24,7 +24,7 @@ function plant_emissions_report(solution)::DataFrame
location_name,
year,
emission_name,
round(emission_amount[year], digits = 2),
round(emission_amount[year], digits = 6),
],
)
end

View File

@@ -30,7 +30,7 @@ function plant_outputs_report(solution)::DataFrame
end
end
end
sent = round.(sent, digits = 2)
sent = round.(sent, digits = 6)
disposal_amount = zeros(T)
disposal_cost = zeros(T)
@@ -38,8 +38,8 @@ function plant_outputs_report(solution)::DataFrame
disposal_amount += disposal_dict[product_name]["Amount (tonne)"]
disposal_cost += disposal_dict[product_name]["Cost (\$)"]
end
disposal_amount = round.(disposal_amount, digits = 2)
disposal_cost = round.(disposal_cost, digits = 2)
disposal_amount = round.(disposal_amount, digits = 6)
disposal_cost = round.(disposal_cost, digits = 6)
for year = 1:T
push!(
@@ -49,7 +49,7 @@ function plant_outputs_report(solution)::DataFrame
location_name,
year,
product_name,
round(amount_produced[year], digits = 2),
round(amount_produced[year], digits = 6),
sent[year],
disposal_amount[year],
disposal_cost[year],

View File

@@ -28,25 +28,25 @@ function plants_report(solution)::DataFrame
for (plant_name, plant_dict) in solution["Plants"]
for (location_name, location_dict) in plant_dict
for year = 1:T
capacity = round(location_dict["Capacity (tonne)"][year], digits = 2)
received = round(location_dict["Total input (tonne)"][year], digits = 2)
processed = round(location_dict["Process (tonne)"][year], digits = 2)
in_storage = round(location_dict["Storage (tonne)"][year], digits = 2)
utilization_factor = round(processed / capacity * 100.0, digits = 2)
energy = round(location_dict["Energy (GJ)"][year], digits = 2)
capacity = round(location_dict["Capacity (tonne)"][year], digits = 6)
received = round(location_dict["Total input (tonne)"][year], digits = 6)
processed = round(location_dict["Process (tonne)"][year], digits = 6)
in_storage = round(location_dict["Storage (tonne)"][year], digits = 6)
utilization_factor = round(processed / capacity * 100.0, digits = 6)
energy = round(location_dict["Energy (GJ)"][year], digits = 6)
latitude = round(location_dict["Latitude (deg)"], digits = 6)
longitude = round(location_dict["Longitude (deg)"], digits = 6)
opening_cost = round(location_dict["Opening cost (\$)"][year], digits = 2)
opening_cost = round(location_dict["Opening cost (\$)"][year], digits = 6)
expansion_cost =
round(location_dict["Expansion cost (\$)"][year], digits = 2)
round(location_dict["Expansion cost (\$)"][year], digits = 6)
fixed_cost =
round(location_dict["Fixed operating cost (\$)"][year], digits = 2)
round(location_dict["Fixed operating cost (\$)"][year], digits = 6)
var_cost =
round(location_dict["Variable operating cost (\$)"][year], digits = 2)
storage_cost = round(location_dict["Storage cost (\$)"][year], digits = 2)
round(location_dict["Variable operating cost (\$)"][year], digits = 6)
storage_cost = round(location_dict["Storage cost (\$)"][year], digits = 6)
total_cost = round(
opening_cost + expansion_cost + fixed_cost + var_cost + storage_cost,
digits = 2,
digits = 6,
)
push!(
df,

View File

@@ -5,7 +5,7 @@
using DataFrames
using CSV
function products_report(solution; marginal_costs = true)::DataFrame
function products_report(solution; marginal_costs)::DataFrame
df = DataFrame()
df."product name" = String[]
df."location name" = String[]
@@ -21,7 +21,11 @@ function products_report(solution; marginal_costs = true)::DataFrame
for (prod_name, prod_dict) in solution["Products"]
for (location_name, location_dict) in prod_dict
for year = 1:T
marginal_cost = location_dict["Marginal cost (\$/tonne)"][year]
if marginal_costs
marginal_cost = location_dict["Marginal cost (\$/tonne)"][year]
else
marginal_cost = 0.0
end
latitude = round(location_dict["Latitude (deg)"], digits = 6)
longitude = round(location_dict["Longitude (deg)"], digits = 6)
amount = location_dict["Amount (tonne)"][year]
@@ -37,8 +41,8 @@ function products_report(solution; marginal_costs = true)::DataFrame
longitude,
year,
amount,
marginal_cost,
amount_disposed,
marginal_cost,
acquisition_cost,
disposal_cost,
],
@@ -49,4 +53,5 @@ function products_report(solution; marginal_costs = true)::DataFrame
return df
end
write_products_report(solution, filename) = CSV.write(filename, products_report(solution))
write_products_report(solution, filename; marginal_costs = true) =
CSV.write(filename, products_report(solution; marginal_costs))
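
With marginal_costs now a required keyword on products_report, and forwarded by write_products_report, callers can skip the marginal-cost column explicitly. A usage sketch, assuming solution is the dictionary returned by RELOG.solve:

# Marginal costs are reported as 0.0, per the branch above.
df = RELOG.products_report(solution; marginal_costs = false)
RELOG.write_products_report(solution, "products.csv"; marginal_costs = false)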

View File

@@ -42,24 +42,24 @@ function transportation_report(solution)::DataFrame
round(dst_location_dict["Longitude (deg)"], digits = 6),
dst_location_dict["Input product"],
year,
round(src_location_dict["Distance (km)"], digits = 2),
round(src_location_dict["Distance (km)"], digits = 6),
round(
src_location_dict["Amount (tonne)"][year],
digits = 2,
digits = 6,
),
round(
src_location_dict["Amount (tonne)"][year] *
src_location_dict["Distance (km)"],
digits = 2,
digits = 6,
),
round(
src_location_dict["Transportation cost (\$)"][year],
digits = 2,
digits = 6,
),
round(
src_location_dict["Transportation energy (J)"][year] /
1e9,
digits = 2,
digits = 6,
),
],
)

View File

@@ -44,18 +44,18 @@ function transportation_emissions_report(solution)::DataFrame
round(dst_location_dict["Longitude (deg)"], digits = 6),
dst_location_dict["Input product"],
year,
round(src_location_dict["Distance (km)"], digits = 2),
round(src_location_dict["Distance (km)"], digits = 6),
round(
src_location_dict["Amount (tonne)"][year],
digits = 2,
digits = 6,
),
round(
src_location_dict["Amount (tonne)"][year] *
src_location_dict["Distance (km)"],
digits = 2,
digits = 6,
),
emission_name,
round(emission_amount[year], digits = 2),
round(emission_amount[year], digits = 6),
],
)
end

View File

@@ -12,3 +12,17 @@ function write(solution::AbstractDict, filename::AbstractString)
JSON.print(file, solution, 2)
end
end
function write_reports(
solution::AbstractDict,
basename::AbstractString;
marginal_costs = true,
)
RELOG.write_products_report(solution, "$(basename)_products.csv"; marginal_costs)
RELOG.write_plants_report(solution, "$(basename)_plants.csv")
RELOG.write_plant_outputs_report(solution, "$(basename)_plant_outputs.csv")
RELOG.write_plant_emissions_report(solution, "$(basename)_plant_emissions.csv")
RELOG.write_transportation_report(solution, "$(basename)_tr.csv")
RELOG.write_transportation_emissions_report(solution, "$(basename)_tr_emissions.csv")
return
end
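
A usage sketch for the new write_reports helper; the output file names follow directly from the body above, and it is assumed that RELOG.solve returns just the solution dictionary when return_model is left at its default:

solution = RELOG.solve("instance.json")
RELOG.write_reports(solution, "output"; marginal_costs = false)
# Writes output_products.csv, output_plants.csv, output_plant_outputs.csv,
# output_plant_emissions.csv, output_tr.csv and output_tr_emissions.csv.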

View File

@@ -14,7 +14,6 @@ function solve(root, filename)
)
ref_solution, ref_model = RELOG.solve(
ref_file,
heuristic = true,
optimizer = optimizer,
lp_optimizer = HiGHS.Optimizer,
return_model = true,

View File

@@ -72,7 +72,10 @@ function instance_parse_test()
@test plant.sizes[1].opening_cost == [3000, 3000]
@test plant.sizes[1].fixed_operating_cost == [50, 50]
@test plant.sizes[1].variable_operating_cost == [50, 50]
@test plant.sizes[1] == plant.sizes[2]
@test plant.sizes[2].capacity == 1000.0
@test plant.sizes[2].opening_cost == [3000, 3000]
@test plant.sizes[2].fixed_operating_cost == [50, 50]
@test plant.sizes[2].variable_operating_cost == [50, 50]
p4 = product_name_to_product["P4"]
@test plant.output[p3] == 0.05

View File

@@ -4,10 +4,18 @@
using RELOG
function model_resolve_test()
@testset "Resolve" begin
@testset "Resolve (exact)" begin
# Should not crash
filename = fixture("s1.json")
solution_old, model_old = RELOG.solve(filename, return_model = true)
solution_new = RELOG.resolve(model_old, filename)
end
@testset "Resolve (heuristic)" begin
# Should not crash
filename = fixture("s1.json")
solution_old, model_old =
RELOG.solve(filename, return_model = true, heuristic = true)
solution_new = RELOG.resolve(model_old, filename)
end
end