Compare commits
3 commits: 54b5b9dd7f...35dd5ab1a9
| Author | SHA1 | Date |
|---|---|---|
| | 35dd5ab1a9 | |
| | 5c7b8038a1 | |
| | c2d5e58c75 | |
1  web/backend/.gitignore  vendored  Normal file
@@ -0,0 +1 @@
```
jobs
```
17  web/backend/Project.toml  Normal file
@@ -0,0 +1,17 @@
```toml
name = "Backend"
uuid = "948642ed-e3f9-4642-9296-0f1eaf40c938"
version = "0.1.0"
authors = ["Alinson S. Xavier <git@axavier.org>"]

[deps]
CodecZlib = "944b1d66-785c-5afd-91f1-9de20f533193"
HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
UnitCommitment = "64606440-39ea-11e9-0f29-3303a1d3d877"

[compat]
CodecZlib = "0.7.8"
HTTP = "1.10.19"
JSON = "0.21.4"
Random = "1.11.0"
```
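To try the backend locally against this project file, the standard Julia environment workflow applies; a minimal sketch, assuming the repository root as the working directory:

```julia
# Minimal sketch: activate and instantiate the backend environment
# (the relative path is taken from this diff; adjust as needed).
using Pkg
Pkg.activate("web/backend")
Pkg.instantiate()   # fetches CodecZlib, HTTP, JSON, Random, UnitCommitment
```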
12  web/backend/src/Backend.jl  Normal file
@@ -0,0 +1,12 @@
```julia
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

module Backend

basedir = joinpath(dirname(@__FILE__), "..")

include("jobs.jl")
include("server.jl")

end
```
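Backend.jl only wires the two source files together and records the package root in `basedir`. Once the environment above is active, the server can be brought up and torn down as sketched below; the port and the HiGHS solver are illustrative choices, not fixed by this diff:

```julia
# Sketch: bring the backend up and down (port and solver are illustrative).
using Backend, HiGHS

handle = Backend.start_server(8080; optimizer = HiGHS.Optimizer)  # defined in server.jl below
# ... accept /submit requests ...
Backend.stop(handle)   # stops the job processor and closes the HTTP server
```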
99  web/backend/src/jobs.jl  Normal file
@@ -0,0 +1,99 @@
```julia
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

using UnitCommitment

import Base: put!

Base.@kwdef mutable struct JobProcessor
    pending::Channel{String} = Channel{String}(Inf)
    processing::Channel{String} = Channel{String}(Inf)
    shutdown::Channel{Bool} = Channel{Bool}(1)
    worker_task::Union{Task,Nothing} = nothing
    optimizer = nothing
end

function Base.put!(processor::JobProcessor, job_id::String)
    @info "New job received: $job_id"
    return put!(processor.pending, job_id)
end

function isbusy(processor::JobProcessor)
    return isready(processor.pending) || isready(processor.processing)
end

function run!(processor::JobProcessor)
    while true
        # Check for shutdown signal
        if isready(processor.shutdown)
            break
        end

        # Wait for a job with timeout
        if !isready(processor.pending)
            sleep(0.1)
            continue
        end

        # Move job from pending to processing queue
        job_id = take!(processor.pending)
        @info "Processing job: $job_id"
        job_dir = joinpath(basedir, "jobs", job_id)
        log_path = joinpath(job_dir, "output.log")
        put!(processor.processing, job_id)

        # Run optimization
        try
            open(log_path, "w") do io
                redirect_stdout(io) do
                    redirect_stderr(io) do
                        json_path = joinpath(job_dir, "input.json.gz")
                        instance = UnitCommitment.read(json_path)
                        model = UnitCommitment.build_model(;
                            instance,
                            optimizer = processor.optimizer,
                        )
                        UnitCommitment.optimize!(model)
                        solution = UnitCommitment.solution(model)
                        return UnitCommitment.write(
                            joinpath(job_dir, "output.json"),
                            solution,
                        )
                    end
                end
            end

            # Remove job from processing queue
            take!(processor.processing)
        catch e
            open(log_path, "a") do io
                println(io, "\nError: ", e)
                println(io, "\nStacktrace:")
                return Base.show_backtrace(io, catch_backtrace())
            end
        end
    end
end

function start(processor::JobProcessor)
    processor.worker_task = @async run!(processor)
    return
end

function stop(processor::JobProcessor)
    # Signal worker to stop
    put!(processor.shutdown, true)

    # Wait for worker to finish
    if processor.worker_task !== nothing
        try
            wait(processor.worker_task)
        catch
            # Worker may have already exited
        end
    end
    return
end

export JobProcessor, start, stop, put!, isbusy
```
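The processor can also be driven directly, without the HTTP layer. The following condensed sketch mirrors `jobs_test.jl` later in this diff; the solver and job ID are illustrative, and the input must already exist at `jobs/<id>/input.json.gz` under the package root:

```julia
# Condensed from jobs_test.jl below; solver and job ID are illustrative.
using Backend, HiGHS

processor = JobProcessor(optimizer = HiGHS.Optimizer)
start(processor)               # spawn the async worker loop (run!)
put!(processor, "qwe123")      # expects jobs/qwe123/input.json.gz to exist
while isbusy(processor)        # block until pending and processing queues drain
    sleep(0.1)
end
stop(processor)                # signal shutdown and wait for the worker task
```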
73  web/backend/src/server.jl  Normal file
@@ -0,0 +1,73 @@
```julia
using HTTP
using Random
using JSON
using CodecZlib
using UnitCommitment

struct ServerHandle
    server::HTTP.Server
    processor::JobProcessor
end

function submit(req, processor::JobProcessor)
    # Check if request body is empty
    compressed_body = HTTP.payload(req)
    if isempty(compressed_body)
        return HTTP.Response(400, "Error: No file provided")
    end

    # Validate compressed JSON by decompressing and parsing
    try
        decompressed_data = transcode(GzipDecompressor, compressed_body)
        JSON.parse(String(decompressed_data))
    catch e
        return HTTP.Response(400, "Error: Invalid compressed JSON")
    end

    # Generate random job ID (lowercase letters and numbers)
    job_id = randstring(['a':'z'; '0':'9'], 16)

    # Create job directory
    job_dir = joinpath(basedir, "jobs", job_id)
    mkpath(job_dir)

    # Save input file
    json_path = joinpath(job_dir, "input.json.gz")
    write(json_path, compressed_body)

    # Add job to queue
    put!(processor, job_id)

    # Return job ID as JSON
    response_body = JSON.json(Dict("job_id" => job_id))
    return HTTP.Response(200, response_body)
end

function jobs_view(req)
    return HTTP.Response(200, "OK")
end

function start_server(port::Int = 8080; optimizer)
    Random.seed!()

    # Create and start job processor
    processor = JobProcessor(optimizer = optimizer)
    start(processor)

    router = HTTP.Router()

    # Register /submit endpoint
    HTTP.register!(router, "POST", "/submit", req -> submit(req, processor))

    # Register job/*/view endpoint
    HTTP.register!(router, "GET", "/jobs/*/view", jobs_view)

    server = HTTP.serve!(router, port; verbose = false)
    return ServerHandle(server, processor)
end

function stop(handle::ServerHandle)
    stop(handle.processor)
    close(handle.server)
    return nothing
end
```
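From the client side, `/submit` expects a gzip-compressed UnitCommitment JSON instance in the request body and returns the job ID as JSON. A minimal round-trip sketch, mirroring `server_test.jl` below; the URL, port, and file path are illustrative:

```julia
# Sketch: submit a gzipped instance to a running backend (URL and path illustrative).
using HTTP, JSON

compressed = read("case14.json.gz")   # already gzip-compressed instance file
response = HTTP.post(
    "http://localhost:8080/submit",
    ["Content-Type" => "application/gzip"],
    compressed,
)
job_id = JSON.parse(String(response.body))["job_id"]
println("Submitted job: ", job_id)
# Results appear under jobs/<job_id>/output.json once the worker finishes.
```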
23  web/backend/test/Project.toml  Normal file
@@ -0,0 +1,23 @@
```toml
name = "BackendT"
uuid = "27da795e-16fd-43bd-a2ba-f77bdecaf977"
version = "0.1.0"
authors = ["Alinson S. Xavier <git@axavier.org>"]

[deps]
Backend = "948642ed-e3f9-4642-9296-0f1eaf40c938"
CodecZlib = "944b1d66-785c-5afd-91f1-9de20f533193"
HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3"
HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
JuliaFormatter = "98e50ef6-434e-11e9-1051-2b60c6c9e899"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[compat]
CodecZlib = "0.7.8"
HTTP = "1.10.19"
HiGHS = "1.20.1"
JSON = "0.21.4"
JuliaFormatter = "2.2.0"
Revise = "3.12.0"
Test = "1.11.0"
```
40  web/backend/test/src/BackendT.jl  Normal file
@@ -0,0 +1,40 @@
```julia
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

module BackendT

using Test
using HTTP
using JSON
using CodecZlib
import Backend
import JuliaFormatter
using HiGHS

BASEDIR = dirname(@__FILE__)

include("jobs_test.jl")
include("server_test.jl")

function fixture(path::String)::String
    return "$BASEDIR/../fixtures/$path"
end

function runtests()
    @testset "UCJL Backend" begin
        server_test_usage()
        # jobs_test_usage()
    end
    return
end

function format()
    JuliaFormatter.format(BASEDIR, verbose = true)
    JuliaFormatter.format("$BASEDIR/../../src", verbose = true)
    return
end

export runtests, format

end
```
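The diff does not show how the suite is launched; one plausible invocation, assuming the test environment's manifest resolves the local `Backend` package, is:

```julia
# Assumed invocation (not part of the diff): run the backend test suite.
using Pkg
Pkg.activate("web/backend/test")
Pkg.instantiate()
using BackendT
BackendT.runtests()   # currently runs server_test_usage(); jobs_test_usage() is commented out
```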
43  web/backend/test/src/jobs_test.jl  Normal file
@@ -0,0 +1,43 @@
```julia
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

using Backend
using Test
using HiGHS

function jobs_test_usage()
    @testset "JobProcessor" begin
        # Setup job directory
        job_id = "qwe123"
        job_dir = joinpath(Backend.basedir, "jobs", job_id)
        mkpath(job_dir)
        cp(fixture("case14.json.gz"), joinpath(job_dir, "input.json.gz"))

        try
            # Create processor with HiGHS optimizer
            processor = JobProcessor(optimizer = HiGHS.Optimizer)

            # Start the worker
            start(processor)

            # Push job to queue
            put!(processor, job_id)

            # Wait until all jobs are processed
            while isbusy(processor)
                sleep(0.1)
            end

            # Check that solution file exists
            output_path = joinpath(job_dir, "output.json")
            @test isfile(output_path)

            # Stop the worker
            stop(processor)
        finally
            # Cleanup
            rm(job_dir, recursive = true, force = true)
        end
    end
end
```
44  web/backend/test/src/server_test.jl  Normal file
@@ -0,0 +1,44 @@
```julia
# UnitCommitment.jl: Optimization Package for Security-Constrained Unit Commitment
# Copyright (C) 2025, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

const PORT = 32617

function server_test_usage()
    server = Backend.start_server(PORT; optimizer = HiGHS.Optimizer)

    # Read the compressed fixture file
    compressed_data = read(fixture("case14.json.gz"))

    # Submit test case
    response = HTTP.post(
        "http://localhost:$PORT/submit",
        ["Content-Type" => "application/gzip"],
        compressed_data,
    )
    @test response.status == 200

    # Check response
    response_data = JSON.parse(String(response.body))
    @test haskey(response_data, "job_id")
    job_id = response_data["job_id"]
    @test length(job_id) == 16

    # Wait for jobs to finish and stop server
    sleep(0.1)
    stop(server)

    # Verify the compressed file was saved correctly
    job_dir = joinpath(Backend.basedir, "jobs", job_id)
    saved_input_path = joinpath(job_dir, "input.json.gz")
    saved_log_path = joinpath(job_dir, "output.log")
    saved_output_path = joinpath(job_dir, "output.json")
    @test isfile(saved_input_path)
    @test isfile(saved_log_path)
    @test isfile(saved_output_path)
    saved_data = read(saved_input_path)
    @test saved_data == compressed_data

    # Clean up: remove the job directory
    # rm(job_dir, recursive=true)
end
```
Four image files also appear in this compare view; their before/after sizes are unchanged (15 KiB, 5.2 KiB, 9.4 KiB, 2.8 KiB).