Reformat source code

2024-05-29 09:04:59 -05:00
parent e9deac94a5
commit 93e604817b
15 changed files with 106 additions and 107 deletions


@@ -43,8 +43,8 @@ function runtests()
 end
 function format()
-    JuliaFormatter.format(BASEDIR, verbose=true)
-    JuliaFormatter.format("$BASEDIR/../../src", verbose=true)
+    JuliaFormatter.format(BASEDIR, verbose = true)
+    JuliaFormatter.format("$BASEDIR/../../src", verbose = true)
     return
 end
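
For reference, the `kw=value` to `kw = value` change applied throughout this commit matches JuliaFormatter's `whitespace_in_kwargs` option, which is enabled in the default style. A minimal sketch of an equivalent manual invocation (the "src" path is illustrative, not necessarily this repository's layout):

    using JuliaFormatter

    # whitespace_in_kwargs = true (the default) formats keyword arguments as
    # `f(a = 1)` rather than `f(a=1)`, the style applied across this commit.
    # "src" is an assumed path for illustration.
    format("src"; whitespace_in_kwargs = true, verbose = true)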


@@ -10,34 +10,32 @@ function gen_stab()
     randint = pyimport("scipy.stats").randint
     np.random.seed(42)
     gen = MaxWeightStableSetGenerator(
-        w=uniform(10.0, scale=1.0),
-        n=randint(low=50, high=51),
-        p=uniform(loc=0.5, scale=0.0),
-        fix_graph=true,
+        w = uniform(10.0, scale = 1.0),
+        n = randint(low = 50, high = 51),
+        p = uniform(loc = 0.5, scale = 0.0),
+        fix_graph = true,
     )
     data = gen.generate(1)
-    data_filenames = write_pkl_gz(data, "$BASEDIR/../fixtures", prefix="stab-n50-")
+    data_filenames = write_pkl_gz(data, "$BASEDIR/../fixtures", prefix = "stab-n50-")
     collector = BasicCollector()
     collector.collect(
         data_filenames,
-        data -> build_stab_model_jump(data, optimizer=SCIP.Optimizer),
-        progress=true,
-        verbose=true,
+        data -> build_stab_model_jump(data, optimizer = SCIP.Optimizer),
+        progress = true,
+        verbose = true,
     )
 end
 function test_cuts()
     data_filenames = ["$BASEDIR/../fixtures/stab-n50-00000.pkl.gz"]
     clf = pyimport("sklearn.dummy").DummyClassifier()
-    extractor = H5FieldsExtractor(
-        instance_fields=["static_var_obj_coeffs"],
-    )
-    comp = MemorizingCutsComponent(clf=clf, extractor=extractor)
-    solver = LearningSolver(components=[comp])
+    extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"])
+    comp = MemorizingCutsComponent(clf = clf, extractor = extractor)
+    solver = LearningSolver(components = [comp])
     solver.fit(data_filenames)
     stats = solver.optimize(
         data_filenames[1],
-        data -> build_stab_model_jump(data, optimizer=SCIP.Optimizer),
+        data -> build_stab_model_jump(data, optimizer = SCIP.Optimizer),
     )
     @test stats["Cuts: AOT"] > 0
 end


@@ -11,36 +11,34 @@ function gen_tsp()
     np.random.seed(42)
     gen = TravelingSalesmanGenerator(
-        x=uniform(loc=0.0, scale=1000.0),
-        y=uniform(loc=0.0, scale=1000.0),
-        n=randint(low=20, high=21),
-        gamma=uniform(loc=1.0, scale=0.25),
-        fix_cities=true,
-        round=true,
+        x = uniform(loc = 0.0, scale = 1000.0),
+        y = uniform(loc = 0.0, scale = 1000.0),
+        n = randint(low = 20, high = 21),
+        gamma = uniform(loc = 1.0, scale = 0.25),
+        fix_cities = true,
+        round = true,
     )
     data = gen.generate(1)
-    data_filenames = write_pkl_gz(data, "$BASEDIR/../fixtures", prefix="tsp-n20-")
+    data_filenames = write_pkl_gz(data, "$BASEDIR/../fixtures", prefix = "tsp-n20-")
     collector = BasicCollector()
     collector.collect(
         data_filenames,
-        data -> build_tsp_model_jump(data, optimizer=GLPK.Optimizer),
-        progress=true,
-        verbose=true,
+        data -> build_tsp_model_jump(data, optimizer = GLPK.Optimizer),
+        progress = true,
+        verbose = true,
     )
 end
 function test_lazy()
     data_filenames = ["$BASEDIR/../fixtures/tsp-n20-00000.pkl.gz"]
     clf = pyimport("sklearn.dummy").DummyClassifier()
-    extractor = H5FieldsExtractor(
-        instance_fields=["static_var_obj_coeffs"],
-    )
-    comp = MemorizingLazyComponent(clf=clf, extractor=extractor)
-    solver = LearningSolver(components=[comp])
+    extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"])
+    comp = MemorizingLazyComponent(clf = clf, extractor = extractor)
+    solver = LearningSolver(components = [comp])
     solver.fit(data_filenames)
     stats = solver.optimize(
         data_filenames[1],
-        data -> build_tsp_model_jump(data, optimizer=GLPK.Optimizer),
+        data -> build_tsp_model_jump(data, optimizer = GLPK.Optimizer),
     )
     @test stats["Lazy Constraints: AOT"] > 0
 end


@@ -8,11 +8,11 @@ using SCIP
 function test_problems_stab()
     nx = pyimport("networkx")
     data = MaxWeightStableSetData(
-        graph=nx.gnp_random_graph(25, 0.5, seed=42),
-        weights=repeat([1.0], 25),
+        graph = nx.gnp_random_graph(25, 0.5, seed = 42),
+        weights = repeat([1.0], 25),
     )
     h5 = H5File(tempname(), "w")
-    model = build_stab_model_jump(data, optimizer=SCIP.Optimizer)
+    model = build_stab_model_jump(data, optimizer = SCIP.Optimizer)
     model.extract_after_load(h5)
     model.optimize()
     model.extract_after_mip(h5)


@@ -10,17 +10,12 @@ function test_problems_tsp()
     squareform = pyimport("scipy.spatial.distance").squareform
     data = TravelingSalesmanData(
-        n_cities=6,
-        distances=squareform(pdist([
-            [0.0, 0.0],
-            [1.0, 0.0],
-            [2.0, 0.0],
-            [3.0, 0.0],
-            [0.0, 1.0],
-            [3.0, 1.0],
-        ])),
+        n_cities = 6,
+        distances = squareform(
+            pdist([[0.0, 0.0], [1.0, 0.0], [2.0, 0.0], [3.0, 0.0], [0.0, 1.0], [3.0, 1.0]]),
+        ),
     )
-    model = build_tsp_model_jump(data, optimizer=GLPK.Optimizer)
+    model = build_tsp_model_jump(data, optimizer = GLPK.Optimizer)
     model.optimize()
     @test objective_value(model.inner) == 8.0
     return


@@ -46,7 +46,7 @@ function test_jld2()
         _TestStruct(2, [1.0, 2.0, 3.0]),
         _TestStruct(3, [3.0, 3.0, 3.0]),
     ]
-    filenames = write_jld2(data, dirname, prefix="obj")
+    filenames = write_jld2(data, dirname, prefix = "obj")
     @test all(
         filenames .==
         ["$dirname/obj00001.jld2", "$dirname/obj00002.jld2", "$dirname/obj00003.jld2"],


@@ -13,16 +13,16 @@ function test_usage()
     @debug "Setting up LearningSolver..."
     solver = LearningSolver(
-        components=[
+        components = [
             IndependentVarsPrimalComponent(
-                base_clf=SingleClassFix(
+                base_clf = SingleClassFix(
                     MinProbabilityClassifier(
-                        base_clf=LogisticRegression(),
-                        thresholds=[0.95, 0.95],
+                        base_clf = LogisticRegression(),
+                        thresholds = [0.95, 0.95],
                     ),
                 ),
-                extractor=AlvLouWeh2017Extractor(),
-                action=SetWarmStart(),
+                extractor = AlvLouWeh2017Extractor(),
+                action = SetWarmStart(),
             ),
         ],
     )