Use np.ndarray in instance features
@@ -52,7 +52,7 @@ class DynamicConstraintsComponent(Component):
         cids: Dict[str, List[str]] = {}
         constr_categories_dict = instance.get_constraint_categories()
         constr_features_dict = instance.get_constraint_features()
-        instance_features = sample.get_vector("static_instance_features")
+        instance_features = sample.get_array("static_instance_features")
         assert instance_features is not None
         for cid in self.known_cids:
             # Initialize categories
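The component-side consequence of this change is that "static_instance_features" now round-trips through Sample.put_array / Sample.get_array as a NumPy array instead of through put_vector / get_vector as a Python list. A minimal sketch of that contract, assuming MemorySample (the in-memory Sample implementation in miplearn.features.sample) can be constructed without arguments; the feature values are arbitrary:

import numpy as np
from miplearn.features.sample import MemorySample

sample = MemorySample()
# Store instance features as a float ndarray (arbitrary example values).
sample.put_array("static_instance_features", np.array([67.0, 21.75, 1287.92]))

# Components such as DynamicConstraintsComponent now read them back as an
# ndarray, not as a list.
instance_features = sample.get_array("static_instance_features")
assert instance_features is not None
assert isinstance(instance_features, np.ndarray)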
@@ -10,6 +10,7 @@ from typing import TYPE_CHECKING, Dict, Optional, List, Any, Tuple, KeysView, ca
 import numpy as np
 
 from miplearn.features.sample import Sample
+from miplearn.solvers.internal import LPSolveStats
 
 if TYPE_CHECKING:
     from miplearn.solvers.internal import InternalSolver
@@ -66,7 +67,10 @@ class FeaturesExtractor:
         self,
         solver: "InternalSolver",
         sample: Sample,
+        lp_stats: LPSolveStats,
     ) -> None:
+        for (k, v) in lp_stats.__dict__.items():
+            sample.put_scalar(k, v)
         variables = solver.get_variables(with_static=False, with_sa=self.with_sa)
         constraints = solver.get_constraints(with_static=False, with_sa=self.with_sa)
         sample.put_array("lp_var_basis_status", variables.basis_status)
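The new lp_stats parameter lets the extractor persist every LP statistic itself, by walking the stats object's __dict__ and storing each field under its own key. A self-contained sketch of that pattern, using hypothetical stand-ins (_Stats, _Sample) rather than the real LPSolveStats and Sample classes:

from dataclasses import dataclass
from typing import Dict, Optional

@dataclass
class _Stats:  # stand-in for miplearn.solvers.internal.LPSolveStats
    lp_value: Optional[float] = None
    lp_wallclock_time: Optional[float] = None

class _Sample:  # stand-in for miplearn.features.sample.Sample
    def __init__(self) -> None:
        self.scalars: Dict[str, Optional[float]] = {}

    def put_scalar(self, key: str, value: Optional[float]) -> None:
        self.scalars[key] = value

stats = _Stats(lp_value=1183.0, lp_wallclock_time=0.02)
sample = _Sample()
for (k, v) in stats.__dict__.items():  # same loop as extract_after_lp_features
    sample.put_scalar(k, v)
assert sample.scalars == {"lp_value": 1183.0, "lp_wallclock_time": 0.02}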
@@ -113,15 +117,21 @@ class FeaturesExtractor:
                 ],
             ),
         )
-        static_instance_features = sample.get_vector("static_instance_features")
+
+        # Build lp_instance_features
+        static_instance_features = sample.get_array("static_instance_features")
         assert static_instance_features is not None
-        sample.put_vector(
+        assert lp_stats.lp_value is not None
+        assert lp_stats.lp_wallclock_time is not None
+        sample.put_array(
             "lp_instance_features",
-            static_instance_features
-            + [
-                sample.get_scalar("lp_value"),
-                sample.get_scalar("lp_wallclock_time"),
-            ],
+            np.hstack(
+                [
+                    static_instance_features,
+                    lp_stats.lp_value,
+                    lp_stats.lp_wallclock_time,
+                ]
+            ),
         )
 
     def extract_after_mip_features(
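np.hstack flattens a mix of 1-D arrays and scalars into a single 1-D float array, which is what put_array now stores under "lp_instance_features" instead of a concatenated Python list. A small standalone check of that behavior (values are arbitrary):

import numpy as np

static_instance_features = np.array([2.0, 3.5])
lp_value, lp_wallclock_time = 1183.0, 0.02
lp_instance_features = np.hstack(
    [
        static_instance_features,
        lp_value,
        lp_wallclock_time,
    ]
)
assert lp_instance_features.shape == (4,)
assert lp_instance_features.dtype.kind == "f"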
@@ -241,30 +251,22 @@ class FeaturesExtractor:
             else:
                 lazy.append(False)
         sample.put_vector_list("static_constr_features", user_features)
-        sample.put_array("static_constr_lazy", np.array(lazy, dtype=bool))
         sample.put_array("static_constr_categories", np.array(categories, dtype="S"))
+        constr_lazy = np.array(lazy, dtype=bool)
+        sample.put_array("static_constr_lazy", constr_lazy)
+        sample.put_scalar("static_constr_lazy_count", int(constr_lazy.sum()))
 
     def _extract_user_features_instance(
         self,
         instance: "Instance",
         sample: Sample,
     ) -> None:
-        user_features = instance.get_instance_features()
-        if isinstance(user_features, np.ndarray):
-            user_features = user_features.tolist()
-        assert isinstance(user_features, list), (
-            f"Instance features must be a list. "
-            f"Found {type(user_features).__name__} instead."
-        )
-        for v in user_features:
-            assert isinstance(v, numbers.Real), (
-                f"Instance features must be a list of numbers. "
-                f"Found {type(v).__name__} instead."
-            )
-        constr_lazy = sample.get_array("static_constr_lazy")
-        assert constr_lazy is not None
-        sample.put_vector("static_instance_features", user_features)
-        sample.put_scalar("static_constr_lazy_count", int(sum(constr_lazy)))
+        features = cast(np.ndarray, instance.get_instance_features())
+        if isinstance(features, list):
+            features = np.array(features, dtype=float)
+        assert isinstance(features, np.ndarray)
+        assert features.dtype.kind in ["f"], f"Unsupported dtype: {features.dtype}"
+        sample.put_array("static_instance_features", features)
 
     # Alvarez, A. M., Louveaux, Q., & Wehenkel, L. (2017). A machine learning-based
     # approximation of strong branching. INFORMS Journal on Computing, 29(1), 185-195.
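The rewritten _extract_user_features_instance accepts either a list or an ndarray from get_instance_features, promotes lists to float arrays, and rejects non-float dtypes; the lazy-constraint count now comes straight from the boolean array's sum. Both rules in isolation, with plain NumPy (example values are arbitrary):

import numpy as np

# Lists returned by get_instance_features are promoted to float arrays.
features = [1, 2, 3]
if isinstance(features, list):
    features = np.array(features, dtype=float)
assert isinstance(features, np.ndarray)
assert features.dtype.kind in ["f"], f"Unsupported dtype: {features.dtype}"

# An integer array would be rejected: its dtype.kind is "i", not "f".
assert np.array([1, 2, 3]).dtype.kind == "i"

# static_constr_lazy_count is just the number of True entries.
constr_lazy = np.array([True, False, True], dtype=bool)
assert int(constr_lazy.sum()) == 2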
@@ -3,8 +3,8 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 
 import logging
-import traceback
 import time
+import traceback
 from typing import Optional, List, Any, cast, Dict, Tuple
 
 from p_tqdm import p_map
@@ -15,7 +15,6 @@ from miplearn.components.dynamic_user_cuts import UserCutsComponent
 from miplearn.components.objective import ObjectiveValueComponent
 from miplearn.components.primal import PrimalSolutionComponent
 from miplearn.features.extractor import FeaturesExtractor
-from miplearn.features.sample import Sample, MemorySample
 from miplearn.instance.base import Instance
 from miplearn.instance.picklegz import PickleGzInstance
 from miplearn.solvers import _RedirectOutput
@@ -208,9 +207,9 @@ class LearningSolver:
         # -------------------------------------------------------
         logger.info("Extracting features (after-lp)...")
         initial_time = time.time()
-        for (k, v) in lp_stats.__dict__.items():
-            sample.put_scalar(k, v)
-        self.extractor.extract_after_lp_features(self.internal_solver, sample)
+        self.extractor.extract_after_lp_features(
+            self.internal_solver, sample, lp_stats
+        )
         logger.info(
             "Features (after-lp) extracted in %.2f seconds"
             % (time.time() - initial_time)
@@ -85,8 +85,8 @@ def test_knapsack() -> None:
 
     # after-lp
     # -------------------------------------------------------
-    solver.solve_lp()
-    extractor.extract_after_lp_features(solver, sample)
+    lp_stats = solver.solve_lp()
+    extractor.extract_after_lp_features(solver, sample, lp_stats)
     assert_equals(
         sample.get_array("lp_var_basis_status"),
         np.array(["U", "B", "U", "L", "U"], dtype="S"),
@@ -204,12 +204,12 @@ if __name__ == "__main__":
     solver = GurobiSolver()
     instance = MpsInstance(sys.argv[1])
    solver.set_instance(instance)
-    solver.solve_lp(tee=True)
+    lp_stats = solver.solve_lp(tee=True)
     extractor = FeaturesExtractor(with_lhs=False)
     sample = Hdf5Sample("tmp/prof.h5", mode="w")
 
     def run() -> None:
         extractor.extract_after_load_features(instance, solver, sample)
-        extractor.extract_after_lp_features(solver, sample)
+        extractor.extract_after_lp_features(solver, sample, lp_stats)
 
     cProfile.run("run()", filename="tmp/prof")