From bd78518c1fe91b31b881780adeb8c154b18b06a2 Mon Sep 17 00:00:00 2001
From: "Alinson S. Xavier"
Date: Sun, 11 Apr 2021 09:10:14 -0500
Subject: [PATCH] Convert MIPSolveStats into dataclass

---
 miplearn/solvers/gurobi.py            | 19 +++++++--------
 miplearn/solvers/internal.py          | 12 +++++++++-
 miplearn/solvers/learning.py          | 12 +++++-----
 miplearn/solvers/pyomo/base.py        | 19 +++++++--------
 miplearn/solvers/tests/__init__.py    | 34 ++++++++++++++++-----------
 miplearn/types.py                     | 30 +++++++----------------
 tests/components/test_objective.py    |  4 ++--
 tests/components/test_primal.py       |  2 +-
 tests/problems/test_stab.py           |  2 +-
 tests/problems/test_tsp.py            |  4 ++--
 tests/solvers/test_learning_solver.py |  2 +-
 11 files changed, 71 insertions(+), 69 deletions(-)

diff --git a/miplearn/solvers/gurobi.py b/miplearn/solvers/gurobi.py
index 8c226db..d8c0829 100644
--- a/miplearn/solvers/gurobi.py
+++ b/miplearn/solvers/gurobi.py
@@ -220,16 +220,15 @@ class GurobiSolver(InternalSolver):
         lb = self.model.objVal
         ub = self.model.objBound
         ws_value = self._extract_warm_start_value(log)
-        stats: MIPSolveStats = {
-            "Lower bound": lb,
-            "Upper bound": ub,
-            "Wallclock time": total_wallclock_time,
-            "Nodes": total_nodes,
-            "Sense": sense,
-            "MIP log": log,
-            "Warm start value": ws_value,
-        }
-        return stats
+        return MIPSolveStats(
+            mip_lower_bound=lb,
+            mip_upper_bound=ub,
+            mip_wallclock_time=total_wallclock_time,
+            mip_nodes=total_nodes,
+            mip_sense=sense,
+            mip_log=log,
+            mip_warm_start_value=ws_value,
+        )
 
     @overrides
     def get_solution(self) -> Optional[Solution]:
diff --git a/miplearn/solvers/internal.py b/miplearn/solvers/internal.py
index 95b02c4..921ed69 100644
--- a/miplearn/solvers/internal.py
+++ b/miplearn/solvers/internal.py
@@ -14,7 +14,6 @@ from miplearn.instance.base import Instance
 from miplearn.types import (
     IterationCallback,
     LazyCallback,
-    MIPSolveStats,
     BranchPriorities,
     UserCutCallback,
     Solution,
@@ -31,6 +30,17 @@ class LPSolveStats:
     lp_wallclock_time: Optional[float] = None
 
 
+@dataclass
+class MIPSolveStats:
+    mip_lower_bound: Optional[float]
+    mip_log: str
+    mip_nodes: Optional[int]
+    mip_sense: str
+    mip_upper_bound: Optional[float]
+    mip_wallclock_time: float
+    mip_warm_start_value: Optional[float]
+
+
 class InternalSolver(ABC, EnforceOverrides):
     """
     Abstract class representing the MIP solver used internally by LearningSolver.
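
For reference, a minimal usage sketch of the new dataclass defined above (not part of the patch; the numeric values are made up for illustration). Dict-style lookups such as stats["Lower bound"] are replaced by attribute access:

    from miplearn.solvers.internal import MIPSolveStats

    # Construct the stats object with keyword arguments for every field,
    # mirroring how GurobiSolver and BasePyomoSolver do it in this patch.
    # All values below are illustrative only.
    stats = MIPSolveStats(
        mip_lower_bound=1183.0,
        mip_log="<solver log text>",
        mip_nodes=1,
        mip_sense="max",
        mip_upper_bound=1183.0,
        mip_wallclock_time=0.5,
        mip_warm_start_value=None,
    )

    # Attribute access replaces the old TypedDict key lookups.
    assert stats.mip_lower_bound == stats.mip_upper_bound
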
diff --git a/miplearn/solvers/learning.py b/miplearn/solvers/learning.py
index f1cefc3..8b05cb0 100644
--- a/miplearn/solvers/learning.py
+++ b/miplearn/solvers/learning.py
@@ -240,11 +240,11 @@ class LearningSolver:
             user_cut_cb=user_cut_cb,
             lazy_cb=lazy_cb,
         )
-        stats.update(cast(LearningSolveStats, mip_stats))
+        stats.update(cast(LearningSolveStats, mip_stats.__dict__))
         stats["Solver"] = "default"
         stats["Gap"] = self._compute_gap(
-            ub=stats["Upper bound"],
-            lb=stats["Lower bound"],
+            ub=mip_stats.mip_upper_bound,
+            lb=mip_stats.mip_lower_bound,
         )
         stats["Mode"] = self.mode
 
@@ -256,9 +256,9 @@ class LearningSolver:
 
         # Add some information to training_sample
        # -------------------------------------------------------
-        training_sample.lower_bound = stats["Lower bound"]
-        training_sample.upper_bound = stats["Upper bound"]
-        training_sample.mip_log = stats["MIP log"]
+        training_sample.lower_bound = mip_stats.mip_lower_bound
+        training_sample.upper_bound = mip_stats.mip_upper_bound
+        training_sample.mip_log = mip_stats.mip_log
         training_sample.solution = self.internal_solver.get_solution()
 
         # After-solve callbacks
diff --git a/miplearn/solvers/pyomo/base.py b/miplearn/solvers/pyomo/base.py
index 5c7f905..dca2062 100644
--- a/miplearn/solvers/pyomo/base.py
+++ b/miplearn/solvers/pyomo/base.py
@@ -136,16 +136,15 @@ class BasePyomoSolver(InternalSolver):
             self._has_mip_solution = True
         lb = results["Problem"][0]["Lower bound"]
         ub = results["Problem"][0]["Upper bound"]
-        stats: MIPSolveStats = {
-            "Lower bound": lb,
-            "Upper bound": ub,
-            "Wallclock time": total_wallclock_time,
-            "Sense": self._obj_sense,
-            "MIP log": log,
-            "Nodes": node_count,
-            "Warm start value": ws_value,
-        }
-        return stats
+        return MIPSolveStats(
+            mip_lower_bound=lb,
+            mip_upper_bound=ub,
+            mip_wallclock_time=total_wallclock_time,
+            mip_sense=self._obj_sense,
+            mip_log=log,
+            mip_nodes=node_count,
+            mip_warm_start_value=ws_value,
+        )
 
     @overrides
     def get_solution(self) -> Optional[Solution]:
diff --git a/miplearn/solvers/tests/__init__.py b/miplearn/solvers/tests/__init__.py
index 1e53d85..9901dc0 100644
--- a/miplearn/solvers/tests/__init__.py
+++ b/miplearn/solvers/tests/__init__.py
@@ -243,11 +243,17 @@ def run_basic_usage_tests(solver: InternalSolver) -> None:
         user_cut_cb=None,
     )
     assert not solver.is_infeasible()
-    assert len(mip_stats["MIP log"]) > 100
-    assert_equals(mip_stats["Lower bound"], 1183.0)
-    assert_equals(mip_stats["Upper bound"], 1183.0)
-    assert_equals(mip_stats["Sense"], "max")
-    assert isinstance(mip_stats["Wallclock time"], float)
+    assert mip_stats.mip_log is not None
+    assert len(mip_stats.mip_log) > 100
+    assert mip_stats.mip_lower_bound is not None
+    assert_equals(mip_stats.mip_lower_bound, 1183.0)
+    assert mip_stats.mip_upper_bound is not None
+    assert_equals(mip_stats.mip_upper_bound, 1183.0)
+    assert mip_stats.mip_sense is not None
+    assert_equals(mip_stats.mip_sense, "max")
+    assert mip_stats.mip_wallclock_time is not None
+    assert isinstance(mip_stats.mip_wallclock_time, float)
+    assert mip_stats.mip_wallclock_time > 0
 
     # Fetch variables (after-load)
     assert_equals(
@@ -325,7 +331,7 @@ def run_basic_usage_tests(solver: InternalSolver) -> None:
 
     # Re-solve MIP and verify that constraint affects the solution
     stats = solver.solve()
-    assert_equals(stats["Lower bound"], 1030.0)
+    assert_equals(stats.mip_lower_bound, 1030.0)
     assert solver.is_constraint_satisfied(cut)
 
     # Remove the new constraint
@@ -333,7 +339,7 @@ def run_basic_usage_tests(solver: InternalSolver) -> None:
 
     # New constraint should no longer affect solution
     stats = solver.solve()
-    assert_equals(stats["Lower bound"], 1183.0)
+    assert_equals(stats.mip_lower_bound, 1183.0)
 
 
 def run_warm_start_tests(solver: InternalSolver) -> None:
@@ -342,17 +348,17 @@ def run_warm_start_tests(solver: InternalSolver) -> None:
     solver.set_instance(instance, model)
     solver.set_warm_start({"x[0]": 1.0, "x[1]": 0.0, "x[2]": 0.0, "x[3]": 1.0})
     stats = solver.solve(tee=True)
-    if stats["Warm start value"] is not None:
-        assert_equals(stats["Warm start value"], 725.0)
+    if stats.mip_warm_start_value is not None:
+        assert_equals(stats.mip_warm_start_value, 725.0)
 
     solver.set_warm_start({"x[0]": 1.0, "x[1]": 1.0, "x[2]": 1.0, "x[3]": 1.0})
     stats = solver.solve(tee=True)
-    assert stats["Warm start value"] is None
+    assert stats.mip_warm_start_value is None
 
     solver.fix({"x[0]": 1.0, "x[1]": 0.0, "x[2]": 0.0, "x[3]": 1.0})
     stats = solver.solve(tee=True)
-    assert_equals(stats["Lower bound"], 725.0)
-    assert_equals(stats["Upper bound"], 725.0)
+    assert_equals(stats.mip_lower_bound, 725.0)
+    assert_equals(stats.mip_upper_bound, 725.0)
 
 
 def run_infeasibility_tests(solver: InternalSolver) -> None:
@@ -361,8 +367,8 @@ def run_infeasibility_tests(solver: InternalSolver) -> None:
     mip_stats = solver.solve()
     assert solver.is_infeasible()
     assert solver.get_solution() is None
-    assert mip_stats["Upper bound"] is None
-    assert mip_stats["Lower bound"] is None
+    assert mip_stats.mip_upper_bound is None
+    assert mip_stats.mip_lower_bound is None
     lp_stats = solver.solve_lp()
     assert solver.get_solution() is None
     assert lp_stats.lp_value is None
diff --git a/miplearn/types.py b/miplearn/types.py
index 5adea82..b23c140 100644
--- a/miplearn/types.py
+++ b/miplearn/types.py
@@ -19,30 +19,18 @@ UserCutCallback = Callable[["InternalSolver", Any], None]
 VariableName = str
 Solution = Dict[VariableName, Optional[float]]
 
-MIPSolveStats = TypedDict(
-    "MIPSolveStats",
-    {
-        "Lower bound": Optional[float],
-        "MIP log": str,
-        "Nodes": Optional[int],
-        "Sense": str,
-        "Upper bound": Optional[float],
-        "Wallclock time": float,
-        "Warm start value": Optional[float],
-    },
-)
-
 LearningSolveStats = TypedDict(
     "LearningSolveStats",
     {
         "Gap": Optional[float],
         "Instance": Union[str, int],
-        "LP log": str,
-        "LP value": Optional[float],
-        "Lower bound": Optional[float],
-        "MIP log": str,
+        "lp_log": str,
+        "lp_value": Optional[float],
+        "lp_wallclock_time": Optional[float],
+        "mip_lower_bound": Optional[float],
+        "mip_log": str,
         "Mode": str,
-        "Nodes": Optional[int],
+        "mip_nodes": Optional[int],
         "Objective: Predicted lower bound": float,
         "Objective: Predicted upper bound": float,
         "Primal: Free": int,
@@ -50,9 +38,9 @@ LearningSolveStats = TypedDict(
         "Primal: Zero": int,
         "Sense": str,
         "Solver": str,
-        "Upper bound": Optional[float],
-        "Wallclock time": float,
-        "Warm start value": Optional[float],
+        "mip_upper_bound": Optional[float],
+        "mip_wallclock_time": float,
+        "mip_warm_start_value": Optional[float],
         "LazyStatic: Removed": int,
         "LazyStatic: Kept": int,
         "LazyStatic: Restored": int,
diff --git a/tests/components/test_objective.py b/tests/components/test_objective.py
index 0f6a322..3af47ff 100644
--- a/tests/components/test_objective.py
+++ b/tests/components/test_objective.py
@@ -255,5 +255,5 @@ def test_usage() -> None:
     solver.solve(instance)
     solver.fit([instance])
     stats = solver.solve(instance)
-    assert stats["Lower bound"] == stats["Objective: Predicted lower bound"]
-    assert stats["Upper bound"] == stats["Objective: Predicted upper bound"]
+    assert stats["mip_lower_bound"] == stats["Objective: Predicted lower bound"]
stats["Objective: Predicted lower bound"] + assert stats["mip_upper_bound"] == stats["Objective: Predicted upper bound"] diff --git a/tests/components/test_primal.py b/tests/components/test_primal.py index 061d28f..5d62088 100644 --- a/tests/components/test_primal.py +++ b/tests/components/test_primal.py @@ -221,7 +221,7 @@ def test_usage() -> None: stats = solver.solve(instance) assert stats["Primal: Free"] == 0 assert stats["Primal: One"] + stats["Primal: Zero"] == 10 - assert stats["Lower bound"] == stats["Warm start value"] + assert stats["mip_lower_bound"] == stats["mip_warm_start_value"] def test_evaluate() -> None: diff --git a/tests/problems/test_stab.py b/tests/problems/test_stab.py index b6763bb..df40d33 100644 --- a/tests/problems/test_stab.py +++ b/tests/problems/test_stab.py @@ -16,7 +16,7 @@ def test_stab() -> None: instance = MaxWeightStableSetInstance(graph, weights) solver = LearningSolver() stats = solver.solve(instance) - assert stats["Lower bound"] == 2.0 + assert stats["mip_lower_bound"] == 2.0 def test_stab_generator_fixed_graph() -> None: diff --git a/tests/problems/test_tsp.py b/tests/problems/test_tsp.py index 7b10526..7499fd3 100644 --- a/tests/problems/test_tsp.py +++ b/tests/problems/test_tsp.py @@ -47,8 +47,8 @@ def test_instance() -> None: assert solution["x[(1, 2)]"] == 1.0 assert solution["x[(1, 3)]"] == 0.0 assert solution["x[(2, 3)]"] == 1.0 - assert stats["Lower bound"] == 4.0 - assert stats["Upper bound"] == 4.0 + assert stats["mip_lower_bound"] == 4.0 + assert stats["mip_upper_bound"] == 4.0 def test_subtour() -> None: diff --git a/tests/solvers/test_learning_solver.py b/tests/solvers/test_learning_solver.py index d3c68cf..ed0c981 100644 --- a/tests/solvers/test_learning_solver.py +++ b/tests/solvers/test_learning_solver.py @@ -140,7 +140,7 @@ def test_simulate_perfect() -> None: simulate_perfect=True, ) stats = solver.solve(PickleGzInstance(tmp.name)) - assert stats["Lower bound"] == stats["Objective: Predicted lower bound"] + assert stats["mip_lower_bound"] == stats["Objective: Predicted lower bound"] def test_gap() -> None: