diff --git a/.gitignore b/.gitignore index 886a4b60..9934dbb9 100644 --- a/.gitignore +++ b/.gitignore @@ -84,3 +84,4 @@ claude-output.log .worktrees/ .worktree/ *.json +.claude/worktrees/ diff --git a/Makefile b/Makefile index 68317c94..85e4e5b2 100644 --- a/Makefile +++ b/Makefile @@ -189,12 +189,13 @@ PLAN_FILE ?= $(shell ls -t docs/plans/*.md 2>/dev/null | head -1) run-plan: @NL=$$'\n'; \ BRANCH=$$(git branch --show-current); \ + PLAN_FILE="$(PLAN_FILE)"; \ if [ "$(AGENT_TYPE)" = "claude" ]; then \ - PROCESS="1. Read the plan file$${NL}2. Use /subagent-driven-development to execute tasks$${NL}3. Push: git push origin $$BRANCH$${NL}4. Create a pull request"; \ + PROCESS="1. Read the plan file$${NL}2. Choose the right skill to execute: use /add-model for new problem models, /add-rule for new reduction rules, or /subagent-driven-development for other tasks$${NL}3. Push: git push origin $$BRANCH$${NL}4. Create a pull request"; \ else \ PROCESS="1. Read the plan file$${NL}2. Execute the tasks step by step. For each task, implement and test before moving on.$${NL}3. Push: git push origin $$BRANCH$${NL}4. 
Create a pull request"; \ fi; \ - PROMPT="Execute the plan in '$${PLAN_FILE}'."; \ + PROMPT="Execute the plan in '$$PLAN_FILE'."; \ if [ -n "$(INSTRUCTIONS)" ]; then \ PROMPT="$${PROMPT}$${NL}$${NL}## Additional Instructions$${NL}$(INSTRUCTIONS)"; \ fi; \ diff --git a/docs/paper/reductions.typ b/docs/paper/reductions.typ index 300415b0..07112ff0 100644 --- a/docs/paper/reductions.typ +++ b/docs/paper/reductions.typ @@ -49,6 +49,7 @@ "BMF": [Boolean Matrix Factorization], "PaintShop": [Paint Shop], "BicliqueCover": [Biclique Cover], + "BinPacking": [Bin Packing], ) // Definition label: "def:" — each definition block must have a matching label @@ -801,6 +802,46 @@ Biclique Cover is equivalent to factoring the biadjacency matrix $M$ of the bipa ) ] +#problem-def("BinPacking")[ + Given $n$ items with sizes $s_1, dots, s_n in RR^+$ and bin capacity $C > 0$, find an assignment $x: {1, dots, n} -> NN$ minimizing $|{x(i) : i = 1, dots, n}|$ (the number of distinct bins used) subject to $forall j: sum_(i: x(i) = j) s_i lt.eq C$. +][ + Bin Packing is one of the classical NP-hard optimization problems @garey1979, with applications in logistics, cutting stock, and cloud resource allocation. The best known exact algorithm runs in $O^*(2^n)$ time via inclusion-exclusion over set partitions @bjorklund2009. + + *Example.* Consider $n = 6$ items with sizes $(6, 6, 5, 5, 4, 4)$ and capacity $C = 10$. The lower bound is $ceil(30 slash 10) = 3$ bins. An optimal packing uses exactly 3 bins: $B_1 = {6, 4}$, $B_2 = {6, 4}$, $B_3 = {5, 5}$, each with total load $10 = C$. 
+ + #figure({ + canvas(length: 1cm, { + let s = 0.28 + let w = 1.0 + let gap = 0.6 + let bins = ((6, 4), (6, 4), (5, 5)) + let fills = ( + (graph-colors.at(0), graph-colors.at(1)), + (graph-colors.at(0), graph-colors.at(1)), + (graph-colors.at(2), graph-colors.at(2)), + ) + for i in range(3) { + let x = i * (w + gap) + draw.rect((x, 0), (x + w, 10 * s), stroke: 0.8pt + black) + let y = 0 + for j in range(bins.at(i).len()) { + let sz = bins.at(i).at(j) + let c = fills.at(i).at(j) + draw.rect((x, y), (x + w, y + sz * s), stroke: 0.4pt, fill: c) + draw.content((x + w / 2, y + sz * s / 2), text(8pt, fill: white)[#sz]) + y += sz * s + } + draw.content((x + w / 2, -0.3), text(8pt)[$B_#(i + 1)$]) + } + draw.line((-0.15, 10 * s), (2 * (w + gap) + w + 0.15, 10 * s), + stroke: (dash: "dashed", paint: luma(150), thickness: 0.5pt)) + draw.content((-0.5, 10 * s), text(7pt)[$C$]) + }) + }, + caption: [Optimal packing of items with sizes $(6, 6, 5, 5, 4, 4)$ into 3 bins of capacity $C = 10$. Numbers indicate item sizes; all bins are fully utilized.], + ) +] + // Completeness check: warn about problem types in JSON but missing from paper #{ let json-models = { diff --git a/docs/src/reductions/problem_schemas.json b/docs/src/reductions/problem_schemas.json index 0e89b6f7..f3d9102c 100644 --- a/docs/src/reductions/problem_schemas.json +++ b/docs/src/reductions/problem_schemas.json @@ -51,6 +51,22 @@ } ] }, + { + "name": "BinPacking", + "description": "Assign items to bins minimizing number of bins used, subject to capacity", + "fields": [ + { + "name": "sizes", + "type_name": "Vec", + "description": "Item sizes s_i for each item" + }, + { + "name": "capacity", + "type_name": "W", + "description": "Bin capacity C" + } + ] + }, { "name": "CircuitSAT", "description": "Find satisfying input to a boolean circuit", diff --git a/docs/src/reductions/reduction_graph.json b/docs/src/reductions/reduction_graph.json index c24b51dd..6d507247 100644 --- a/docs/src/reductions/reduction_graph.json 
+++ b/docs/src/reductions/reduction_graph.json @@ -1,5 +1,23 @@ { "nodes": [ + { + "name": "BinPacking", + "variant": { + "weight": "f64" + }, + "category": "optimization", + "doc_path": "models/optimization/struct.BinPacking.html", + "complexity": "2^num_items" + }, + { + "name": "BinPacking", + "variant": { + "weight": "i32" + }, + "category": "optimization", + "doc_path": "models/optimization/struct.BinPacking.html", + "complexity": "2^num_items" + }, { "name": "CircuitSAT", "variant": {}, @@ -313,8 +331,8 @@ ], "edges": [ { - "source": 0, - "target": 2, + "source": 2, + "target": 4, "overhead": [ { "field": "num_vars", @@ -328,8 +346,8 @@ "doc_path": "rules/circuit_ilp/index.html" }, { - "source": 0, - "target": 31, + "source": 2, + "target": 33, "overhead": [ { "field": "num_spins", @@ -343,8 +361,8 @@ "doc_path": "rules/circuit_spinglass/index.html" }, { - "source": 1, - "target": 0, + "source": 3, + "target": 2, "overhead": [ { "field": "num_variables", @@ -358,8 +376,8 @@ "doc_path": "rules/factoring_circuit/index.html" }, { - "source": 1, - "target": 2, + "source": 3, + "target": 4, "overhead": [ { "field": "num_vars", @@ -373,8 +391,8 @@ "doc_path": "rules/factoring_ilp/index.html" }, { - "source": 2, - "target": 28, + "source": 4, + "target": 30, "overhead": [ { "field": "num_vars", @@ -384,8 +402,8 @@ "doc_path": "rules/ilp_qubo/index.html" }, { - "source": 4, - "target": 7, + "source": 6, + "target": 9, "overhead": [ { "field": "num_vertices", @@ -399,8 +417,8 @@ "doc_path": "rules/kcoloring_casts/index.html" }, { - "source": 7, - "target": 2, + "source": 9, + "target": 4, "overhead": [ { "field": "num_vars", @@ -414,8 +432,8 @@ "doc_path": "rules/coloring_ilp/index.html" }, { - "source": 7, - "target": 28, + "source": 9, + "target": 30, "overhead": [ { "field": "num_vars", @@ -425,8 +443,8 @@ "doc_path": "rules/coloring_qubo/index.html" }, { - "source": 8, - "target": 10, + "source": 10, + "target": 12, "overhead": [ { "field": "num_vars", @@ -440,8 
+458,8 @@ "doc_path": "rules/ksatisfiability_casts/index.html" }, { - "source": 8, - "target": 28, + "source": 10, + "target": 30, "overhead": [ { "field": "num_vars", @@ -451,8 +469,8 @@ "doc_path": "rules/ksatisfiability_qubo/index.html" }, { - "source": 8, - "target": 29, + "source": 10, + "target": 31, "overhead": [ { "field": "num_clauses", @@ -470,8 +488,8 @@ "doc_path": "rules/sat_ksat/index.html" }, { - "source": 9, - "target": 10, + "source": 11, + "target": 12, "overhead": [ { "field": "num_vars", @@ -485,8 +503,8 @@ "doc_path": "rules/ksatisfiability_casts/index.html" }, { - "source": 9, - "target": 28, + "source": 11, + "target": 30, "overhead": [ { "field": "num_vars", @@ -496,8 +514,8 @@ "doc_path": "rules/ksatisfiability_qubo/index.html" }, { - "source": 9, - "target": 29, + "source": 11, + "target": 31, "overhead": [ { "field": "num_clauses", @@ -515,8 +533,8 @@ "doc_path": "rules/sat_ksat/index.html" }, { - "source": 10, - "target": 29, + "source": 12, + "target": 31, "overhead": [ { "field": "num_clauses", @@ -534,8 +552,8 @@ "doc_path": "rules/sat_ksat/index.html" }, { - "source": 11, - "target": 31, + "source": 13, + "target": 33, "overhead": [ { "field": "num_spins", @@ -549,8 +567,8 @@ "doc_path": "rules/spinglass_maxcut/index.html" }, { - "source": 13, - "target": 2, + "source": 15, + "target": 4, "overhead": [ { "field": "num_vars", @@ -564,8 +582,8 @@ "doc_path": "rules/maximumclique_ilp/index.html" }, { - "source": 14, - "target": 15, + "source": 16, + "target": 17, "overhead": [ { "field": "num_vertices", @@ -579,8 +597,8 @@ "doc_path": "rules/maximumindependentset_casts/index.html" }, { - "source": 14, - "target": 19, + "source": 16, + "target": 21, "overhead": [ { "field": "num_vertices", @@ -594,8 +612,8 @@ "doc_path": "rules/maximumindependentset_casts/index.html" }, { - "source": 15, - "target": 20, + "source": 17, + "target": 22, "overhead": [ { "field": "num_vertices", @@ -609,8 +627,8 @@ "doc_path": 
"rules/maximumindependentset_casts/index.html" }, { - "source": 16, - "target": 14, + "source": 18, + "target": 16, "overhead": [ { "field": "num_vertices", @@ -624,8 +642,8 @@ "doc_path": "rules/maximumindependentset_gridgraph/index.html" }, { - "source": 16, - "target": 15, + "source": 18, + "target": 17, "overhead": [ { "field": "num_vertices", @@ -639,8 +657,8 @@ "doc_path": "rules/maximumindependentset_gridgraph/index.html" }, { - "source": 16, - "target": 17, + "source": 18, + "target": 19, "overhead": [ { "field": "num_vertices", @@ -654,8 +672,8 @@ "doc_path": "rules/maximumindependentset_casts/index.html" }, { - "source": 16, - "target": 18, + "source": 18, + "target": 20, "overhead": [ { "field": "num_vertices", @@ -669,8 +687,8 @@ "doc_path": "rules/maximumindependentset_triangular/index.html" }, { - "source": 16, - "target": 22, + "source": 18, + "target": 24, "overhead": [ { "field": "num_sets", @@ -684,8 +702,8 @@ "doc_path": "rules/maximumindependentset_maximumsetpacking/index.html" }, { - "source": 17, - "target": 2, + "source": 19, + "target": 4, "overhead": [ { "field": "num_vars", @@ -699,8 +717,8 @@ "doc_path": "rules/maximumindependentset_ilp/index.html" }, { - "source": 17, - "target": 24, + "source": 19, + "target": 26, "overhead": [ { "field": "num_sets", @@ -714,8 +732,8 @@ "doc_path": "rules/maximumindependentset_maximumsetpacking/index.html" }, { - "source": 17, - "target": 27, + "source": 19, + "target": 29, "overhead": [ { "field": "num_vertices", @@ -729,8 +747,8 @@ "doc_path": "rules/minimumvertexcover_maximumindependentset/index.html" }, { - "source": 17, - "target": 28, + "source": 19, + "target": 30, "overhead": [ { "field": "num_vars", @@ -740,8 +758,8 @@ "doc_path": "rules/maximumindependentset_qubo/index.html" }, { - "source": 18, - "target": 20, + "source": 20, + "target": 22, "overhead": [ { "field": "num_vertices", @@ -755,8 +773,8 @@ "doc_path": "rules/maximumindependentset_casts/index.html" }, { - "source": 19, - "target": 
16, + "source": 21, + "target": 18, "overhead": [ { "field": "num_vertices", @@ -770,8 +788,8 @@ "doc_path": "rules/maximumindependentset_casts/index.html" }, { - "source": 19, - "target": 20, + "source": 21, + "target": 22, "overhead": [ { "field": "num_vertices", @@ -785,8 +803,8 @@ "doc_path": "rules/maximumindependentset_casts/index.html" }, { - "source": 20, - "target": 17, + "source": 22, + "target": 19, "overhead": [ { "field": "num_vertices", @@ -800,8 +818,8 @@ "doc_path": "rules/maximumindependentset_casts/index.html" }, { - "source": 21, - "target": 2, + "source": 23, + "target": 4, "overhead": [ { "field": "num_vars", @@ -815,8 +833,8 @@ "doc_path": "rules/maximummatching_ilp/index.html" }, { - "source": 21, - "target": 24, + "source": 23, + "target": 26, "overhead": [ { "field": "num_sets", @@ -830,8 +848,8 @@ "doc_path": "rules/maximummatching_maximumsetpacking/index.html" }, { - "source": 22, - "target": 16, + "source": 24, + "target": 18, "overhead": [ { "field": "num_vertices", @@ -845,8 +863,8 @@ "doc_path": "rules/maximumindependentset_maximumsetpacking/index.html" }, { - "source": 22, - "target": 24, + "source": 24, + "target": 26, "overhead": [ { "field": "num_sets", @@ -860,8 +878,8 @@ "doc_path": "rules/maximumsetpacking_casts/index.html" }, { - "source": 23, - "target": 28, + "source": 25, + "target": 30, "overhead": [ { "field": "num_vars", @@ -871,8 +889,8 @@ "doc_path": "rules/maximumsetpacking_qubo/index.html" }, { - "source": 24, - "target": 2, + "source": 26, + "target": 4, "overhead": [ { "field": "num_vars", @@ -886,8 +904,8 @@ "doc_path": "rules/maximumsetpacking_ilp/index.html" }, { - "source": 24, - "target": 17, + "source": 26, + "target": 19, "overhead": [ { "field": "num_vertices", @@ -901,8 +919,8 @@ "doc_path": "rules/maximumindependentset_maximumsetpacking/index.html" }, { - "source": 24, - "target": 23, + "source": 26, + "target": 25, "overhead": [ { "field": "num_sets", @@ -916,8 +934,8 @@ "doc_path": 
"rules/maximumsetpacking_casts/index.html" }, { - "source": 25, - "target": 2, + "source": 27, + "target": 4, "overhead": [ { "field": "num_vars", @@ -931,8 +949,8 @@ "doc_path": "rules/minimumdominatingset_ilp/index.html" }, { - "source": 26, - "target": 2, + "source": 28, + "target": 4, "overhead": [ { "field": "num_vars", @@ -946,8 +964,8 @@ "doc_path": "rules/minimumsetcovering_ilp/index.html" }, { - "source": 27, - "target": 2, + "source": 29, + "target": 4, "overhead": [ { "field": "num_vars", @@ -961,8 +979,8 @@ "doc_path": "rules/minimumvertexcover_ilp/index.html" }, { - "source": 27, - "target": 17, + "source": 29, + "target": 19, "overhead": [ { "field": "num_vertices", @@ -976,8 +994,8 @@ "doc_path": "rules/minimumvertexcover_maximumindependentset/index.html" }, { - "source": 27, - "target": 26, + "source": 29, + "target": 28, "overhead": [ { "field": "num_sets", @@ -991,8 +1009,8 @@ "doc_path": "rules/minimumvertexcover_minimumsetcovering/index.html" }, { - "source": 27, - "target": 28, + "source": 29, + "target": 30, "overhead": [ { "field": "num_vars", @@ -1002,8 +1020,8 @@ "doc_path": "rules/minimumvertexcover_qubo/index.html" }, { - "source": 28, - "target": 2, + "source": 30, + "target": 4, "overhead": [ { "field": "num_vars", @@ -1017,8 +1035,8 @@ "doc_path": "rules/qubo_ilp/index.html" }, { - "source": 28, - "target": 30, + "source": 30, + "target": 32, "overhead": [ { "field": "num_spins", @@ -1028,8 +1046,8 @@ "doc_path": "rules/spinglass_qubo/index.html" }, { - "source": 29, - "target": 0, + "source": 31, + "target": 2, "overhead": [ { "field": "num_variables", @@ -1043,8 +1061,8 @@ "doc_path": "rules/sat_circuitsat/index.html" }, { - "source": 29, - "target": 4, + "source": 31, + "target": 6, "overhead": [ { "field": "num_vertices", @@ -1058,8 +1076,8 @@ "doc_path": "rules/sat_coloring/index.html" }, { - "source": 29, - "target": 9, + "source": 31, + "target": 11, "overhead": [ { "field": "num_clauses", @@ -1073,8 +1091,8 @@ "doc_path": 
"rules/sat_ksat/index.html" }, { - "source": 29, - "target": 16, + "source": 31, + "target": 18, "overhead": [ { "field": "num_vertices", @@ -1088,8 +1106,8 @@ "doc_path": "rules/sat_maximumindependentset/index.html" }, { - "source": 29, - "target": 25, + "source": 31, + "target": 27, "overhead": [ { "field": "num_vertices", @@ -1103,8 +1121,8 @@ "doc_path": "rules/sat_minimumdominatingset/index.html" }, { - "source": 30, - "target": 28, + "source": 32, + "target": 30, "overhead": [ { "field": "num_vars", @@ -1114,8 +1132,8 @@ "doc_path": "rules/spinglass_qubo/index.html" }, { - "source": 31, - "target": 11, + "source": 33, + "target": 13, "overhead": [ { "field": "num_vertices", @@ -1129,8 +1147,8 @@ "doc_path": "rules/spinglass_maxcut/index.html" }, { - "source": 31, - "target": 30, + "source": 33, + "target": 32, "overhead": [ { "field": "num_spins", @@ -1144,8 +1162,8 @@ "doc_path": "rules/spinglass_casts/index.html" }, { - "source": 32, - "target": 2, + "source": 34, + "target": 4, "overhead": [ { "field": "num_vars", diff --git a/problemreductions-cli/src/cli.rs b/problemreductions-cli/src/cli.rs index 1de97c43..62060979 100644 --- a/problemreductions-cli/src/cli.rs +++ b/problemreductions-cli/src/cli.rs @@ -136,7 +136,7 @@ Examples: ExportGraph, /// Create a problem instance and save as JSON - Create(CreateArgs), + Create(Box), /// Evaluate a configuration against a problem instance JSON file Evaluate(EvaluateArgs), /// Reduce a problem instance to a target type diff --git a/problemreductions-cli/src/commands/graph.rs b/problemreductions-cli/src/commands/graph.rs index fc314d17..1c707b02 100644 --- a/problemreductions-cli/src/commands/graph.rs +++ b/problemreductions-cli/src/commands/graph.rs @@ -268,7 +268,7 @@ fn variant_to_slash( ) -> String { let diffs: Vec<&str> = variant .iter() - .filter(|(k, v)| default.get(*k).map_or(true, |dv| dv != *v)) + .filter(|(k, v)| default.get(*k) != Some(*v)) .map(|(_, v)| v.as_str()) .collect(); if diffs.is_empty() { diff 
--git a/problemreductions-cli/src/dispatch.rs b/problemreductions-cli/src/dispatch.rs index c5bb7540..43b238dd 100644 --- a/problemreductions-cli/src/dispatch.rs +++ b/problemreductions-cli/src/dispatch.rs @@ -1,5 +1,5 @@ use anyhow::{bail, Context, Result}; -use problemreductions::models::optimization::ILP; +use problemreductions::models::optimization::{BinPacking, ILP}; use problemreductions::prelude::*; use problemreductions::rules::{MinimizeSteps, ReductionGraph}; use problemreductions::solvers::{BruteForce, ILPSolver, Solver}; @@ -235,6 +235,10 @@ pub fn load_problem( "BicliqueCover" => deser_opt::<BicliqueCover>(data), "BMF" => deser_opt::<BMF>(data), "PaintShop" => deser_opt::<PaintShop>(data), + "BinPacking" => match variant.get("weight").map(|s| s.as_str()) { + Some("f64") => deser_opt::<BinPacking<f64>>(data), + _ => deser_opt::<BinPacking<i32>>(data), + }, _ => bail!("{}", crate::problem_name::unknown_problem_error(&canonical)), } } @@ -286,6 +290,10 @@ pub fn serialize_any_problem( "BicliqueCover" => try_ser::<BicliqueCover>(any), "BMF" => try_ser::<BMF>(any), "PaintShop" => try_ser::<PaintShop>(any), + "BinPacking" => match variant.get("weight").map(|s| s.as_str()) { + Some("f64") => try_ser::<BinPacking<f64>>(any), + _ => try_ser::<BinPacking<i32>>(any), + }, _ => bail!("{}", crate::problem_name::unknown_problem_error(&canonical)), } } diff --git a/problemreductions-cli/src/problem_name.rs b/problemreductions-cli/src/problem_name.rs index f7fa533b..05f3dec3 100644 --- a/problemreductions-cli/src/problem_name.rs +++ b/problemreductions-cli/src/problem_name.rs @@ -19,6 +19,7 @@ pub const ALIASES: &[(&str, &str)] = &[ ("3SAT", "KSatisfiability"), ("KSAT", "KSatisfiability"), ("TSP", "TravelingSalesman"), + ("BP", "BinPacking"), ]; /// Resolve a short alias to the canonical problem name.
@@ -47,6 +48,7 @@ pub fn resolve_alias(input: &str) -> String { "paintshop" => "PaintShop".to_string(), "bmf" => "BMF".to_string(), "bicliquecover" => "BicliqueCover".to_string(), + "bp" | "binpacking" => "BinPacking".to_string(), _ => input.to_string(), // pass-through for exact names } } diff --git a/src/models/mod.rs b/src/models/mod.rs index 2b1bb93e..1f753b73 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -13,7 +13,7 @@ pub use graph::{ KColoring, MaxCut, MaximalIS, MaximumClique, MaximumIndependentSet, MaximumMatching, MinimumDominatingSet, MinimumVertexCover, TravelingSalesman, }; -pub use optimization::{SpinGlass, ILP, QUBO}; +pub use optimization::{BinPacking, SpinGlass, ILP, QUBO}; pub use satisfiability::{CNFClause, KSatisfiability, Satisfiability}; pub use set::{MaximumSetPacking, MinimumSetCovering}; pub use specialized::{BicliqueCover, CircuitSAT, Factoring, PaintShop, BMF}; diff --git a/src/models/optimization/bin_packing.rs b/src/models/optimization/bin_packing.rs new file mode 100644 index 00000000..1641ee77 --- /dev/null +++ b/src/models/optimization/bin_packing.rs @@ -0,0 +1,160 @@ +//! Bin Packing problem implementation. +//! +//! The Bin Packing problem asks for an assignment of items to bins +//! that minimizes the number of bins used while respecting capacity constraints. + +use crate::registry::{FieldInfo, ProblemSchemaEntry}; +use crate::traits::{OptimizationProblem, Problem}; +use crate::types::{Direction, SolutionSize, WeightElement}; +use serde::{Deserialize, Serialize}; + +inventory::submit! { + ProblemSchemaEntry { + name: "BinPacking", + module_path: module_path!(), + description: "Assign items to bins minimizing number of bins used, subject to capacity", + fields: &[ + FieldInfo { name: "sizes", type_name: "Vec", description: "Item sizes s_i for each item" }, + FieldInfo { name: "capacity", type_name: "W", description: "Bin capacity C" }, + ], + } +} + +/// The Bin Packing problem. 
+/// +/// Given `n` items with sizes `s_1, ..., s_n` and bin capacity `C`, +/// find an assignment of items to bins such that: +/// - For each bin `j`, the total size of items assigned to `j` does not exceed `C` +/// - The number of bins used is minimized +/// +/// # Representation +/// +/// Each item has a variable in `{0, ..., n-1}` representing its bin assignment. +/// The worst case uses `n` bins (one item per bin). +/// +/// # Type Parameters +/// +/// * `W` - The weight type for sizes and capacity (e.g., `i32`, `f64`) +/// +/// # Example +/// +/// ``` +/// use problemreductions::models::optimization::BinPacking; +/// use problemreductions::{Problem, Solver, BruteForce}; +/// +/// // 4 items with sizes [3, 3, 2, 2], capacity 5 +/// let problem = BinPacking::new(vec![3, 3, 2, 2], 5); +/// let solver = BruteForce::new(); +/// let solution = solver.find_best(&problem); +/// assert!(solution.is_some()); +/// ``` +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BinPacking<W> { + /// Item sizes. + sizes: Vec<W>, + /// Bin capacity. + capacity: W, +} + +impl<W> BinPacking<W> { + /// Create a Bin Packing problem from item sizes and capacity. + pub fn new(sizes: Vec<W>, capacity: W) -> Self { + Self { sizes, capacity } + } + + /// Get the item sizes. + pub fn sizes(&self) -> &[W] { + &self.sizes + } + + /// Get the bin capacity. + pub fn capacity(&self) -> &W { + &self.capacity + } + + /// Get the number of items.
+ pub fn num_items(&self) -> usize { + self.sizes.len() + } +} + +impl<W> Problem for BinPacking<W> +where + W: WeightElement + crate::variant::VariantParam, + W::Sum: PartialOrd, +{ + const NAME: &'static str = "BinPacking"; + type Metric = SolutionSize; + + fn variant() -> Vec<(&'static str, &'static str)> { + crate::variant_params![W] + } + + fn dims(&self) -> Vec<usize> { + let n = self.sizes.len(); + vec![n; n] + } + + fn evaluate(&self, config: &[usize]) -> SolutionSize { + if !is_valid_packing(&self.sizes, &self.capacity, config) { + return SolutionSize::Invalid; + } + let num_bins = count_bins(config); + SolutionSize::Valid(num_bins as i32) + } +} + +impl<W> OptimizationProblem for BinPacking<W> +where + W: WeightElement + crate::variant::VariantParam, + W::Sum: PartialOrd, +{ + type Value = i32; + + fn direction(&self) -> Direction { + Direction::Minimize + } +} + +/// Check if a configuration is a valid bin packing (all bins within capacity). +fn is_valid_packing<W: WeightElement>(sizes: &[W], capacity: &W, config: &[usize]) -> bool +where + W::Sum: PartialOrd, +{ + if config.len() != sizes.len() { + return false; + } + let n = sizes.len(); + // Check all bin indices are in range + if config.iter().any(|&b| b >= n) { + return false; + } + // Compute load per bin + let cap_sum = capacity.to_sum(); + let mut bin_load: Vec<W::Sum> = vec![W::Sum::default(); n]; + for (i, &bin) in config.iter().enumerate() { + bin_load[bin] += sizes[i].to_sum(); + } + // Check capacity constraints + bin_load.iter().all(|load| *load <= cap_sum) +} + +/// Count the number of distinct bins used in a configuration. +fn count_bins(config: &[usize]) -> usize { + let mut used = vec![false; config.len()]; + for &bin in config { + if bin < used.len() { + used[bin] = true; + } + } + used.iter().filter(|&&u| u).count() +} + +crate::declare_variants!
{ + BinPacking<i32> => "2^num_items", + BinPacking<f64> => "2^num_items", +} + +#[cfg(test)] +#[path = "../../unit_tests/models/optimization/bin_packing.rs"] +mod tests; diff --git a/src/models/optimization/mod.rs b/src/models/optimization/mod.rs index cbee429f..6e86fc48 100644 --- a/src/models/optimization/mod.rs +++ b/src/models/optimization/mod.rs @@ -1,14 +1,17 @@ //! Optimization problems. //! //! This module contains optimization problems: +//! - [`BinPacking`]: Bin Packing (minimize bins) //! - [`SpinGlass`]: Ising model Hamiltonian //! - [`QUBO`]: Quadratic Unconstrained Binary Optimization //! - [`ILP`]: Integer Linear Programming +mod bin_packing; mod ilp; mod qubo; mod spin_glass; +pub use bin_packing::BinPacking; pub use ilp::{Comparison, LinearConstraint, ObjectiveSense, VarBounds, ILP}; pub use qubo::QUBO; pub use spin_glass::SpinGlass; diff --git a/src/rules/unitdiskmapping/pathdecomposition.rs b/src/rules/unitdiskmapping/pathdecomposition.rs index cc930fde..6003241d 100644 --- a/src/rules/unitdiskmapping/pathdecomposition.rs +++ b/src/rules/unitdiskmapping/pathdecomposition.rs @@ -395,7 +395,10 @@ impl PathDecompositionMethod { /// Create a greedy method with specified number of restarts. pub fn greedy_with_restarts(nrepeat: usize) -> Self { - PathDecompositionMethod::Greedy { nrepeat } + // Zero restarts would skip greedy_decompose entirely and produce an empty layout. + PathDecompositionMethod::Greedy { + nrepeat: nrepeat.max(1), + } } } @@ -433,6 +436,8 @@ pub fn pathwidth( }; match method { PathDecompositionMethod::Greedy { nrepeat } => { + // Defend against direct enum construction with nrepeat = 0.
+ let nrepeat = nrepeat.max(1); let mut best: Option = None; for _ in 0..nrepeat { let layout = greedy_decompose(num_vertices, edges); diff --git a/src/unit_tests/models/optimization/bin_packing.rs b/src/unit_tests/models/optimization/bin_packing.rs new file mode 100644 index 00000000..079e62da --- /dev/null +++ b/src/unit_tests/models/optimization/bin_packing.rs @@ -0,0 +1,141 @@ +use super::*; +use crate::solvers::{BruteForce, Solver}; +use crate::traits::{OptimizationProblem, Problem}; +use crate::types::Direction; + +#[test] +fn test_bin_packing_creation() { + let problem = BinPacking::new(vec![6, 6, 5, 5, 4, 4], 10); + assert_eq!(problem.num_items(), 6); + assert_eq!(problem.sizes(), &[6, 6, 5, 5, 4, 4]); + assert_eq!(*problem.capacity(), 10); + assert_eq!(problem.dims().len(), 6); + // Each variable has domain {0, ..., 5} + assert!(problem.dims().iter().all(|&d| d == 6)); +} + +#[test] +fn test_bin_packing_direction() { + let problem = BinPacking::new(vec![1, 2, 3], 5); + assert_eq!(problem.direction(), Direction::Minimize); +} + +#[test] +fn test_bin_packing_evaluate_valid() { + // 6 items, capacity 10, sizes [6, 6, 5, 5, 4, 4] + // Assignment: (0, 1, 2, 2, 0, 1) -> 3 bins + // Bin 0: items 0,4 -> 6+4=10 OK + // Bin 1: items 1,5 -> 6+4=10 OK + // Bin 2: items 2,3 -> 5+5=10 OK + let problem = BinPacking::new(vec![6, 6, 5, 5, 4, 4], 10); + let result = problem.evaluate(&[0, 1, 2, 2, 0, 1]); + assert!(result.is_valid()); + assert_eq!(result.unwrap(), 3); +} + +#[test] +fn test_bin_packing_evaluate_invalid_overweight() { + // Bin 0: items 0,1 -> 6+6=12 > 10 + let problem = BinPacking::new(vec![6, 6, 5, 5, 4, 4], 10); + let result = problem.evaluate(&[0, 0, 1, 1, 2, 2]); + assert!(!result.is_valid()); +} + +#[test] +fn test_bin_packing_evaluate_single_bin() { + // All items fit in one bin + let problem = BinPacking::new(vec![1, 2, 3], 10); + let result = problem.evaluate(&[0, 0, 0]); + assert!(result.is_valid()); + assert_eq!(result.unwrap(), 1); +} + +#[test] 
+fn test_bin_packing_evaluate_all_separate() { + // Each item in its own bin + let problem = BinPacking::new(vec![3, 3, 3], 5); + let result = problem.evaluate(&[0, 1, 2]); + assert!(result.is_valid()); + assert_eq!(result.unwrap(), 3); +} + +#[test] +fn test_bin_packing_problem_name() { + assert_eq!(<BinPacking<i32> as Problem>::NAME, "BinPacking"); +} + +#[test] +fn test_bin_packing_brute_force_solver() { + // 6 items, capacity 10, sizes [6, 6, 5, 5, 4, 4] + // Optimal: 3 bins (lower bound ceil(30/10) = 3) + let problem = BinPacking::new(vec![6, 6, 5, 5, 4, 4], 10); + let solver = BruteForce::new(); + let solution = solver.find_best(&problem).expect("should find a solution"); + let metric = problem.evaluate(&solution); + assert!(metric.is_valid()); + assert_eq!(metric.unwrap(), 3); +} + +#[test] +fn test_bin_packing_brute_force_small() { + // 3 items [3, 3, 4], capacity 7 + // Optimal: 2 bins (e.g., {3,4} + {3}) + let problem = BinPacking::new(vec![3, 3, 4], 7); + let solver = BruteForce::new(); + let solution = solver.find_best(&problem).expect("should find a solution"); + let metric = problem.evaluate(&solution); + assert!(metric.is_valid()); + assert_eq!(metric.unwrap(), 2); +} + +#[test] +fn test_bin_packing_empty_items() { + let problem = BinPacking::new(Vec::<i32>::new(), 10); + assert_eq!(problem.num_items(), 0); + assert_eq!(problem.dims(), Vec::<usize>::new()); + let result = problem.evaluate(&[]); + assert!(result.is_valid()); + assert_eq!(result.unwrap(), 0); +} + +#[test] +fn test_bin_packing_wrong_config_length() { + let problem = BinPacking::new(vec![3, 3, 4], 7); + assert!(!problem.evaluate(&[0, 1]).is_valid()); + assert!(!problem.evaluate(&[0, 1, 2, 3]).is_valid()); +} + +#[test] +fn test_bin_packing_out_of_range_bin() { + let problem = BinPacking::new(vec![3, 3, 4], 7); + // Bin index 3 is out of range for 3 items (valid range 0..3) + assert!(!problem.evaluate(&[0, 1, 3]).is_valid()); +} + +#[test] +fn test_bin_packing_f64() { + let problem = BinPacking::new(vec![2.5, 3.5,
4.0], 7.0); + // All fit in one bin: 2.5 + 3.5 + 4.0 = 10.0 > 7.0 + assert!(!problem.evaluate(&[0, 0, 0]).is_valid()); + // Two bins: {2.5, 3.5} = 6.0, {4.0} = 4.0 + let result = problem.evaluate(&[0, 0, 1]); + assert!(result.is_valid()); + assert_eq!(result.unwrap(), 2); +} + +#[test] +fn test_bin_packing_variant() { + let v = <BinPacking<i32> as Problem>::variant(); + assert_eq!(v, vec![("weight", "i32")]); + let v64 = <BinPacking<f64> as Problem>::variant(); + assert_eq!(v64, vec![("weight", "f64")]); +} + +#[test] +fn test_bin_packing_serialization() { + let problem = BinPacking::new(vec![6, 6, 5, 5, 4, 4], 10); + let json = serde_json::to_value(&problem).unwrap(); + let restored: BinPacking<i32> = serde_json::from_value(json).unwrap(); + assert_eq!(restored.sizes(), problem.sizes()); + assert_eq!(restored.capacity(), problem.capacity()); +} diff --git a/src/unit_tests/rules/unitdiskmapping/pathdecomposition.rs b/src/unit_tests/rules/unitdiskmapping/pathdecomposition.rs index af282a73..46e8ba16 100644 --- a/src/unit_tests/rules/unitdiskmapping/pathdecomposition.rs +++ b/src/unit_tests/rules/unitdiskmapping/pathdecomposition.rs @@ -103,6 +103,26 @@ fn test_pathwidth_greedy() { assert_eq!(layout.vsep(), 1); } +#[test] +fn test_greedy_with_restarts_zero_clamps_to_one() { + assert!(matches!( + PathDecompositionMethod::greedy_with_restarts(0), + PathDecompositionMethod::Greedy { nrepeat: 1 } + )); +} + +#[test] +fn test_pathwidth_greedy_zero_restarts_produces_complete_layout() { + let n = 5; + let edges: Vec<(usize, usize)> = (0..n - 1).map(|i| (i, i + 1)).collect(); + let layout = pathwidth(n, &edges, PathDecompositionMethod::Greedy { nrepeat: 0 }); + + assert_eq!(layout.vertices.len(), n); + assert_eq!(layout.vsep(), 1); + let verified = verify_vsep(n, &edges, &layout.vertices); + assert_eq!(verified, layout.vsep()); +} + #[test] fn test_pathwidth_minhthi() { let edges = vec![(0, 1), (1, 2)];