diff --git a/.gitignore b/.gitignore index 756e66cc..5f05ff76 100644 --- a/.gitignore +++ b/.gitignore @@ -71,3 +71,6 @@ tarpaulin-report.html # Reference packages (downloaded for comparison) pkgref/ + +# Generated example outputs +docs/paper/examples/ diff --git a/Makefile b/Makefile index 47fee9fe..d9296090 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ # Makefile for problemreductions -.PHONY: help build test fmt clippy doc mdbook paper clean coverage rust-export compare qubo-testdata +.PHONY: help build test fmt clippy doc mdbook paper examples clean coverage rust-export compare qubo-testdata # Default target help: @@ -18,6 +18,7 @@ help: @echo " check - Quick check (fmt + clippy + test)" @echo " rust-export - Generate Rust mapping JSON exports" @echo " compare - Generate and compare Rust mapping exports" + @echo " examples - Generate example JSON for paper" @echo " qubo-testdata - Regenerate QUBO test data (requires uv)" # Build the project @@ -48,8 +49,18 @@ doc: mdbook: mdbook serve docs --open -# Build Typst paper -paper: +# Generate all example JSON files for the paper +REDUCTION_EXAMPLES := $(patsubst examples/%.rs,%,$(wildcard examples/reduction_*.rs)) +examples: + @mkdir -p docs/paper/examples + @for example in $(REDUCTION_EXAMPLES); do \ + echo "Running $$example..."; \ + cargo run --all-features --example $$example || exit 1; \ + done + cargo run --all-features --example export_petersen_mapping + +# Build Typst paper (generates examples first) +paper: examples cargo run --example export_graph cd docs/paper && typst compile reductions.typ reductions.pdf diff --git a/docs/paper/reduction_graph.json b/docs/paper/reduction_graph.json index 399dcbab..3a1cebab 100644 --- a/docs/paper/reduction_graph.json +++ b/docs/paper/reduction_graph.json @@ -418,37 +418,37 @@ }, { "source": { - "name": "Satisfiability", + "name": "MaxCut", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "DominatingSet", + "name": "SpinGlass", 
"variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": true }, { "source": { - "name": "Satisfiability", + "name": "QUBO", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "f64" } }, "target": { - "name": "IndependentSet", + "name": "SpinGlass", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "f64" } }, - "bidirectional": false + "bidirectional": true }, { "source": { @@ -459,10 +459,10 @@ } }, "target": { - "name": "KColoring", + "name": "DominatingSet", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "Unweighted" } }, "bidirectional": false @@ -476,58 +476,58 @@ } }, "target": { - "name": "KSatisfiability", + "name": "IndependentSet", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, - "bidirectional": true + "bidirectional": false }, { "source": { - "name": "SetPacking", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "IndependentSet", + "name": "KColoring", "variant": { "graph": "SimpleGraph", - "weight": "Unweighted" + "weight": "i32" } }, - "bidirectional": true + "bidirectional": false }, { "source": { - "name": "SetPacking", + "name": "Satisfiability", "variant": { "graph": "SimpleGraph", - "weight": "i32" + "weight": "Unweighted" } }, "target": { - "name": "QUBO", + "name": "KSatisfiability", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "Unweighted" } }, - "bidirectional": false + "bidirectional": true }, { "source": { - "name": "SpinGlass", + "name": "SetPacking", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" } }, "target": { - "name": "MaxCut", + "name": "IndependentSet", "variant": { "graph": "SimpleGraph", "weight": "Unweighted" @@ -537,10 +537,10 @@ }, { "source": { - "name": "SpinGlass", + "name": "SetPacking", "variant": { "graph": "SimpleGraph", - "weight": "f64" + "weight": "i32" } }, "target": { @@ -550,7 
+550,7 @@ "weight": "f64" } }, - "bidirectional": true + "bidirectional": false }, { "source": { diff --git a/docs/paper/reductions.typ b/docs/paper/reductions.typ index 3e86717a..0cfc2a90 100644 --- a/docs/paper/reductions.typ +++ b/docs/paper/reductions.typ @@ -10,13 +10,52 @@ #show link: set text(blue) -// Table of contents -#outline(title: "Contents", indent: 1.5em, depth: 2) - // Set up theorem environments with ctheorems #show: thmrules.with(qed-symbol: $square$) -#let theorem = thmplain("theorem", "Theorem").with(numbering: none) +// === Example JSON helpers === +// Load example JSON files generated by `make examples`. +// Unified schema: { source: { problem, variant, instance }, target: { ... }, overhead: [...] } +#let load-example(name) = json("examples/" + name + ".json") + +// Load result JSON: { solutions: [{ source_config, target_config }, ...] } +#let load-results(name) = json("examples/" + name + ".result.json") + +// Extract primary variable count from an instance dict. +#let instance-vars(inst) = { + if "num_variables" in inst { inst.num_variables } + else if "num_vertices" in inst { inst.num_vertices } + else if "num_vars" in inst { inst.num_vars } + else if "num_sets" in inst { inst.num_sets } + else if "num_spins" in inst { inst.num_spins } + else if "num_gates" in inst { inst.num_gates } + else if "num_bits_first" in inst and "num_bits_second" in inst { inst.num_bits_first + inst.num_bits_second } + else { 0 } +} + +// Render a concrete example box from JSON data (unified schema) +#let reduction-example(data, caption: none, body) = { + let src-vars = instance-vars(data.source.instance) + let tgt-vars = instance-vars(data.target.instance) + block( + width: 100%, + inset: (x: 1em, y: 0.8em), + fill: rgb("#f0f7ff"), + stroke: (left: 2pt + rgb("#4a86e8")), + )[ + #if caption != none { + text(weight: "bold")[Concrete Example: #caption] + parbreak() + } + *Source:* #data.source.problem with #src-vars variables + #h(1em) + *Target:* 
#data.target.problem with #tgt-vars variables \ + *Overhead:* #if src-vars > 0 and tgt-vars > 0 [#calc.round(tgt-vars / src-vars, digits: 1)x variable growth] else [—] + #if body != none { parbreak(); body } + ] +} + +#let theorem = thmplain("theorem", "Theorem", base_level: 1) #let proof = thmproof("proof", "Proof") #let definition = thmbox( "definition", @@ -40,7 +79,11 @@ *Abstract.* We present formal definitions for computational problems and polynomial-time reductions implemented in the `problemreductions` library. For each reduction, we state theorems with constructive proofs that preserve solution structure. ] -#v(1em) + +// Table of contents +#outline(title: "Contents", indent: 1.5em, depth: 2) + +#pagebreak() = Introduction @@ -64,137 +107,99 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Independent Set (IS)")[ Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ maximizing $sum_(v in S) w(v)$ such that no two vertices in $S$ are adjacent: $forall u, v in S: (u, v) in.not E$. - _Reduces to:_ Set Packing (@def:set-packing), QUBO (@def:qubo). - - _Reduces from:_ Vertex Cover (@def:vertex-cover), SAT (@def:satisfiability), Set Packing (@def:set-packing). + _Implemented reductions:_ IS→VC (@thm:is-to-vc), IS→SetPacking (@thm:is-to-setpacking), IS→QUBO (@thm:is-to-qubo), IS→ILP (@thm:is-to-ilp), IS→GridGraph IS (@thm:is-to-gridgraph), VC→IS (@thm:is-to-vc), SAT→IS (@thm:sat-to-is). ```rust pub struct IndependentSet { - graph: UnGraph<(), ()>, // The underlying graph - weights: Vec, // Weights for each vertex - } - - impl Problem for IndependentSet { - const NAME: &'static str = "IndependentSet"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... 
+ graph: UnGraph<(), ()>, + weights: Vec, } ``` + + Where `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$, and `weights` stores vertex weights $w: V -> RR$ indexed by vertex ID. The solution is a subset $S subset.eq V$ represented as a `Vec` of vertex indices. ] #definition("Vertex Cover (VC)")[ Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ minimizing $sum_(v in S) w(v)$ such that every edge has at least one endpoint in $S$: $forall (u, v) in E: u in S or v in S$. - _Reduces to:_ Independent Set (@def:independent-set), Set Covering (@def:set-covering), QUBO (@def:qubo). - - _Reduces from:_ Independent Set (@def:independent-set). + _Implemented reductions:_ VC→IS (@thm:is-to-vc), VC→SetCovering (@thm:vc-to-setcovering), VC→QUBO (@thm:vc-to-qubo), VC→ILP (@thm:vc-to-ilp), IS→VC (@thm:is-to-vc). ```rust pub struct VertexCovering { - graph: UnGraph<(), ()>, // The underlying graph - weights: Vec, // Weights for each vertex - } - - impl Problem for VertexCovering { - const NAME: &'static str = "VertexCovering"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... + graph: UnGraph<(), ()>, + weights: Vec, } ``` + + Where `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$, and `weights` stores vertex weights $w: V -> RR$ indexed by vertex ID. The solution is a subset $S subset.eq V$ represented as a `Vec` of vertex indices. ] #definition("Max-Cut")[ Given $G = (V, E)$ with weights $w: E -> RR$, find partition $(S, overline(S))$ maximizing $sum_((u,v) in E: u in S, v in overline(S)) w(u, v)$. - _Reduces to:_ Spin Glass (@def:spin-glass). - - _Reduces from:_ Spin Glass (@def:spin-glass). + _Implemented reductions:_ MaxCut→SpinGlass (@thm:spinglass-maxcut), SpinGlass→MaxCut (@thm:spinglass-maxcut). 
```rust pub struct MaxCut { - graph: UnGraph<(), W>, // Weighted graph (edge weights) - } - - impl Problem for MaxCut { - const NAME: &'static str = "MaxCut"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... + graph: UnGraph<(), W>, } ``` + + Where `graph` represents $G = (V, E)$ with edge weights $w: E -> RR$ stored in graph edge data. The solution is a partition $(S, overline(S))$ represented as a binary assignment `Vec` where 0/1 indicates partition membership. ] #definition("Graph Coloring")[ Given $G = (V, E)$ and $k$ colors, find $c: V -> {1, ..., k}$ minimizing $|{(u, v) in E : c(u) = c(v)}|$. - _Reduces to:_ ILP (@def:ilp), QUBO (@def:qubo). - - _Reduces from:_ SAT (@def:satisfiability). + _Implemented reductions:_ Coloring→ILP (@thm:coloring-to-ilp), Coloring→QUBO (@thm:coloring-to-qubo), SAT→Coloring (@thm:sat-to-coloring). ```rust pub struct Coloring { - num_colors: usize, // Number of available colors (K) - graph: UnGraph<(), ()>, // The underlying graph - } - - impl Problem for Coloring { - const NAME: &'static str = "Coloring"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", "Unweighted")] - } - // ... + num_colors: usize, + graph: UnGraph<(), ()>, } ``` + + Where `num_colors` is the number of colors $k$, and `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$. The solution is a color assignment $c: V -> {0, ..., k-1}$ represented as a `Vec` indexed by vertex. ] #definition("Dominating Set")[ Given $G = (V, E)$ with weights $w: V -> RR$, find $S subset.eq V$ minimizing $sum_(v in S) w(v)$ s.t. $forall v in V: v in S or exists u in S: (u, v) in E$. - _Reduces from:_ SAT (@def:satisfiability). + _Implemented reductions:_ DominatingSet→ILP (@thm:dominatingset-to-ilp), SAT→DominatingSet (@thm:sat-to-dominatingset). 
```rust pub struct DominatingSet { - graph: UnGraph<(), ()>, // The underlying graph - weights: Vec, // Weights for each vertex - } - - impl Problem for DominatingSet { - const NAME: &'static str = "DominatingSet"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... + graph: UnGraph<(), ()>, + weights: Vec, } ``` + + Where `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$, and `weights` stores vertex weights $w: V -> RR$ indexed by vertex ID. The solution is a subset $S subset.eq V$ represented as a `Vec` of vertex indices. ] #definition("Matching")[ Given $G = (V, E)$ with weights $w: E -> RR$, find $M subset.eq E$ maximizing $sum_(e in M) w(e)$ s.t. $forall e_1, e_2 in M: e_1 inter e_2 = emptyset$. - _Reduces to:_ Set Packing (@def:set-packing). + _Implemented reductions:_ Matching→SetPacking (@thm:matching-to-setpacking), Matching→ILP (@thm:matching-to-ilp). ```rust pub struct Matching { - num_vertices: usize, // Number of vertices - graph: UnGraph<(), W>, // Weighted graph - edge_weights: Vec, // Weights for each edge - } - - impl Problem for Matching { - const NAME: &'static str = "Matching"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... + num_vertices: usize, + graph: UnGraph<(), W>, + edge_weights: Vec, } ``` + + Where `num_vertices` is $|V|$, `graph` represents $G = (V, E)$ with edge weights, and `edge_weights` stores weights $w: E -> RR$ indexed by edge index. The solution is a subset $M subset.eq E$ represented as a `Vec` of edge indices. ] +#definition("Clique")[ + Given a graph $G = (V, E)$ and an integer $k$, the *Clique* problem asks whether there exists a subset $K subset.eq V$ of size at least $k$ such that every pair of distinct vertices in $K$ is adjacent, i.e., $(u, v) in E$ for all distinct $u, v in K$. + + _Implemented reductions:_ Clique→ILP (@thm:clique-to-ilp). 
+] + #definition("Unit Disk Graph (Grid Graph)")[ A graph $G = (V, E)$ where vertices $V$ are points on a 2D lattice and $(u, v) in E$ iff the Euclidean distance $d(u, v) <= r$ for some radius $r$. A _King's subgraph_ uses the King's graph lattice (8-connectivity square grid) with $r approx 1.5$. ] @@ -204,46 +209,32 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Set Packing")[ Given universe $U$, collection $cal(S) = {S_1, ..., S_m}$ with $S_i subset.eq U$, weights $w: cal(S) -> RR$, find $cal(P) subset.eq cal(S)$ maximizing $sum_(S in cal(P)) w(S)$ s.t. $forall S_i, S_j in cal(P): S_i inter S_j = emptyset$. - _Reduces to:_ Independent Set (@def:independent-set), QUBO (@def:qubo). - - _Reduces from:_ Independent Set (@def:independent-set), Matching (@def:matching). + _Implemented reductions:_ IS→SetPacking (@thm:is-to-setpacking), SetPacking→QUBO (@thm:setpacking-to-qubo), SetPacking→ILP (@thm:setpacking-to-ilp), Matching→SetPacking (@thm:matching-to-setpacking). ```rust pub struct SetPacking { - sets: Vec>, // Collection of sets - weights: Vec, // Weights for each set - } - - impl Problem for SetPacking { - const NAME: &'static str = "SetPacking"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... + sets: Vec>, + weights: Vec, } ``` + + Where `sets` represents the collection $cal(S) = {S_1, ..., S_m}$ where each `Vec` contains universe element indices, and `weights` stores set weights $w: cal(S) -> RR$ indexed by set index. The solution is a subset $cal(P) subset.eq cal(S)$ represented as a `Vec` of set indices. ] #definition("Set Covering")[ Given universe $U$, collection $cal(S)$ with weights $w: cal(S) -> RR$, find $cal(C) subset.eq cal(S)$ minimizing $sum_(S in cal(C)) w(S)$ s.t. $union.big_(S in cal(C)) S = U$. - _Reduces from:_ Vertex Cover (@def:vertex-cover). 
+ _Implemented reductions:_ SetCovering→ILP (@thm:setcovering-to-ilp), VC→SetCovering (@thm:vc-to-setcovering). ```rust pub struct SetCovering { - universe_size: usize, // Size of the universe - sets: Vec>, // Collection of sets - weights: Vec, // Weights for each set - } - - impl Problem for SetCovering { - const NAME: &'static str = "SetCovering"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... + universe_size: usize, + sets: Vec>, + weights: Vec, } ``` + + Where `universe_size` is $|U|$, `sets` represents the collection $cal(S)$ where each `Vec` contains universe element indices, and `weights` stores set weights $w: cal(S) -> RR$ indexed by set index. The solution is a subset $cal(C) subset.eq cal(S)$ represented as a `Vec` of set indices. ] == Optimization Problems @@ -251,81 +242,57 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("Spin Glass (Ising Model)")[ Given $n$ spin variables $s_i in {-1, +1}$, pairwise couplings $J_(i j) in RR$, and external fields $h_i in RR$, minimize the Hamiltonian (energy function): $H(bold(s)) = -sum_((i,j)) J_(i j) s_i s_j - sum_i h_i s_i$. - _Reduces to:_ Max-Cut (@def:max-cut), QUBO (@def:qubo). - - _Reduces from:_ Circuit-SAT (@def:circuit-sat), Max-Cut (@def:max-cut), QUBO (@def:qubo). + _Implemented reductions:_ SpinGlass→MaxCut (@thm:spinglass-maxcut), SpinGlass→QUBO (@thm:spinglass-qubo), Circuit→SpinGlass (@thm:circuit-to-spinglass), MaxCut→SpinGlass (@thm:spinglass-maxcut), QUBO→SpinGlass (@thm:spinglass-qubo). ```rust pub struct SpinGlass { - num_spins: usize, // Number of spins - interactions: Vec<((usize, usize), W)>, // J_ij couplings - fields: Vec, // h_i on-site fields - } - - impl Problem for SpinGlass { - const NAME: &'static str = "SpinGlass"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... 
+ num_spins: usize, + interactions: Vec<((usize, usize), W)>, + fields: Vec, } ``` + + Where `num_spins` is $n$, `interactions` stores pairwise couplings $J_(i j)$ as `Vec<((usize, usize), W)>`, and `fields` stores external fields $h_i$ as `Vec` indexed by spin. The solution is a spin assignment $bold(s) in {-1, +1}^n$ encoded as `Vec` where 0 maps to $s=-1$ and 1 maps to $s=+1$. ] #definition("QUBO")[ Given $n$ binary variables $x_i in {0, 1}$, upper-triangular matrix $Q in RR^(n times n)$, minimize $f(bold(x)) = sum_(i=1)^n Q_(i i) x_i + sum_(i < j) Q_(i j) x_i x_j$ (using $x_i^2 = x_i$ for binary variables). - _Reduces to:_ Spin Glass (@def:spin-glass). - - _Reduces from:_ Spin Glass (@def:spin-glass), Independent Set (@def:independent-set), Vertex Cover (@def:vertex-cover), Graph Coloring (@def:coloring), Set Packing (@def:set-packing), $k$-SAT (@def:k-sat), ILP (@def:ilp). + _Implemented reductions:_ QUBO→SpinGlass (@thm:spinglass-qubo), IS→QUBO (@thm:is-to-qubo), VC→QUBO (@thm:vc-to-qubo), Coloring→QUBO (@thm:coloring-to-qubo), SetPacking→QUBO (@thm:setpacking-to-qubo), kSAT→QUBO (@thm:ksat-to-qubo), ILP→QUBO (@thm:ilp-to-qubo), SpinGlass→QUBO (@thm:spinglass-qubo). ```rust pub struct QUBO { - num_vars: usize, // Number of variables - matrix: Vec>, // Q matrix (upper triangular) - } - - impl Problem for QUBO { - const NAME: &'static str = "QUBO"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... + num_vars: usize, + matrix: Vec>, } ``` + + Where `num_vars` is $n$, and `matrix` stores the upper-triangular $Q in RR^(n times n)$ as `Vec>` where `matrix[i][j]` ($i <= j$) stores $Q_(i j)$. The solution is a binary assignment $bold(x) in {0, 1}^n$ represented as `Vec`. 
] #definition("Integer Linear Programming (ILP)")[ Given $n$ integer variables $bold(x) in ZZ^n$, constraint matrix $A in RR^(m times n)$, bounds $bold(b) in RR^m$, and objective $bold(c) in RR^n$, find $bold(x)$ minimizing $bold(c)^top bold(x)$ subject to $A bold(x) <= bold(b)$ and variable bounds. - _Reduces to:_ QUBO (@def:qubo). - - _Reduces from:_ Graph Coloring (@def:coloring), Factoring (@def:factoring). + _Implemented reductions:_ ILP→QUBO (@thm:ilp-to-qubo), Coloring→ILP (@thm:coloring-to-ilp), Factoring→ILP (@thm:factoring-to-ilp), IS→ILP (@thm:is-to-ilp), VC→ILP (@thm:vc-to-ilp), Matching→ILP (@thm:matching-to-ilp), SetPacking→ILP (@thm:setpacking-to-ilp), SetCovering→ILP (@thm:setcovering-to-ilp), DominatingSet→ILP (@thm:dominatingset-to-ilp), Clique→ILP (@thm:clique-to-ilp). ```rust pub struct ILP { - num_vars: usize, // Number of variables - bounds: Vec, // Bounds per variable - constraints: Vec, // Linear constraints - objective: Vec<(usize, f64)>, // Sparse objective - sense: ObjectiveSense, // Maximize or Minimize + num_vars: usize, + bounds: Vec, + constraints: Vec, + objective: Vec<(usize, f64)>, + sense: ObjectiveSense, } pub struct VarBounds { lower: Option, upper: Option } pub struct LinearConstraint { - terms: Vec<(usize, f64)>, // (var_index, coefficient) - cmp: Comparison, // Le, Ge, or Eq + terms: Vec<(usize, f64)>, + cmp: Comparison, rhs: f64, } - - impl Problem for ILP { - const NAME: &'static str = "ILP"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", "f64")] - } - // ... - } ``` + + Where `num_vars` is $n$, `bounds` stores per-variable bounds $x_i in [l_i, u_i]$ as `Vec`, `constraints` encodes $A bold(x) <= bold(b)$ as `Vec`, `objective` is the sparse objective $bold(c)$ as `Vec<(usize, f64)>`, and `sense` specifies maximize or minimize. The solution is $bold(x) in ZZ^n$ represented as `Vec`. 
] == Satisfiability Problems @@ -333,103 +300,69 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #definition("SAT")[ Given a CNF formula $phi = and.big_(j=1)^m C_j$ with $m$ clauses over $n$ Boolean variables, where each clause $C_j = or.big_i ell_(j i)$ is a disjunction of literals, find an assignment $bold(x) in {0, 1}^n$ such that $phi(bold(x)) = 1$ (all clauses satisfied). - _Reduces to:_ Independent Set (@def:independent-set), Graph Coloring (@def:coloring), Dominating Set (@def:dominating-set), $k$-SAT (@def:k-sat). - - _Reduces from:_ $k$-SAT (@def:k-sat). + _Implemented reductions:_ SAT→IS (@thm:sat-to-is), SAT→Coloring (@thm:sat-to-coloring), SAT→DominatingSet (@thm:sat-to-dominatingset), SAT→kSAT (@thm:sat-ksat), kSAT→SAT (@thm:sat-ksat). ```rust pub struct Satisfiability { - num_vars: usize, // Number of variables - clauses: Vec, // Clauses in CNF - weights: Vec, // Weights per clause (MAX-SAT) + num_vars: usize, + clauses: Vec, + weights: Vec, } pub struct CNFClause { - literals: Vec, // Signed: +i for x_i, -i for NOT x_i - } - - impl Problem for Satisfiability { - const NAME: &'static str = "Satisfiability"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... + literals: Vec, } ``` + + Where `num_vars` is $n$, `clauses` stores CNF clauses $C_j$ as `Vec`, and `weights` stores clause weights for MAX-SAT as `Vec`. Each `CNFClause` has `literals: Vec` where $+i$ denotes $x_i$ and $-i$ denotes $not x_i$ (1-indexed). The solution is an assignment $bold(x) in {0, 1}^n$ represented as `Vec`. ] #definition([$k$-SAT])[ SAT with exactly $k$ literals per clause. - _Reduces to:_ SAT (@def:satisfiability), QUBO (@def:qubo). - - _Reduces from:_ SAT (@def:satisfiability). + _Implemented reductions:_ kSAT→SAT (@thm:sat-ksat), kSAT→QUBO (@thm:ksat-to-qubo), SAT→kSAT (@thm:sat-ksat). 
```rust pub struct KSatisfiability { - num_vars: usize, // Number of variables - clauses: Vec, // Each clause has exactly K literals - weights: Vec, // Weights per clause - } - - impl Problem for KSatisfiability { - const NAME: &'static str = "KSatisfiability"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... + num_vars: usize, + clauses: Vec, + weights: Vec, } ``` + + Where `num_vars` is $n$, `clauses` stores clauses with exactly $k$ literals per clause as `Vec`, and `weights` stores clause weights as `Vec`. The solution is an assignment $bold(x) in {0, 1}^n$ represented as `Vec`. ] #definition("Circuit-SAT")[ Given a Boolean circuit $C$ composed of logic gates (AND, OR, NOT, XOR) with $n$ input variables, find an input assignment $bold(x) in {0,1}^n$ such that $C(bold(x)) = 1$. - _Reduces to:_ Spin Glass (@def:spin-glass). - - _Reduces from:_ Factoring (@def:factoring). + _Implemented reductions:_ Circuit→SpinGlass (@thm:circuit-to-spinglass), Factoring→Circuit (@thm:factoring-to-circuit). ```rust pub struct CircuitSAT { - circuit: Circuit, // The boolean circuit - variables: Vec, // Variable names in order - weights: Vec, // Weights per assignment - } - - pub struct Circuit { assignments: Vec } - pub struct Assignment { outputs: Vec, expr: BooleanExpr } - pub enum BooleanOp { Var(String), Const(bool), Not(..), And(..), Or(..), Xor(..) } - - impl Problem for CircuitSAT { - const NAME: &'static str = "CircuitSAT"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", short_type_name::())] - } - // ... + circuit: Circuit, + variables: Vec, + weights: Vec, } ``` + + Where `circuit` is the Boolean circuit of logic gates (AND, OR, NOT, XOR), `variables` stores input variable names as `Vec`, and `weights` stores assignment weights as `Vec`. The solution is an input assignment $bold(x) in {0,1}^n$ represented as `Vec`. 
] #definition("Factoring")[ Given a composite integer $N$ and bit sizes $m, n$, find integers $p in [2, 2^m - 1]$ and $q in [2, 2^n - 1]$ such that $p times q = N$. Here $p$ has $m$ bits and $q$ has $n$ bits. - _Reduces to:_ Circuit-SAT (@def:circuit-sat), ILP (@def:ilp). + _Implemented reductions:_ Factoring→Circuit (@thm:factoring-to-circuit), Factoring→ILP (@thm:factoring-to-ilp). ```rust pub struct Factoring { - m: usize, // Bits for first factor - n: usize, // Bits for second factor - target: u64, // The number to factor - } - - impl Problem for Factoring { - const NAME: &'static str = "Factoring"; - fn variant() -> Vec<(&'static str, &'static str)> { - vec![("graph", "SimpleGraph"), ("weight", "i32")] - } - // ... + m: usize, + n: usize, + target: u64, } ``` + + Where `m` is the number of bits for the first factor $p$, `n` is the number of bits for the second factor $q$, and `target` is the composite $N$ to factor. The solution is bit assignments for $p$ and $q$ represented as `Vec`. ] = Reductions @@ -438,10 +371,10 @@ In all graph problems below, $G = (V, E)$ denotes an undirected graph with $|V| #theorem[ *(IS $arrow.l.r$ VC)* $S subset.eq V$ is independent iff $V backslash S$ is a vertex cover, with $|"IS"| + |"VC"| = |V|$. [_Problems:_ @def:independent-set, @def:vertex-cover.] -] +] #proof[ - ($arrow.r.double$) If $S$ is independent, for any $(u, v) in E$, at most one endpoint lies in $S$, so $V backslash S$ covers all edges. ($arrow.l.double$) If $C$ is a cover, for any $u, v in V backslash C$, $(u, v) in.not E$, so $V backslash C$ is independent. + ($arrow.r.double$) If $S$ is independent, for any $(u, v) in E$, at most one endpoint lies in $S$, so $V backslash S$ covers all edges. ($arrow.l.double$) If $C$ is a cover, for any $u, v in V backslash C$, $(u, v) in.not E$, so $V backslash C$ is independent. _Variable mapping:_ Given IS instance $(G, w)$, create VC instance $(G, w)$ with identical graph and weights. 
Solution extraction: for VC solution $C$, return $S = V backslash C$. The complement operation preserves optimality since $|S| + |C| = |V|$ is constant. ] ```rust @@ -456,12 +389,23 @@ let is_solution = result.extract_solution(&vc_solutions[0]); assert!(is_problem.solution_size(&is_solution).is_valid); ``` +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_is_to_vc.rs")[`reduction_is_to_vc.rs`]. + +#let is_vc = load-example("is_to_vc") +#let is_vc_r = load-results("is_to_vc") +#let is_vc_sol = is_vc_r.solutions.at(0) +#reduction-example(is_vc, caption: [Path graph $P_4$: IS $arrow.l.r$ VC])[ + Source IS: $S = {#is_vc_sol.source_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)).join(", ")}$ (size #is_vc_sol.source_config.filter(x => x == 1).len()) #h(1em) + Target VC: $C = {#is_vc_sol.target_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)).join(", ")}$ (size #is_vc_sol.target_config.filter(x => x == 1).len()) \ + $|"IS"| + |"VC"| = #instance-vars(is_vc.source.instance) = |V|$ #sym.checkmark +] + #theorem[ *(IS $arrow.r$ Set Packing)* Construct $U = E$, $S_v = {e in E : v in e}$, $w(S_v) = w(v)$. Then $I$ is independent iff ${S_v : v in I}$ is a packing. [_Problems:_ @def:independent-set, @def:set-packing.] -] +] #proof[ - Independence implies disjoint incident edge sets; conversely, disjoint edge sets imply no shared edges. + Independence implies disjoint incident edge sets; conversely, disjoint edge sets imply no shared edges. _Variable mapping:_ Universe $U = E$ (edges), sets $S_v = {e in E : v in e}$ (edges incident to vertex $v$), weights $w(S_v) = w(v)$. Solution extraction: for packing ${S_v : v in P}$, return IS $= P$ (the vertices whose sets were packed). 
] ```rust @@ -476,20 +420,34 @@ let is_solution = result.extract_solution(&sp_solutions[0]); assert!(is_problem.solution_size(&is_solution).is_valid); ``` +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_is_to_setpacking.rs")[`reduction_is_to_setpacking.rs`]. + #theorem[ *(VC $arrow.r$ Set Covering)* Construct $U = {0, ..., |E|-1}$, $S_v = {i : e_i "incident to" v}$, $w(S_v) = w(v)$. Then $C$ is a cover iff ${S_v : v in C}$ covers $U$. [_Problems:_ @def:vertex-cover, @def:set-covering.] +] + +#proof[ + Each vertex's edge set becomes a subset; the cover condition (every edge covered) maps to the covering condition (every universe element in some selected set). _Variable mapping:_ Universe $U = {0, ..., |E|-1}$ (edge indices), $S_v = {i : e_i "incident to" v}$, $w(S_v) = w(v)$. Solution extraction: for covering ${S_v : v in C}$, return VC $= C$. ] +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_vc_to_setcovering.rs")[`reduction_vc_to_setcovering.rs`]. + #theorem[ *(Matching $arrow.r$ Set Packing)* Construct $U = V$, $S_e = {u, v}$ for $e = (u,v)$, $w(S_e) = w(e)$. Then $M$ is a matching iff ${S_e : e in M}$ is a packing. [_Problems:_ @def:matching, @def:set-packing.] +] + +#proof[ + Each edge becomes a set of its endpoints; disjoint edges have disjoint endpoint sets. _Variable mapping:_ Universe $U = V$ (vertices), $S_e = {u, v}$ for $e = (u,v)$, $w(S_e) = w(e)$. Solution extraction: for packing ${S_e : e in P}$, return matching $= P$ (the edges whose endpoint sets were packed). ] +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_matching_to_setpacking.rs")[`reduction_matching_to_setpacking.rs`]. + #theorem[ *(Spin Glass $arrow.l.r$ QUBO)* The substitution $s_i = 2x_i - 1$ yields $H_"SG"(bold(s)) = H_"QUBO"(bold(x)) + "const"$. [_Problems:_ @def:spin-glass, @def:qubo.] 
-] +] #proof[ - Expanding $-sum_(i,j) J_(i j) (2x_i - 1)(2x_j - 1) - sum_i h_i (2x_i - 1)$ gives $Q_(i j) = -4J_(i j)$, $Q_(i i) = 2sum_j J_(i j) - 2h_i$. + Expanding $-sum_(i,j) J_(i j) (2x_i - 1)(2x_j - 1) - sum_i h_i (2x_i - 1)$ gives $Q_(i j) = -4J_(i j)$, $Q_(i i) = 2sum_j J_(i j) - 2h_i$. _Variable mapping:_ Spin $s_i in {-1, +1}$ maps to binary $x_i in {0, 1}$ via $s_i = 2x_i - 1$. Solution extraction: for QUBO solution $bold(x)$, return spins $s_i = 2x_i - 1$. The reverse maps $x_i = (s_i + 1)/2$. ] ```rust @@ -504,6 +462,11 @@ let sg_solution = result.extract_solution(&qubo_solutions[0]); assert_eq!(sg_solution.len(), 2); ``` +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_spinglass_to_qubo.rs")[`reduction_spinglass_to_qubo.rs`]. + +#let sg_qubo = load-example("spinglass_to_qubo") +#reduction-example(sg_qubo, caption: [2-spin system with coupling $J_(01) = -1$, fields $h = (0.5, -0.5)$])[] + == Penalty-Method QUBO Reductions The _penalty method_ @glover2019 @lucas2014 converts a constrained optimization problem into an unconstrained QUBO by adding quadratic penalty terms. Given an objective $"obj"(bold(x))$ to minimize and constraints $g_k (bold(x)) = 0$, construct: @@ -512,7 +475,7 @@ where $P$ is a penalty weight large enough that any constraint violation costs m #theorem[ *(IS $arrow.r$ QUBO)* Given $G = (V, E)$ with weights $w$, construct upper-triangular $Q in RR^(n times n)$ with $Q_(i i) = -w_i$ and $Q_(i j) = P$ for $(i,j) in E$ ($i < j$), where $P = 1 + sum_i w_i$. Then minimizing $f(bold(x)) = sum_i Q_(i i) x_i + sum_(i #proof[ _Construction._ The IS objective is: maximize $sum_i w_i x_i$ subject to $x_i x_j = 0$ for $(i,j) in E$. 
Applying the penalty method (@sec:penalty-method): @@ -534,9 +497,34 @@ let is_solution = result.extract_solution(&solutions[0]); assert!(is.solution_size(&is_solution).is_valid); ``` +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_is_to_qubo.rs")[`reduction_is_to_qubo.rs`]. + +#let is_qubo = load-example("is_to_qubo") +#let is_qubo_r = load-results("is_to_qubo") +#block( + width: 100%, + inset: (x: 1em, y: 0.8em), + fill: rgb("#f0f7ff"), + stroke: (left: 2pt + rgb("#4a86e8")), +)[ + #text(weight: "bold")[Concrete Example: IS on path $P_4$ to QUBO] + #parbreak() + *Source:* #is_qubo.source.problem with #is_qubo.source.instance.num_vertices vertices, edges $= {#is_qubo.source.instance.edges.map(e => $(#e.at(0), #e.at(1))$).join(", ")}$ \ + *QUBO matrix* ($Q in RR^(#is_qubo.target.instance.num_vars times #is_qubo.target.instance.num_vars)$): + $ Q = #math.mat(..is_qubo.target.instance.matrix.map(row => row.map(v => { + let r = calc.round(v, digits: 0) + [#r] + }))) $ + *Optimal IS* (size #is_qubo_r.solutions.at(0).source_config.filter(x => x == 1).len()): + #is_qubo_r.solutions.map(sol => { + let verts = sol.source_config.enumerate().filter(((i, x)) => x == 1).map(((i, x)) => str(i)) + $\{#verts.join(", ")\}$ + }).join(", ") +] + #theorem[ *(VC $arrow.r$ QUBO)* Given $G = (V, E)$ with weights $w$, construct upper-triangular $Q$ with $Q_(i i) = w_i - P dot "deg"(i)$ and $Q_(i j) = P$ for $(i,j) in E$ ($i < j$), where $P = 1 + sum_i w_i$ and $"deg"(i)$ is the degree of vertex $i$. [_Problems:_ @def:vertex-cover, @def:qubo.] -] +] #proof[ _Construction._ The VC objective is: minimize $sum_i w_i x_i$ subject to $x_i + x_j >= 1$ for $(i,j) in E$. 
Applying the penalty method (@sec:penalty-method), the constraint $x_i + x_j >= 1$ is violated iff $x_i = x_j = 0$, with penalty $(1 - x_i)(1 - x_j)$: @@ -557,9 +545,11 @@ let vc_solution = result.extract_solution(&solutions[0]); assert!(vc.solution_size(&vc_solution).is_valid); ``` +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_vc_to_qubo.rs")[`reduction_vc_to_qubo.rs`]. + #theorem[ *(KColoring $arrow.r$ QUBO)* Given $G = (V, E)$ with $k$ colors, construct upper-triangular $Q in RR^(n k times n k)$ using one-hot encoding $x_(v,c) in {0,1}$ ($n k$ variables indexed by $v dot k + c$). [_Problems:_ @def:coloring, @def:qubo.] -] +] #proof[ _Construction._ Applying the penalty method (@sec:penalty-method), the QUBO objective combines a one-hot constraint penalty and an edge conflict penalty: @@ -588,9 +578,11 @@ let kc_solution = result.extract_solution(&solutions[0]); assert_eq!(solutions.len(), 6); // 3! valid 3-colorings of K3 ``` +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_coloring_to_qubo.rs")[`reduction_coloring_to_qubo.rs`]. + #theorem[ *(SetPacking $arrow.r$ QUBO)* Equivalent to IS on the intersection graph: $Q_(i i) = -w_i$ and $Q_(i j) = P$ for overlapping sets $i, j$ ($i < j$), where $P = 1 + sum_i w_i$. [_Problems:_ @def:set-packing, @def:qubo.] -] +] #proof[ Two sets conflict iff they share an element. The intersection graph has sets as vertices and edges between conflicting pairs. Applying the penalty method (@sec:penalty-method) yields the same QUBO as IS on this graph: diagonal rewards selection, off-diagonal penalizes overlap. Correctness follows from the IS→QUBO proof. @@ -608,9 +600,11 @@ let sp_solution = result.extract_solution(&solutions[0]); assert!(sp.solution_size(&sp_solution).is_valid); ``` +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_setpacking_to_qubo.rs")[`reduction_setpacking_to_qubo.rs`]. 
+ #theorem[ *(2-SAT $arrow.r$ QUBO)* Given a Max-2-SAT instance with $m$ clauses over $n$ variables, construct upper-triangular $Q in RR^(n times n)$ where each clause $(ell_i or ell_j)$ contributes a penalty gadget encoding its unique falsifying assignment. [_Problems:_ @def:k-sat, @def:qubo.] -] +] #proof[ _Construction._ Applying the penalty method (@sec:penalty-method), each 2-literal clause has exactly one falsifying assignment (both literals false). The penalty for that assignment is a quadratic function of $x_i, x_j$: @@ -645,9 +639,11 @@ let sat_solution = result.extract_solution(&solutions[0]); assert!(ksat.solution_size(&sat_solution).is_valid); ``` +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_ksatisfiability_to_qubo.rs")[`reduction_ksatisfiability_to_qubo.rs`]. + #theorem[ *(Binary ILP $arrow.r$ QUBO)* Given binary ILP: maximize $bold(c)^top bold(x)$ subject to $A bold(x) = bold(b)$, $bold(x) in {0,1}^n$, construct upper-triangular $Q = -"diag"(bold(c) + 2P bold(b)^top A) + P A^top A$ where $P = 1 + ||bold(c)||_1 + ||bold(b)||_1$. [_Problems:_ @def:ilp, @def:qubo.] -] +] #proof[ _Step 1: Normalize constraints._ Convert inequalities to equalities using slack variables: $bold(a)_k^top bold(x) <= b_k$ becomes $bold(a)_k^top bold(x) + sum_(s=0)^(S_k - 1) 2^s y_(k,s) = b_k$ where $S_k = ceil(log_2 (b_k + 1))$ slack bits. For $>=$ constraints, the slack has a negative sign. The extended system is $A' bold(x)' = bold(b)$ with $bold(x)' = (bold(x), bold(y)) in {0,1}^(n')$. For minimization, negate $bold(c)$ to convert to maximization. @@ -682,11 +678,13 @@ let ilp_solution = result.extract_solution(&solutions[0]); assert_eq!(ilp_solution, vec![1, 0, 1]); // obj = 4 ``` +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_ilp_to_qubo.rs")[`reduction_ilp_to_qubo.rs`]. 
+ == Non-Trivial Reductions #theorem[ *(SAT $arrow.r$ IS)* @karp1972 Given CNF $phi$ with $m$ clauses, construct graph $G$ such that $phi$ is satisfiable iff $G$ has an IS of size $m$. [_Problems:_ @def:satisfiability, @def:independent-set.] -] +] #proof[ _Construction._ For $phi = and.big_(j=1)^m C_j$ with $C_j = (ell_(j,1) or ... or ell_(j,k_j))$: @@ -700,9 +698,19 @@ assert_eq!(ilp_solution, vec![1, 0, 1]); // obj = 4 _Solution extraction._ For $v_(j,i) in S$ with literal $x_k$: set $x_k = 1$; for $overline(x_k)$: set $x_k = 0$. ] +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_sat_to_is.rs")[`reduction_sat_to_is.rs`]. + +#let sat_is = load-example("sat_to_is") +#let sat_is_r = load-results("sat_to_is") +#let sat_is_sol = sat_is_r.solutions.at(0) +#reduction-example(sat_is, caption: [$phi = (x_1 or x_2) and (not x_1 or x_3) and (x_2 or not x_3)$])[ + SAT assignment: $x_1=#sat_is_sol.source_config.at(0), x_2=#sat_is_sol.source_config.at(1), x_3=#sat_is_sol.source_config.at(2)$ #h(1em) + IS graph: #sat_is.target.instance.num_vertices vertices, #sat_is.target.instance.num_edges edges (one vertex per literal occurrence) +] + #theorem[ *(SAT $arrow.r$ 3-Coloring)* @garey1979 Given CNF $phi$, construct graph $G$ such that $phi$ is satisfiable iff $G$ is 3-colorable. [_Problems:_ @def:satisfiability, @def:coloring.] -] +] #proof[ _Construction._ (1) Base triangle: TRUE, FALSE, AUX vertices with all pairs connected. (2) Variable gadget for $x_i$: vertices $"pos"_i$, $"neg"_i$ connected to each other and to AUX. (3) Clause gadget: for $(ell_1 or ... or ell_k)$, apply OR-gadgets iteratively producing output $o$, then connect $o$ to FALSE and AUX. @@ -712,9 +720,11 @@ assert_eq!(ilp_solution, vec![1, 0, 1]); // obj = 4 _Solution extraction._ Set $x_i = 1$ iff $"color"("pos"_i) = "color"("TRUE")$. 
] +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_sat_to_coloring.rs")[`reduction_sat_to_coloring.rs`]. + #theorem[ *(SAT $arrow.r$ Dominating Set)* @garey1979 Given CNF $phi$ with $n$ variables and $m$ clauses, $phi$ is satisfiable iff the constructed graph has a dominating set of size $n$. [_Problems:_ @def:satisfiability, @def:dominating-set.] -] +] #proof[ _Construction._ (1) Variable triangle for $x_i$: vertices $"pos"_i = 3i$, $"neg"_i = 3i+1$, $"dum"_i = 3i+2$ forming a triangle. (2) Clause vertex $c_j = 3n+j$ connected to $"pos"_i$ if $x_i in C_j$, to $"neg"_i$ if $overline(x_i) in C_j$. @@ -724,9 +734,11 @@ assert_eq!(ilp_solution, vec![1, 0, 1]); // obj = 4 _Solution extraction._ Set $x_i = 1$ if $"pos"_i$ selected; $x_i = 0$ if $"neg"_i$ selected. ] +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_sat_to_dominatingset.rs")[`reduction_sat_to_dominatingset.rs`]. + #theorem[ *(SAT $arrow.l.r$ $k$-SAT)* @cook1971 @garey1979 Any SAT formula converts to $k$-SAT ($k >= 3$) preserving satisfiability. [_Problems:_ @def:satisfiability, @def:k-sat.] -] +] #proof[ _Small clauses ($|C| < k$):_ Pad $(ell_1 or ... or ell_r)$ with auxiliary $y$: $(ell_1 or ... or ell_r or y or overline(y) or ...)$ to length $k$. @@ -737,9 +749,11 @@ assert_eq!(ilp_solution, vec![1, 0, 1]); // obj = 4 _Correctness._ Original clause true $arrow.l.r$ auxiliary chain can propagate truth through new clauses. ] +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_sat_to_ksat.rs")[`reduction_sat_to_ksat.rs`]. + #theorem[ *(CircuitSAT $arrow.r$ Spin Glass)* @whitfield2012 @lucas2014 Each gate maps to a gadget whose ground states encode valid I/O. [_Problems:_ @def:circuit-sat, @def:spin-glass.] -] +] #proof[ _Spin mapping:_ $sigma in {0,1} arrow.bar s = 2sigma - 1 in {-1, +1}$. 
@@ -761,9 +775,11 @@ assert_eq!(ilp_solution, vec![1, 0, 1]); // obj = 4 caption: [Ising gadgets for logic gates. Ground states match truth tables.] ) +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_circuit_to_spinglass.rs")[`reduction_circuit_to_spinglass.rs`]. + #theorem[ *(Factoring $arrow.r$ Circuit-SAT)* An array multiplier with output constrained to $N$ is satisfiable iff $N$ factors within bit bounds. _(Folklore; no canonical reference.)_ [_Problems:_ @def:factoring, @def:circuit-sat.] -] +] #proof[ _Construction._ Build $m times n$ array multiplier for $p times q$: @@ -777,9 +793,11 @@ assert_eq!(ilp_solution, vec![1, 0, 1]); // obj = 4 _Solution extraction._ $p = sum_i p_i 2^(i-1)$, $q = sum_j q_j 2^(j-1)$. ] +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_factoring_to_circuit.rs")[`reduction_factoring_to_circuit.rs`]. + #theorem[ *(Spin Glass $arrow.l.r$ Max-Cut)* @barahona1982 @lucas2014 Ground states of Ising models correspond to maximum cuts. [_Problems:_ @def:spin-glass, @def:max-cut.] -] +] #proof[ _MaxCut $arrow.r$ SpinGlass:_ Set $J_(i j) = w_(i j)$, $h_i = 0$. Maximizing cut equals minimizing $-sum J_(i j) s_i s_j$ since $s_i s_j = -1$ when $s_i != s_j$. @@ -801,9 +819,11 @@ let sg_solution = result.extract_solution(&maxcut_solutions[0]); assert_eq!(sg_solution.len(), 3); ``` +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_spinglass_to_maxcut.rs")[`reduction_spinglass_to_maxcut.rs`]. + #theorem[ *(Coloring $arrow.r$ ILP)* The $k$-coloring problem reduces to binary ILP with $|V| dot k$ variables and $|V| + |E| dot k$ constraints. [_Problems:_ @def:coloring, @def:ilp.] -] +] #proof[ _Construction._ For graph $G = (V, E)$ with $k$ colors: @@ -819,9 +839,11 @@ assert_eq!(sg_solution.len(), 3); _Solution extraction._ For each vertex $v$, find $c$ with $x_(v,c) = 1$; assign color $c$ to $v$. 
] +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_coloring_to_ilp.rs")[`reduction_coloring_to_ilp.rs`]. + #theorem[ *(Factoring $arrow.r$ ILP)* Integer factorization reduces to binary ILP using McCormick linearization with $O(m n)$ variables and constraints. [_Problems:_ @def:factoring, @def:ilp.] -] +] #proof[ _Construction._ For target $N$ with $m$-bit factor $p$ and $n$-bit factor $q$: @@ -866,11 +888,87 @@ let (p, q) = problem.read_factors(&extracted); assert_eq!(p * q, 15); // e.g., (3, 5) or (5, 3) ``` +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_factoring_to_ilp.rs")[`reduction_factoring_to_ilp.rs`]. + +== ILP Formulations + +The following reductions to Integer Linear Programming are straightforward formulations where problem constraints map directly to linear inequalities. + +#theorem[ + *(IS $arrow.r$ ILP)* The maximum-weight IS problem reduces to binary ILP with $|V|$ variables and $|E|$ constraints. [_Problems:_ @def:independent-set, @def:ilp.] +] + +#proof[ + _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v <= 1$ for each $(u, v) in E$. Objective: maximize $sum_v w_v x_v$. _Solution extraction:_ $S = {v : x_v = 1}$. +] + +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_is_to_ilp.rs")[`reduction_is_to_ilp.rs`]. + +#theorem[ + *(VC $arrow.r$ ILP)* The minimum-weight VC problem reduces to binary ILP with $|V|$ variables and $|E|$ constraints. [_Problems:_ @def:vertex-cover, @def:ilp.] +] + +#proof[ + _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v >= 1$ for each $(u, v) in E$. Objective: minimize $sum_v w_v x_v$. _Solution extraction:_ $C = {v : x_v = 1}$. +] + +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_vc_to_ilp.rs")[`reduction_vc_to_ilp.rs`]. 
+ +#theorem[ + *(Matching $arrow.r$ ILP)* The maximum-weight matching reduces to binary ILP with $|E|$ variables and $|V|$ constraints. [_Problems:_ @def:matching, @def:ilp.] +] + +#proof[ + _Construction._ Variables: $x_e in {0, 1}$ for each $e in E$. Constraints: $sum_(e in.rev v) x_e <= 1$ for each $v in V$. Objective: maximize $sum_e w_e x_e$. _Solution extraction:_ $M = {e : x_e = 1}$. +] + +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_matching_to_ilp.rs")[`reduction_matching_to_ilp.rs`]. + +#theorem[ + *(SetPacking $arrow.r$ ILP)* Set packing reduces to binary ILP with $|cal(S)|$ variables and at most $binom(|cal(S)|, 2)$ constraints. [_Problems:_ @def:set-packing, @def:ilp.] +] + +#proof[ + _Construction._ Variables: $x_i in {0, 1}$ for each $S_i in cal(S)$. Constraints: $x_i + x_j <= 1$ for each overlapping pair $S_i, S_j in cal(S)$ with $S_i inter S_j != emptyset$. Objective: maximize $sum_i w_i x_i$. _Solution extraction:_ $cal(P) = {S_i : x_i = 1}$. +] + +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_setpacking_to_ilp.rs")[`reduction_setpacking_to_ilp.rs`]. + +#theorem[ + *(SetCovering $arrow.r$ ILP)* Set covering reduces to binary ILP with $|cal(S)|$ variables and $|U|$ constraints. [_Problems:_ @def:set-covering, @def:ilp.] +] + +#proof[ + _Construction._ Variables: $x_i in {0, 1}$ for each $S_i in cal(S)$. Constraints: $sum_(S_i in.rev u) x_i >= 1$ for each $u in U$. Objective: minimize $sum_i w_i x_i$. _Solution extraction:_ $cal(C) = {S_i : x_i = 1}$. +] + +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_setcovering_to_ilp.rs")[`reduction_setcovering_to_ilp.rs`]. + +#theorem[ + *(DominatingSet $arrow.r$ ILP)* Dominating set reduces to binary ILP with $|V|$ variables and $|V|$ constraints. [_Problems:_ @def:dominating-set, @def:ilp.] +] + +#proof[ + _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. 
Constraints: $x_v + sum_(u in N(v)) x_u >= 1$ for each $v in V$ (each vertex dominated). Objective: minimize $sum_v w_v x_v$. _Solution extraction:_ $D = {v : x_v = 1}$. +] + +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_dominatingset_to_ilp.rs")[`reduction_dominatingset_to_ilp.rs`]. + +#theorem[ + *(Clique $arrow.r$ ILP)* Maximum clique reduces to binary ILP with $|V|$ variables and $O(|overline(E)|)$ constraints. [_Problems:_ @def:clique, @def:ilp.] +] + +#proof[ + _Construction._ Variables: $x_v in {0, 1}$ for each $v in V$. Constraints: $x_u + x_v <= 1$ for each $(u, v) in.not E$ (non-edges). Objective: maximize $sum_v x_v$. Equivalently, IS on the complement graph. _Solution extraction:_ $K = {v : x_v = 1}$. +] + +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_clique_to_ilp.rs")[`reduction_clique_to_ilp.rs`]. + == Unit Disk Mapping #theorem[ *(IS $arrow.r$ GridGraph IS)* @nguyen2023 Any MIS problem on a general graph $G$ can be reduced to MIS on a unit disk graph (King's subgraph) with at most quadratic overhead in the number of vertices. [_Problem:_ @def:independent-set.] -] +] #proof[ _Construction (Copy-Line Method)._ Given $G = (V, E)$ with $n = |V|$: @@ -1025,6 +1123,56 @@ assert_eq!(p * q, 15); // e.g., (3, 5) or (5, 3) 2. Using XOR gadgets for couplings: $x_"out" = not(x_1 xor x_2)$ 3. Adding weights for linear and quadratic terms +See #link("https://github.com/CodingThrust/problem-reductions/blob/main/examples/export_petersen_mapping.rs")[`export_petersen_mapping.rs`]. + +== Resource Estimation from Examples + +The following table shows concrete variable overhead for example instances, generated from the reduction examples (`make examples`). 
+ +#let example-files = ( + "is_to_vc", "vc_to_is", "is_to_setpacking", "matching_to_setpacking", + "vc_to_setcovering", + "maxcut_to_spinglass", "spinglass_to_maxcut", + "spinglass_to_qubo", "qubo_to_spinglass", + "is_to_qubo", "vc_to_qubo", "coloring_to_qubo", + "setpacking_to_qubo", "ksatisfiability_to_qubo", "ilp_to_qubo", + "sat_to_is", "sat_to_coloring", "sat_to_dominatingset", "sat_to_ksat", + "circuit_to_spinglass", "factoring_to_circuit", + "is_to_ilp", "vc_to_ilp", "matching_to_ilp", + "coloring_to_ilp", "factoring_to_ilp", + "setpacking_to_ilp", "setcovering_to_ilp", + "dominatingset_to_ilp", "clique_to_ilp", +) + +#let examples = example-files.map(n => { + let d = load-example(n) + (name: n, data: d) +}) + +#figure( + table( + columns: (auto, auto, auto, auto, auto), + inset: 5pt, + align: (left, left, right, right, right), + table.header([*Reduction*], [*Instance*], [*Src Vars*], [*Tgt Vars*], [*Ratio*]), + ..examples.map(ex => { + let d = ex.data + let sv = instance-vars(d.source.instance) + let tv = instance-vars(d.target.instance) + let ratio = if sv > 0 { calc.round(tv / sv, digits: 1) } else { 0 } + let label = ex.name.replace("_to_", " $arrow.r$ ").replace("_", " ") + ( + [#label], + [#d.source.problem $arrow.r$ #d.target.problem], + [#sv], + [#tv], + [#(ratio)x], + ) + }).flatten() + ), + caption: [Concrete variable overhead for all example instances. Generated by `make examples`.] 
+) + = Summary #let gray = rgb("#e8e8e8") @@ -1054,6 +1202,13 @@ assert_eq!(p * q, 15); // e.g., (3, 5) or (5, 3) [SpinGlass $arrow.l.r$ MaxCut], [$O(n + |J|)$], [@barahona1982 @lucas2014], table.cell(fill: gray)[Coloring $arrow.r$ ILP], table.cell(fill: gray)[$O(|V| dot k + |E| dot k)$], table.cell(fill: gray)[—], table.cell(fill: gray)[Factoring $arrow.r$ ILP], table.cell(fill: gray)[$O(m n)$], table.cell(fill: gray)[—], + table.cell(fill: gray)[IS $arrow.r$ ILP], table.cell(fill: gray)[$O(|V| + |E|)$], table.cell(fill: gray)[—], + table.cell(fill: gray)[VC $arrow.r$ ILP], table.cell(fill: gray)[$O(|V| + |E|)$], table.cell(fill: gray)[—], + table.cell(fill: gray)[Matching $arrow.r$ ILP], table.cell(fill: gray)[$O(|E| + |V|)$], table.cell(fill: gray)[—], + table.cell(fill: gray)[SetPacking $arrow.r$ ILP], table.cell(fill: gray)[$O(|cal(S)|^2)$], table.cell(fill: gray)[—], + table.cell(fill: gray)[SetCovering $arrow.r$ ILP], table.cell(fill: gray)[$O(|cal(S)| + |U|)$], table.cell(fill: gray)[—], + table.cell(fill: gray)[DominatingSet $arrow.r$ ILP], table.cell(fill: gray)[$O(|V| + |E|)$], table.cell(fill: gray)[—], + table.cell(fill: gray)[Clique $arrow.r$ ILP], table.cell(fill: gray)[$O(|V| + |overline(E)|)$], table.cell(fill: gray)[—], [IS $arrow.r$ GridGraph IS], [$O(n^2)$], [@nguyen2023], ), caption: [Summary of reductions. Gray rows indicate trivial (complement/isomorphism) reductions.] diff --git a/docs/plans/2026-02-10-polish-reductions-implementation.md b/docs/plans/2026-02-10-polish-reductions-implementation.md new file mode 100644 index 00000000..af991906 --- /dev/null +++ b/docs/plans/2026-02-10-polish-reductions-implementation.md @@ -0,0 +1,1060 @@ +# Polish Reductions.typ Documentation - Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. 
+
+**Goal:** Polish reductions.typ by connecting math to code, adding theorem labels, expanding proofs, and creating 28 example files with JSON export.
+
+**Architecture:** 5-pass approach: (1) Add theorem labels, (2) Enhance problem definitions, (3) Expand proofs + add links, (4) Create example files, (5) Verify compilation.
+
+**Tech Stack:** Typst (docs/paper/reductions.typ), Rust (examples/), serde_json (JSON export)
+
+**Design Reference:** `docs/plans/2026-02-10-polish-reductions-paper.md`
+
+---
+
+## Overview
+
+This plan implements a comprehensive documentation polish consisting of:
+- **Pass 1:** Add `<thm:...>` labels to all theorems for cross-referencing
+- **Pass 2:** Enhance 15 problem definitions with field mappings and theorem links
+- **Pass 3:** Expand trivial reduction proofs and add GitHub example links
+- **Pass 4:** Create 28 example files with JSON export (split existing qubo_reductions.rs)
+- **Pass 5:** Verify `make paper` compiles and all examples run
+
+**Important Notes:**
+- pkgref/ contains reference implementations (ProblemReductions.jl, UnitDiskMapping.jl, qubogen)
+- Unit Disk Mapping already has export_petersen_mapping.rs (polished)
+- Each example exports JSON to docs/paper/examples/ for paper code blocks
+
+---
+
+## PASS 1: Add Theorem Labels
+
+### Task 1.1: Scan and catalog all theorems
+
+**Files:**
+- Read: `docs/paper/reductions.typ:312-940`
+
+**Step 1: Extract theorem titles**
+
+```bash
+cd /Users/liujinguo/rcode/problemreductions
+grep -n "^#theorem\[" docs/paper/reductions.typ
+```
+
+Expected: List of line numbers and theorem titles (e.g., "*(IS $arrow.l.r$ VC)*")
+
+**Step 2: Create theorem-to-label mapping**
+
+Create temporary file listing all theorems with proposed labels:
+- IS ↔ VC → `<thm:is-to-vc>` (bidirectional)
+- IS → SetPacking → `<thm:is-to-setpacking>`
+- SpinGlass ↔ QUBO → `<thm:spinglass-to-qubo>` (bidirectional)
+- etc.
+
+Save to: `docs/paper/.theorem_labels.txt` (temporary scratch file)
+
+**Step 3: Verify no duplicate labels**
+
+Check for uniqueness in the mapping file. 
+
+Expected: All labels unique
+
+
+### Task 1.2: Add labels to trivial reduction theorems
+
+**Files:**
+- Modify: `docs/paper/reductions.typ:314-370`
+
+**Step 1: Add label to IS ↔ VC theorem**
+
+Find the theorem block starting with `*(IS $arrow.l.r$ VC)*` and add label after the closing bracket:
+
+```typst
+#theorem[
+  *(IS $arrow.l.r$ VC)* $S subset.eq V$ is independent iff $V backslash S$ is a vertex cover, with $|"IS"| + |"VC"| = |V|$. [_Problems:_ @def:independent-set, @def:vertex-cover.]
+] <thm:is-to-vc>
+```
+
+**Step 2: Add label to IS → SetPacking theorem**
+
+```typst
+#theorem[
+  *(IS $arrow.r$ Set Packing)* Construct $U = E$, $S_v = {e in E : v in e}$, $w(S_v) = w(v)$. Then $I$ is independent iff ${S_v : v in I}$ is a packing. [_Problems:_ @def:independent-set, @def:set-packing.]
+] <thm:is-to-setpacking>
+```
+
+**Step 3: Add labels to remaining trivial reductions**
+
+Continue adding labels to:
+- VC → SetCovering: `<thm:vc-to-setcovering>`
+- Matching → SetPacking: `<thm:matching-to-setpacking>`
+- SpinGlass ↔ QUBO: `<thm:spinglass-to-qubo>` (bidirectional)
+
+**Step 4: Commit trivial reduction labels**
+
+```bash
+git add docs/paper/reductions.typ
+git commit -m "docs: add theorem labels to trivial reductions
+
+Added labels to IS↔VC, IS→SetPacking, VC→SetCovering,
+Matching→SetPacking, SpinGlass↔QUBO for cross-referencing"
+```
+
+
+### Task 1.3: Add labels to penalty-method QUBO theorems
+
+**Files:**
+- Modify: `docs/paper/reductions.typ:384-560`
+
+**Step 1: Add label to IS → QUBO theorem**
+
+```typst
+#theorem[
+  *(IS $arrow.r$ QUBO)* Given $G = (V, E)$ with weights $w$, construct upper-triangular $Q in RR^(n times n)$ with $Q_(i i) = -w_i$ and $Q_(i j) = P$ for $(i,j) in E$ ($i < j$), where $P = 1 + sum_i w_i$. 
Then minimizing $f(bold(x)) = sum_i Q_(i i) x_i + sum_(i<j) Q_(i j) x_i x_j$ recovers the maximum-weight independent set. [_Problems:_ @def:independent-set, @def:qubo.]
+] <thm:is-to-qubo>
+```
+
+**Step 2: Add labels to remaining QUBO reductions**
+
+Continue with:
+- VC → QUBO: `<thm:vc-to-qubo>`
+- KColoring → QUBO: `<thm:coloring-to-qubo>`
+- SetPacking → QUBO: `<thm:setpacking-to-qubo>`
+- 2-SAT → QUBO: `<thm:ksat-to-qubo>`
+- Binary ILP → QUBO: `<thm:ilp-to-qubo>`
+
+**Step 3: Commit penalty-method labels**
+
+```bash
+git add docs/paper/reductions.typ
+git commit -m "docs: add theorem labels to penalty-method QUBO reductions
+
+Added labels for IS→QUBO, VC→QUBO, Coloring→QUBO, SetPacking→QUBO,
+KSatisfiability→QUBO, ILP→QUBO"
+```
+
+
+### Task 1.4: Add labels to non-trivial reduction theorems
+
+**Files:**
+- Modify: `docs/paper/reductions.typ:562-720`
+
+**Step 1: Add labels to SAT reduction theorems**
+
+```typst
+#theorem[
+  *(SAT $arrow.r$ IS)* @karp1972 Given CNF $phi$ with $m$ clauses, construct graph $G$ such that $phi$ is satisfiable iff $G$ has an IS of size $m$. [_Problems:_ @def:satisfiability, @def:independent-set.]
+] <thm:sat-to-is>
+
+#theorem[
+  *(SAT $arrow.r$ 3-Coloring)* @garey1979 Given CNF $phi$, construct graph $G$ such that $phi$ is satisfiable iff $G$ is 3-colorable. [_Problems:_ @def:satisfiability, @def:coloring.]
+] <thm:sat-to-coloring>
+
+#theorem[
+  *(SAT $arrow.r$ Dominating Set)* @garey1979 Given CNF $phi$ with $n$ variables and $m$ clauses, $phi$ is satisfiable iff the constructed graph has a dominating set of size $n$. [_Problems:_ @def:satisfiability, @def:dominating-set.]
+] <thm:sat-to-dominatingset>
+
+#theorem[
+  *(SAT $arrow.l.r$ $k$-SAT)* @cook1971 @garey1979 Any SAT formula converts to $k$-SAT ($k >= 3$) preserving satisfiability. [_Problems:_ @def:satisfiability, @def:k-sat.]
+] <thm:sat-to-ksat>
+```
+
+**Step 2: Add labels to CircuitSAT and Factoring theorems**
+
+```typst
+#theorem[
+  *(CircuitSAT $arrow.r$ Spin Glass)* @whitfield2012 @lucas2014 Each gate maps to a gadget whose ground states encode valid I/O. [_Problems:_ @def:circuit-sat, @def:spin-glass.]
+] <thm:circuit-to-spinglass>
+
+#theorem[
+  *(Factoring $arrow.r$ Circuit-SAT)* An array multiplier with output constrained to $N$ is satisfiable iff $N$ factors within bit bounds. 
_(Folklore; no canonical reference.)_ [_Problems:_ @def:factoring, @def:circuit-sat.]
+] <thm:factoring-to-circuit>
+```
+
+**Step 3: Add labels to SpinGlass ↔ MaxCut and ILP theorems**
+
+```typst
+#theorem[
+  *(Spin Glass $arrow.l.r$ Max-Cut)* @barahona1982 @lucas2014 Ground states of Ising models correspond to maximum cuts. [_Problems:_ @def:spin-glass, @def:max-cut.]
+] <thm:spinglass-maxcut>
+
+#theorem[
+  *(Coloring $arrow.r$ ILP)* The $k$-coloring problem reduces to binary ILP with $|V| dot k$ variables and $|V| + |E| dot k$ constraints. [_Problems:_ @def:coloring, @def:ilp.]
+] <thm:coloring-to-ilp>
+
+#theorem[
+  *(Factoring $arrow.r$ ILP)* Integer factorization reduces to binary ILP using McCormick linearization with $O(m n)$ variables and constraints. [_Problems:_ @def:factoring, @def:ilp.]
+] <thm:factoring-to-ilp>
+```
+
+**Step 4: Add label to Unit Disk Mapping theorem**
+
+```typst
+#theorem[
+  *(IS $arrow.r$ GridGraph IS)* @nguyen2023 Any MIS problem on a general graph $G$ can be reduced to MIS on a unit disk graph (King's subgraph) with at most quadratic overhead in the number of vertices. [_Problem:_ @def:independent-set.]
+] <thm:is-to-gridgraph>
+```
+
+**Step 5: Commit non-trivial reduction labels**
+
+```bash
+git add docs/paper/reductions.typ
+git commit -m "docs: add theorem labels to non-trivial reductions
+
+Added labels for SAT→IS, SAT→Coloring, SAT→DominatingSet, SAT↔KSAT,
+CircuitSAT→SpinGlass, Factoring→Circuit, SpinGlass↔MaxCut,
+Coloring→ILP, Factoring→ILP, IS→GridGraph"
+```
+
+
+### Task 1.5: Verify all theorem labels added
+
+**Step 1: Count theorems and labels**
+
+```bash
+cd /Users/liujinguo/rcode/problemreductions
+echo "Theorems:" && grep -c "^#theorem\[" docs/paper/reductions.typ
+echo "Labels:" && grep -c "] <thm:" docs/paper/reductions.typ
+```
+
+Expected: The two counts match (every theorem carries a label)
+
+**Step 2: Check label naming convention**
+
+```bash
+grep -o "<thm:[a-z0-9-]*>" docs/paper/reductions.typ | sed 's/<thm:\([a-z0-9-]*\)>/\1/' | sort
+```
+
+Expected: All labels use lowercase with hyphens, no duplicates
+
+**Step 3: Commit verification notes**
+
+Update `.theorem_labels.txt` with final mapping, commit as documentation. 
+ +```bash +git add docs/paper/.theorem_labels.txt +git commit -m "docs: add theorem label mapping for reference" +``` + +--- + +## PASS 2: Enhance Problem Definitions + +### Task 2.1: Enhance Independent Set definition + +**Files:** +- Modify: `docs/paper/reductions.typ:65-78` + +**Step 1: Add field mapping paragraph after struct** + +Find the Independent Set definition and add after the Rust struct: + +```typst +#definition("Independent Set (IS)")[ + Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ maximizing $sum_(v in S) w(v)$ such that no two vertices in $S$ are adjacent: $forall u, v in S: (u, v) in.not E$. + + ```rust + pub struct IndependentSet { + graph: UnGraph<(), ()>, // The underlying graph + weights: Vec, // Weights for each vertex + } + ``` + + Where `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$, and `weights` stores vertex weights $w: V -> RR$ indexed by vertex ID. The solution is a subset $S subset.eq V$ represented as a `Vec` of vertex indices. + + _Implemented reductions:_ IS→SetPacking (@thm:is-to-setpacking), IS→QUBO (@thm:is-to-qubo), IS→ILP (@thm:is-to-ilp), VC→IS (@thm:is-to-vc), SAT→IS (@thm:sat-to-is). +] +``` + +**Step 2: Commit Independent Set enhancement** + +```bash +git add docs/paper/reductions.typ +git commit -m "docs: enhance Independent Set definition with field mappings + +Added field mapping paragraph and replaced problem links with +theorem references for IS definition" +``` + + +### Task 2.2: Enhance Vertex Cover definition + +**Files:** +- Modify: `docs/paper/reductions.typ:80-93` + +**Step 1: Add field mapping and theorem links** + +```typst +#definition("Vertex Cover (VC)")[ + Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ minimizing $sum_(v in S) w(v)$ such that every edge has at least one endpoint in $S$: $forall (u, v) in E: u in S or v in S$. 
+ + ```rust + pub struct VertexCovering { + graph: UnGraph<(), ()>, // The underlying graph + weights: Vec, // Weights for each vertex + } + ``` + + Where `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$, and `weights` stores vertex weights $w: V -> RR$ indexed by vertex ID. The solution is a subset $S subset.eq V$ represented as a `Vec` of vertex indices covering all edges. + + _Implemented reductions:_ VC→IS (@thm:is-to-vc), VC→SetCovering (@thm:vc-to-setcovering), VC→QUBO (@thm:vc-to-qubo), VC→ILP (@thm:vc-to-ilp), IS→VC (@thm:is-to-vc). +] +``` + +**Step 2: Commit Vertex Cover enhancement** + +```bash +git add docs/paper/reductions.typ +git commit -m "docs: enhance Vertex Cover definition + +Added field mapping paragraph and theorem references" +``` + + +### Task 2.3: Enhance remaining graph problem definitions + +**Files:** +- Modify: `docs/paper/reductions.typ:95-153` + +**Step 1: Enhance Max-Cut, Graph Coloring, Dominating Set, Matching definitions** + +For each definition: +1. Add field mapping paragraph after struct +2. Replace "Reduces to/from" with "Implemented reductions" using theorem labels +3. 
Keep existing struct code block + +**Step 2: Commit graph problem enhancements** + +```bash +git add docs/paper/reductions.typ +git commit -m "docs: enhance graph problem definitions + +Added field mappings and theorem references for Max-Cut, Coloring, +DominatingSet, Matching, Unit Disk Graph" +``` + + +### Task 2.4: Enhance set problem definitions + +**Files:** +- Modify: `docs/paper/reductions.typ:155-184` + +**Step 1: Enhance Set Packing and Set Covering** + +Add field mapping paragraphs: +- Set Packing: Explain `sets` as collection, `weights` as set weights +- Set Covering: Explain `universe_size`, `sets`, `weights` + +**Step 2: Commit set problem enhancements** + +```bash +git add docs/paper/reductions.typ +git commit -m "docs: enhance set problem definitions + +Added field mappings and theorem references for SetPacking and SetCovering" +``` + + +### Task 2.5: Enhance optimization problem definitions + +**Files:** +- Modify: `docs/paper/reductions.typ:186-242` + +**Step 1: Enhance Spin Glass, QUBO, ILP definitions** + +- SpinGlass: Explain `num_spins`, `interactions` (J_ij), `fields` (h_i) +- QUBO: Explain `num_vars`, `matrix` (upper triangular Q) +- ILP: Explain `num_vars`, `bounds`, `constraints`, `objective`, `sense` + +**Step 2: Commit optimization problem enhancements** + +```bash +git add docs/paper/reductions.typ +git commit -m "docs: enhance optimization problem definitions + +Added field mappings and theorem references for SpinGlass, QUBO, ILP" +``` + + +### Task 2.6: Enhance satisfiability problem definitions + +**Files:** +- Modify: `docs/paper/reductions.typ:244-310` + +**Step 1: Enhance SAT, K-SAT, Circuit-SAT, Factoring definitions** + +- SAT: Explain `num_vars`, `clauses` (CNFClause), `weights` +- K-SAT: Similar to SAT but with K literals per clause +- Circuit-SAT: Explain `circuit`, `variables`, `weights` +- Factoring: Explain `m`, `n`, `target` + +**Step 2: Commit satisfiability problem enhancements** + +```bash +git add 
docs/paper/reductions.typ +git commit -m "docs: enhance satisfiability problem definitions + +Added field mappings and theorem references for SAT, K-SAT, +CircuitSAT, Factoring" +``` + + +### Task 2.7: Verify all problem definitions enhanced + +**Step 1: Count field mapping paragraphs** + +```bash +grep -c "Where \`" docs/paper/reductions.typ +``` + +Expected: 15 (one per problem definition) + +**Step 2: Check all use theorem references** + +```bash +grep "_Implemented reductions:_" docs/paper/reductions.typ | wc -l +``` + +Expected: 15 + +**Step 3: Commit verification checkpoint** + +```bash +git commit --allow-empty -m "checkpoint: Pass 2 complete - all problem definitions enhanced" +``` + +--- + +## PASS 3: Expand Proofs and Add Example Links + +### Task 3.1: Expand trivial reduction proofs + +**Files:** +- Modify: `docs/paper/reductions.typ:316-370` + +**Step 1: Expand IS ↔ VC proof** + +Find the proof block and add variable mapping section at end: + +```typst +#proof[ + ($arrow.r.double$) If $S$ is independent, for any $(u, v) in E$, at most one endpoint lies in $S$, so $V backslash S$ covers all edges. ($arrow.l.double$) If $C$ is a cover, for any $u, v in V backslash C$, $(u, v) in.not E$, so $V backslash C$ is independent. + + _Variable mapping:_ Given IS instance $(G, w)$, create VC instance $(G, w)$ with identical graph and weights. Solution extraction: for VC solution $C$, return $S = V backslash C$. The complement operation preserves optimality since $|S| + |C| = |V|$ is constant. 
+] +``` + +**Step 2: Expand remaining trivial proofs** + +Add variable mapping sections to: +- IS → SetPacking: Explain edge set mapping +- VC → SetCovering: Explain edge coverage mapping +- Matching → SetPacking: Explain endpoint mapping +- SpinGlass ↔ QUBO: Already has formula expansion, just verify + +**Step 3: Commit expanded trivial proofs** + +```bash +git add docs/paper/reductions.typ +git commit -m "docs: expand trivial reduction proofs with variable mappings + +Added explicit variable mapping explanations to IS↔VC, IS→SetPacking, +VC→SetCovering, Matching→SetPacking proofs" +``` + + +### Task 3.2: Add GitHub links to trivial reductions + +**Files:** +- Modify: `docs/paper/reductions.typ:316-370` + +**Step 1: Add example link after IS ↔ VC proof** + +After the proof block, before the code example: + +```typst +See [reduction example](https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_is_to_vc.rs). +``` + +**Step 2: Add links to all trivial reductions** + +Continue for: +- IS → SetPacking: `examples/reduction_is_to_setpacking.rs` +- VC → SetCovering: `examples/reduction_vc_to_setcovering.rs` +- Matching → SetPacking: `examples/reduction_matching_to_setpacking.rs` +- SpinGlass ↔ QUBO: `examples/reduction_spinglass_to_qubo.rs` and `reduction_qubo_to_spinglass.rs` + +**Step 3: Commit GitHub links** + +```bash +git add docs/paper/reductions.typ +git commit -m "docs: add GitHub example links to trivial reductions" +``` + + +### Task 3.3: Add GitHub links to QUBO reductions + +**Files:** +- Modify: `docs/paper/reductions.typ:384-560` + +**Step 1: Add links after each QUBO reduction proof** + +After each proof/code example: +- IS → QUBO: `examples/reduction_is_to_qubo.rs` +- VC → QUBO: `examples/reduction_vc_to_qubo.rs` +- KColoring → QUBO: `examples/reduction_coloring_to_qubo.rs` +- SetPacking → QUBO: `examples/reduction_setpacking_to_qubo.rs` +- 2-SAT → QUBO: `examples/reduction_ksatisfiability_to_qubo.rs` +- Binary ILP → QUBO: 
`examples/reduction_ilp_to_qubo.rs` + +**Step 2: Commit QUBO reduction links** + +```bash +git add docs/paper/reductions.typ +git commit -m "docs: add GitHub example links to QUBO reductions" +``` + + +### Task 3.4: Add GitHub links to non-trivial reductions + +**Files:** +- Modify: `docs/paper/reductions.typ:562-770` + +**Step 1: Add links to SAT reduction theorems** + +- SAT → IS: `examples/reduction_sat_to_is.rs` +- SAT → 3-Coloring: `examples/reduction_sat_to_coloring.rs` +- SAT → Dominating Set: `examples/reduction_sat_to_dominatingset.rs` +- SAT ↔ K-SAT: `examples/reduction_sat_to_ksat.rs` + +**Step 2: Add links to remaining reductions** + +- CircuitSAT → SpinGlass: `examples/reduction_circuit_to_spinglass.rs` +- Factoring → Circuit: `examples/reduction_factoring_to_circuit.rs` +- SpinGlass ↔ MaxCut: `examples/reduction_spinglass_to_maxcut.rs` and `reduction_maxcut_to_spinglass.rs` +- Coloring → ILP: `examples/reduction_coloring_to_ilp.rs` +- Factoring → ILP: `examples/reduction_factoring_to_ilp.rs` + +**Step 3: Add link to Unit Disk Mapping** + +After the Unit Disk Mapping theorem proof: + +```typst +See [unit disk mapping example](https://github.com/CodingThrust/problem-reductions/blob/main/examples/export_petersen_mapping.rs). 
+``` + +**Step 4: Commit non-trivial reduction links** + +```bash +git add docs/paper/reductions.typ +git commit -m "docs: add GitHub example links to non-trivial reductions + +Added links for SAT, CircuitSAT, Factoring, SpinGlass/MaxCut, ILP +reductions and Unit Disk Mapping" +``` + + +### Task 3.5: Verify all theorems have example links + +**Step 1: Count GitHub links** + +```bash +grep -c "See \[.*example\](https://github.com" docs/paper/reductions.typ +``` + +Expected: 28+ (one per reduction theorem) + +**Step 2: Verify link format consistency** + +```bash +grep "See \[.*example\]" docs/paper/reductions.typ +``` + +Expected: All use same format with `/blob/main/examples/` + +**Step 3: Commit verification checkpoint** + +```bash +git commit --allow-empty -m "checkpoint: Pass 3 complete - all proofs expanded and linked" +``` + +--- + +## PASS 4: Create Example Files + +### Task 4.1: Split qubo_reductions.rs into separate files + +**Files:** +- Read: `examples/qubo_reductions.rs` +- Create: 6 new files in `examples/` + +**Step 1: Create reduction_is_to_qubo.rs** + +Extract the `demo_independent_set()` function and create standalone example following the template from design doc. 
Include: +- Detailed docstring with mathematical equivalence +- Problem transformation metrics output +- JSON export to `docs/paper/examples/is_to_qubo.json` + +**Step 2: Test the example compiles and runs** + +```bash +cargo check --example reduction_is_to_qubo +cargo run --example reduction_is_to_qubo +``` + +Expected: Compiles, runs, outputs metrics, exports JSON + +**Step 3: Commit IS → QUBO example** + +```bash +git add examples/reduction_is_to_qubo.rs +git commit -m "feat: add IS→QUBO reduction example with JSON export + +Extracted from qubo_reductions.rs, added detailed docstring +and JSON export for paper integration" +``` + +**Step 4: Repeat for remaining 5 QUBO examples** + +Create in order: +- `reduction_vc_to_qubo.rs` from `demo_vertex_covering()` +- `reduction_coloring_to_qubo.rs` from `demo_coloring()` +- `reduction_setpacking_to_qubo.rs` from `demo_set_packing()` +- `reduction_ksatisfiability_to_qubo.rs` from `demo_ksat()` +- `reduction_ilp_to_qubo.rs` from `demo_ilp()` + +Commit each separately. + +**Step 5: Remove or rename original qubo_reductions.rs** + +```bash +# Option: rename as tutorial +git mv examples/qubo_reductions.rs examples/tutorial_qubo_reductions.rs +# Or option: remove if redundant +# git rm examples/qubo_reductions.rs + +git commit -m "refactor: rename qubo_reductions.rs to tutorial + +Separated into individual reduction examples, keeping original +as comprehensive tutorial" +``` + + +### Task 4.2: Create trivial reduction examples (IS↔VC, SetPacking, etc.) 
+ +**Files:** +- Create: `examples/reduction_is_to_vc.rs` +- Create: `examples/reduction_vc_to_is.rs` +- Create: `examples/reduction_is_to_setpacking.rs` +- Create: `examples/reduction_matching_to_setpacking.rs` +- Create: `examples/reduction_vc_to_setcovering.rs` +- Create: `examples/reduction_spinglass_to_qubo.rs` (if not already created) +- Create: `examples/reduction_qubo_to_spinglass.rs` +- Create: `examples/reduction_spinglass_to_maxcut.rs` +- Create: `examples/reduction_maxcut_to_spinglass.rs` + +**Step 1: Create reduction_is_to_vc.rs** + +Use UnitDiskMapping.jl's 5-vertex demo graph or Petersen graph. Follow template with: +- Docstring explaining complement relationship +- Transformation metrics +- JSON export with graph structure and solutions + +```rust +//! # Independent Set to Vertex Cover Reduction +//! +//! ## Mathematical Equivalence +//! S ⊆ V is an independent set iff V \ S is a vertex cover. +//! Proof: If S is independent, no edge has both endpoints in S, +//! so every edge has at least one endpoint in V \ S. +//! +//! ## This Example +//! Demonstrates the complement relationship using the Petersen graph: +//! - Instance: Petersen graph (10 vertices, 15 edges) +//! - Maximum IS size: 4 +//! - Minimum VC size: 6 (complement property: 4 + 6 = 10) +//! - Reference: Based on UnitDiskMapping.jl Petersen example +//! +//! ## Output +//! Exports `docs/paper/examples/is_to_vc.json` for use in paper code blocks. +//! +//! See docs/paper/reductions.typ for the full reduction specification. 
+ +use problemreductions::prelude::*; +use serde::Serialize; +use std::fs; + +#[derive(Serialize)] +struct ExampleData { + source_problem: String, + target_problem: String, + graph_vertices: usize, + graph_edges: Vec<(usize, usize)>, + source_size: usize, + target_size: usize, + source_solution: Vec<usize>, + target_solution: Vec<usize>, +} + +fn main() { + // Petersen graph (10 vertices, 15 edges) + let edges = vec![ + (0, 1), (1, 2), (2, 3), (3, 4), (4, 0), // outer pentagon + (5, 7), (7, 9), (9, 6), (6, 8), (8, 5), // inner star + (0, 5), (1, 6), (2, 7), (3, 8), (4, 9), // spokes + ]; + let is = IndependentSet::<i32>::new(10, edges.clone()); + + println!("\n=== Problem Transformation ==="); + println!("Source: {} with {} variables", "IndependentSet", is.num_variables()); + + // Reduce to VC + let reduction = ReduceTo::<VertexCovering<i32>>::reduce_to(&is); + let vc = reduction.target_problem(); + println!("Target: {} with {} variables", "VertexCovering", vc.num_variables()); + + // Solve + let solver = BruteForce::new(); + let vc_solutions = solver.find_best(vc); + println!("\n=== Solution ==="); + println!("Target solutions found: {}", vc_solutions.len()); + + // Extract + let is_solution = reduction.extract_solution(&vc_solutions[0]); + println!("Source solution: {:?}", is_solution); + + let size = is.solution_size(&is_solution); + println!("Solution size: {:?}", size); + assert!(size.is_valid); + println!("\n✓ Reduction verified successfully"); + + // Export JSON + let example_data = ExampleData { + source_problem: "IndependentSet".to_string(), + target_problem: "VertexCovering".to_string(), + graph_vertices: 10, + graph_edges: edges, + source_size: is.num_variables(), + target_size: vc.num_variables(), + source_solution: is_solution, + target_solution: vc_solutions[0].clone(), + }; + + fs::create_dir_all("docs/paper/examples").unwrap(); + let json = serde_json::to_string_pretty(&example_data).unwrap(); + fs::write("docs/paper/examples/is_to_vc.json", json).unwrap(); + println!(" Exported: 
docs/paper/examples/is_to_vc.json"); +} +``` + +**Step 2: Test and commit** + +```bash +cargo check --example reduction_is_to_vc +cargo run --example reduction_is_to_vc +git add examples/reduction_is_to_vc.rs +git commit -m "feat: add IS→VC reduction example" +``` + +**Step 3: Create remaining trivial examples** + +Follow same pattern for VC→IS, IS→SetPacking, etc. Commit each separately. + + +### Task 4.3: Create ILP reduction examples + +**Files:** +- Create 9 ILP reduction examples in `examples/` + +Create following the same pattern: +- `reduction_is_to_ilp.rs` +- `reduction_vc_to_ilp.rs` +- `reduction_matching_to_ilp.rs` +- `reduction_setpacking_to_ilp.rs` +- `reduction_setcovering_to_ilp.rs` +- `reduction_dominatingset_to_ilp.rs` +- `reduction_clique_to_ilp.rs` +- `reduction_coloring_to_ilp.rs` (already may exist) +- `reduction_factoring_to_ilp.rs` (already may exist) + +Each should: +- Use small instances (5-10 variables) +- Include `--features ilp` note in docstring +- Export ILP constraints to JSON for paper + +Commit each separately. + + +### Task 4.4: Create SAT and non-trivial reduction examples + +**Files:** +- Create remaining examples in `examples/` + +Create: +- `reduction_sat_to_is.rs` - use 3-4 variable SAT formula +- `reduction_sat_to_coloring.rs` - small SAT to coloring +- `reduction_sat_to_dominatingset.rs` - small SAT instance +- `reduction_sat_to_ksat.rs` - SAT to 3-SAT conversion +- `reduction_circuit_to_spinglass.rs` - small circuit +- `reduction_factoring_to_circuit.rs` - factor 6 = 2×3 + +Each should reference pkgref/ instances where applicable and export JSON. + +Commit each separately. 
+ + +### Task 4.5: Verify all 28 examples compile and run + +**Step 1: Test all examples compile** + +```bash +cd /Users/liujinguo/rcode/problemreductions +cargo check --examples +``` + +Expected: All examples compile without errors + +**Step 2: Run all examples and verify JSON output** + +```bash +for example in examples/reduction_*.rs; do + name=$(basename "$example" .rs) + echo "Running $name..." + cargo run --example "$name" || echo "FAILED: $name" +done + +ls -lh docs/paper/examples/*.json +``` + +Expected: All examples run, JSON files created + +**Step 3: Commit verification script** + +Create `scripts/test_examples.sh`: + +```bash +#!/bin/bash +set -e +cd "$(dirname "$0")/.." +echo "Testing all reduction examples..." +for example in examples/reduction_*.rs; do + name=$(basename "$example" .rs) + cargo run --quiet --example "$name" +done +echo "✓ All examples passed" +``` + +```bash +chmod +x scripts/test_examples.sh +git add scripts/test_examples.sh +git commit -m "test: add example verification script" +``` + +--- + +## PASS 5: Final Verification + +### Task 5.1: Verify paper compiles + +**Step 1: Build the paper** + +```bash +cd /Users/liujinguo/rcode/problemreductions +make paper +``` + +Expected: Typst compiles without errors, generates PDF + +**Step 2: Check for broken references** + +```bash +grep -n "@thm:" docs/paper/reductions.typ | grep -v "^[0-9]*:#theorem" +``` + +Expected: All theorem references resolve (no broken links) + +**Step 3: Commit if fixes needed** + +If any issues found, fix and commit: + +```bash +git add docs/paper/reductions.typ +git commit -m "fix: resolve broken theorem references" +``` + + +### Task 5.2: Run full test suite + +**Step 1: Run Rust tests** + +```bash +make test +``` + +Expected: All tests pass + +**Step 2: Run clippy** + +```bash +make clippy +``` + +Expected: No warnings + +**Step 3: Run all examples** + +```bash +./scripts/test_examples.sh +``` + +Expected: All examples run successfully + + +### Task 5.3: 
Generate final checklist report + +**Step 1: Verify success criteria** + +Create `docs/paper/VERIFICATION.md`: + +```markdown +# Reductions.typ Polish - Verification Report + +Date: 2026-02-10 + +## Success Criteria + +- [x] 15 problem definitions have field mapping paragraphs +- [x] All problem definitions link to theorem labels (not problem definitions) +- [x] 28 theorems have labels and GitHub example links +- [x] Trivial reduction proofs explain variable mappings explicitly +- [x] 28 example files created with detailed docstrings +- [x] All examples use reference package instances where applicable +- [x] All examples export JSON to `docs/paper/examples/` +- [x] `docs/paper/examples/` added to `.gitignore` (already done) +- [x] Existing `qubo_reductions.rs` split into 6 separate files +- [x] `make paper` compiles successfully +- [x] All example files compile and run successfully + +## Statistics + +- Problem definitions enhanced: 15 +- Theorems labeled: 28 +- Example files created: 28 +- JSON exports: 28 +- Total commits: ~50-60 + +## Files Modified + +- `docs/paper/reductions.typ` - main paper file +- `examples/` - 28 new example files +- `examples/qubo_reductions.rs` - renamed to tutorial +- `.gitignore` - already updated + +## Next Steps + +- Review generated PDF for formatting +- Verify all GitHub links work after PR merge +- Consider adding CI check for example JSON generation +``` + +**Step 2: Commit verification report** + +```bash +git add docs/paper/VERIFICATION.md +git commit -m "docs: add verification report for reductions.typ polish" +``` + + +### Task 5.4: Final cleanup and summary commit + +**Step 1: Remove temporary files** + +```bash +rm -f docs/paper/.theorem_labels.txt +``` + +**Step 2: Create summary commit** + +```bash +git commit --allow-empty -m "feat: complete reductions.typ documentation polish + +Implemented 5-pass documentation enhancement: + +Pass 1: Added theorem labels () to all 28 reduction theorems +Pass 2: Enhanced 15 problem 
definitions with field mappings and theorem links +Pass 3: Expanded trivial reduction proofs and added GitHub example links +Pass 4: Created 28 standalone example files with JSON export +Pass 5: Verified compilation and all examples run successfully + +All examples follow consistent format: +- Detailed docstrings with mathematical context +- Transformation metrics output +- JSON export to docs/paper/examples/ +- Based on reference package instances where applicable + +See docs/paper/VERIFICATION.md for complete checklist." +``` + + +### Task 5.5: Push and create PR (if in worktree) + +**Step 1: Push changes** + +```bash +git push origin HEAD +``` + +**Step 2: Create PR** + +```bash +gh pr create --title "Polish reductions.typ documentation" \ + --body "Implements design from docs/plans/2026-02-10-polish-reductions-paper.md + +## Changes +- Added theorem labels to all 28 reductions for cross-referencing +- Enhanced 15 problem definitions with field mappings +- Expanded trivial reduction proofs with variable mapping explanations +- Created 28 standalone example files with JSON export +- Split qubo_reductions.rs into 6 separate files +- All examples reference pkgref/ instances where applicable + +## Verification +- ✓ make paper compiles successfully +- ✓ All 28 examples compile and run +- ✓ JSON exports generated +- ✓ All tests pass +- ✓ No clippy warnings + +See docs/paper/VERIFICATION.md for complete checklist." 
+``` + +Expected: PR created and ready for review + +--- + +## Notes + +**Reference Package Usage:** +- pkgref/UnitDiskMapping.jl/examples/ - Petersen graph, 5-vertex demo +- pkgref/qubogen/tests/ - QUBO test cases with known matrices +- pkgref/ProblemReductions.jl/examples/ - Factoring example + +**JSON Export Format:** +Each example exports structured data including: +- Problem names and sizes +- Input instance (graph edges, formulas, matrices) +- Solutions (source and target) +- Verification results + +**Commit Strategy:** +- Small, focused commits (one enhancement per commit) +- ~50-60 commits total across 5 passes +- Checkpoint commits after each pass +- Final summary commit + +**Testing:** +- Verify each example individually as created +- Run full suite at end +- Check paper compiles after each pass + diff --git a/docs/plans/2026-02-10-polish-reductions-paper.md b/docs/plans/2026-02-10-polish-reductions-paper.md new file mode 100644 index 00000000..cdf3341c --- /dev/null +++ b/docs/plans/2026-02-10-polish-reductions-paper.md @@ -0,0 +1,342 @@ +# Polish reductions.typ Documentation + +**Date:** 2026-02-10 +**Status:** Design validated, ready for implementation + +## Objectives + +1. Connect mathematical symbols to program fields in problem definitions +2. Link from problem definitions to reduction theorems (not other problems) +3. Explain trivial reductions explicitly with variable mappings +4. Create standalone example files for all 28 reductions with GitHub links + +## Design + +### 1. Problem Definition Enhancement + +For each of the 15 problem definitions in Section 2, add two modifications: + +**A) Field Mapping Paragraph** + +After the Rust struct, add a paragraph explaining the correspondence between mathematical notation and struct fields: + +```typst +#definition("Independent Set (IS)")[ + Given $G = (V, E)$ with vertex weights $w: V -> RR$, find $S subset.eq V$ maximizing... 
+ + ```rust + pub struct IndependentSet<W> { + graph: UnGraph<(), ()>, + weights: Vec<W>, + } + ``` + + Where `graph` represents $G = (V, E)$ with vertices indexed $0..n-1$, and `weights` stores vertex weights $w: V -> RR$ indexed by vertex ID. The solution is a subset $S subset.eq V$ represented as a `Vec<usize>` of vertex indices. + + _Implemented reductions:_ IS→SetPacking (@thm:is-to-setpacking), IS→QUBO (@thm:is-to-qubo), VC→IS (@thm:vc-to-is), SAT→IS (@thm:sat-to-is), SetPacking→IS (@thm:setpacking-to-is). +] +``` + +**B) Link to Theorems Instead of Problems** + +Replace current: +```typst +_Reduces to:_ Set Packing (@def:set-packing), QUBO (@def:qubo). +_Reduces from:_ Vertex Cover (@def:vertex-cover), SAT (@def:satisfiability). +``` + +With: +```typst +_Implemented reductions:_ IS→SetPacking (@thm:is-to-setpacking), IS→QUBO (@thm:is-to-qubo), VC→IS (@thm:vc-to-is), SAT→IS (@thm:sat-to-is). +``` + +### 2. Theorem Enhancement + +**A) Add Labels to All Theorems** + +Every reduction theorem gets a label for cross-referencing: + +```typst +#theorem[ + *(IS $arrow.l.r$ VC)* ... +] <thm:is-to-vc> + +#theorem[ + *(IS $arrow.r$ Set Packing)* ... +] <thm:is-to-setpacking> +``` + +Label format: `<thm:source-to-target>` using lowercase problem names with hyphens. + +**B) Expand Trivial Reduction Proofs** + +For trivial reductions (complement, isomorphism), add explicit variable mapping explanation: + +```typst +#theorem[ + *(IS $arrow.l.r$ VC)* $S subset.eq V$ is independent iff $V backslash S$ is a vertex cover, with $|"IS"| + |"VC"| = |V|$. +] + +#proof[ + ($arrow.r.double$) If $S$ is independent, no edge has both endpoints in $S$, so every edge has at least one endpoint in $V backslash S$, making it a cover. + + ($arrow.l.double$) If $C$ is a cover, for any $u, v in V backslash C$, edge $(u,v)$ cannot exist (else uncovered), so $V backslash C$ is independent. + + _Variable mapping:_ Given IS instance $(G, w)$, create VC instance $(G, w)$ with identical graph and weights. 
Solution extraction: for VC solution $C$, return $S = V backslash C$. The complement operation preserves optimality since $|S| + |C| = |V|$ is constant. +] +``` + +**C) Add GitHub Links to Examples** + +After each proof or embedded example, add: + +```typst +See [reduction example](https://github.com/CodingThrust/problem-reductions/blob/main/examples/reduction_is_to_vc.rs). +``` + +### 3. Example Files Creation + +Create 28 example files in `examples/` directory with flat naming structure: + +#### Example File Template + +```rust +//! # [Source] to [Target] Reduction +//! +//! ## Mathematical Equivalence +//! [Explain the mathematical relationship and why it works - 2-4 sentences] +//! [Reference to the mathematical proof if helpful] +//! +//! ## This Example +//! [Describe the specific instance used - graph structure, problem size, expected results] +//! - Instance: [e.g., "5-vertex graph from edges [(0,1), (1,2), ...]" or "Petersen graph"] +//! - Source: [expected optimal value] +//! - Target: [expected optimal value] +//! - Reference: [cite pkgref source if applicable, e.g., "Based on qubogen test case"] +//! +//! ## Output +//! Exports `docs/paper/examples/[source]_to_[target].json` for use in paper code blocks. +//! +//! See docs/paper/reductions.typ for the full reduction specification. + +use problemreductions::prelude::*; +use serde::Serialize; +use std::fs; +use std::path::Path; + +#[derive(Serialize)] +struct ExampleData { + source_problem: String, + target_problem: String, + source_size: usize, + target_size: usize, + source_solution: Vec<usize>, + target_solution: Vec<usize>, + // Include problem-specific fields as needed +} + +fn main() { + // 1. Create source problem + // Use instances from reference packages where available: + // - ProblemReductions.jl examples (Petersen graph, demo graphs) + // - qubogen test cases (small graphs with known solutions) + // - UnitDiskMapping.jl examples + let source = SourceProblem::new(...); + + // 2. 
Reduce to target + let reduction = ReduceTo::<TargetProblem>::reduce_to(&source); + let target = reduction.target_problem(); + + // 3. Print problem transformation metrics + println!("\n=== Problem Transformation ==="); + println!("Source: {} with {} variables", + SourceProblem::NAME, source.num_variables()); + println!("Target: {} with {} variables", + TargetProblem::NAME, target.num_variables()); + + // 4. Solve target problem + let solver = BruteForce::new(); + let target_solutions = solver.find_best(target); + println!("\n=== Solution ==="); + println!("Target solutions found: {}", target_solutions.len()); + + // 5. Extract source solution + let source_solution = reduction.extract_solution(&target_solutions[0]); + println!("Source solution: {:?}", source_solution); + + // 6. Verify and print result + let size = source.solution_size(&source_solution); + println!("Solution size: {:?}", size); + assert!(size.is_valid); + println!("\n✓ Reduction verified successfully"); + + // 7. Export to JSON for paper + let example_data = ExampleData { + source_problem: SourceProblem::NAME.to_string(), + target_problem: TargetProblem::NAME.to_string(), + source_size: source.num_variables(), + target_size: target.num_variables(), + source_solution: source_solution.clone(), + target_solution: target_solutions[0].clone(), + // Add problem-specific fields + }; + + let json = serde_json::to_string_pretty(&example_data).unwrap(); + fs::create_dir_all("docs/paper/examples").unwrap(); + let path = Path::new("docs/paper/examples/[source]_to_[target].json"); + fs::write(path, json).unwrap(); + println!(" Exported: {}", path.display()); +} +``` + +#### File Manifest (28 files) + +**Note:** Unit Disk Mapping (IS → GridGraph IS) already has an example at `examples/export_petersen_mapping.rs`. We will link to it from the paper but not create a new example file. + +**Note:** Existing `examples/qubo_reductions.rs` will be split into 6 separate files following our naming convention. 
The original file can be deleted or kept as a tutorial (to be decided during implementation). + +**Trivial/Complement (6 files):** +1. `reduction_is_to_vc.rs` +2. `reduction_vc_to_is.rs` +3. `reduction_spinglass_to_qubo.rs` +4. `reduction_qubo_to_spinglass.rs` +5. `reduction_spinglass_to_maxcut.rs` +6. `reduction_maxcut_to_spinglass.rs` + +**Graph → Set (3 files):** +7. `reduction_is_to_setpacking.rs` +8. `reduction_matching_to_setpacking.rs` +9. `reduction_vc_to_setcovering.rs` + +**Penalty-method QUBO (6 files):** +10. `reduction_is_to_qubo.rs` +11. `reduction_vc_to_qubo.rs` +12. `reduction_coloring_to_qubo.rs` +13. `reduction_setpacking_to_qubo.rs` +14. `reduction_ksatisfiability_to_qubo.rs` +15. `reduction_ilp_to_qubo.rs` + +**ILP formulations (9 files):** +16. `reduction_coloring_to_ilp.rs` +17. `reduction_factoring_to_ilp.rs` +18. `reduction_is_to_ilp.rs` +19. `reduction_vc_to_ilp.rs` +20. `reduction_matching_to_ilp.rs` +21. `reduction_setpacking_to_ilp.rs` +22. `reduction_setcovering_to_ilp.rs` +23. `reduction_dominatingset_to_ilp.rs` +24. `reduction_clique_to_ilp.rs` + +**Non-trivial (6 files):** +25. `reduction_sat_to_is.rs` +26. `reduction_sat_to_coloring.rs` +27. `reduction_sat_to_dominatingset.rs` +28. `reduction_sat_to_ksat.rs` +29. `reduction_circuit_to_spinglass.rs` +30. `reduction_factoring_to_circuit.rs` + +### 4. Reference Package Integration + +Use instances from reference packages for cross-verification and consistency: + +**Available in `pkgref/` (cloned from GitHub):** + +1. **ProblemReductions.jl** (`pkgref/ProblemReductions.jl/examples/`) + - Petersen graph examples + - Factoring → Circuit → SpinGlass + - Educational narrative style + +2. **UnitDiskMapping.jl** (`pkgref/UnitDiskMapping.jl/examples/`) + - 5-vertex demo graph: edges `[(1,2), (2,4), (3,4), (1,3), (4,5), (1,5)]` + - Petersen graph mapping + - Comprehensive tutorial examples + +3. 
**qubogen** (`pkgref/qubogen/tests/`) + - Small test instances (5 nodes) with known QUBO matrices + - Graph coloring: 5 nodes, 3 colors + - Max-Cut, MVC, Set Packing test cases + - Max-2-SAT examples + +**Instance Selection Strategy:** + +- **Graph problems**: Use Petersen graph (10 vertices) or UnitDiskMapping's 5-vertex demo graph +- **QUBO reductions**: Cross-reference with qubogen test cases where applicable +- **SAT problems**: Small formulas (3-4 variables) with known solutions +- **Factoring**: Use 6 = 2×3 from ProblemReductions.jl example +- **Document reference source** in example docstring when using external instance + +### 5. Implementation Workflow + +**Pass 1: Add theorem labels** +- Scan all `#theorem[...]` blocks in Section 3 +- Add `` labels +- Build mapping: reduction → label + +**Pass 2: Enhance problem definitions** +- For each problem in Section 2: + - Add field mapping paragraph after struct + - Replace "Reduces to/from" with "Implemented reductions" + theorem refs + +**Pass 3: Enhance theorem proofs** +- Expand trivial reduction proofs with variable mapping +- Add GitHub links after all theorems + - For Unit Disk Mapping (IS → GridGraph IS): link to existing `examples/export_petersen_mapping.rs` + - For other reductions: link to new `examples/reduction_*.rs` files + +**Pass 4: Create example files** +- Split existing `examples/qubo_reductions.rs` into 6 separate files: + - `reduction_is_to_qubo.rs` + - `reduction_vc_to_qubo.rs` + - `reduction_coloring_to_qubo.rs` + - `reduction_setpacking_to_qubo.rs` + - `reduction_ksatisfiability_to_qubo.rs` + - `reduction_ilp_to_qubo.rs` +- Extract embedded examples from paper to standalone files +- For each new example: + - Check `pkgref/` for matching instances in reference packages + - Use reference instances where available for cross-verification + - Document source in docstring (e.g., "Based on qubogen test case") + - Add detailed output showing problem transformation metrics + - Export JSON to 
`docs/paper/examples/[source]_to_[target].json` +- Each file: detailed docstring + closed-loop verification + JSON export + +**Pass 5: Verification** +- All theorems labeled +- All problems link to theorems +- All theorems link to examples +- Run `make paper` - must compile without errors + +## Success Criteria + +- [ ] 15 problem definitions have field mapping paragraphs +- [ ] All problem definitions link to theorem labels (not problem definitions) +- [ ] 28 theorems have labels and GitHub example links +- [ ] Trivial reduction proofs explain variable mappings explicitly +- [ ] 28 example files created with detailed docstrings +- [ ] All examples use reference package instances where applicable +- [ ] All examples export JSON to `docs/paper/examples/` +- [ ] `docs/paper/examples/` added to `.gitignore` (generated files) +- [ ] Existing `qubo_reductions.rs` split into 6 separate files +- [ ] `make paper` compiles successfully +- [ ] All example files compile and run successfully + +## Dependencies + +- Repository: https://github.com/CodingThrust/problem-reductions +- Paper file: `docs/paper/reductions.typ` +- Examples directory: `examples/` (already exists) +- Reduction rules: 28 files in `src/rules/` + +## Notes + +- **Docstrings**: Explain math and example instance, NOT reduction algorithm (kept in paper) +- **Instance selection**: Prefer instances from reference packages (ProblemReductions.jl, UnitDiskMapping.jl, qubogen) for cross-verification +- **Output style**: Inspired by UnitDiskMapping.jl - show problem transformation metrics and verification details +- **JSON export**: Each example exports `docs/paper/examples/[source]_to_[target].json` containing: + - Problem names and sizes + - Source and target solutions + - Problem-specific data (graphs, matrices, formulas) + - Used for embedding code examples in the paper +- **Separation of concerns**: Examples demonstrate mechanics, paper provides mathematical specification +- **GitHub links**: Use path 
`/blob/main/examples/reduction_*.rs` +- **Reference packages**: Located in `pkgref/` (gitignored, cloned for development reference) diff --git a/docs/plans/2026-02-10-unified-json-schema.md b/docs/plans/2026-02-10-unified-json-schema.md new file mode 100644 index 00000000..3a22b1d4 --- /dev/null +++ b/docs/plans/2026-02-10-unified-json-schema.md @@ -0,0 +1,277 @@ +# Unified JSON Schema for Reduction Examples + +**Date**: 2026-02-10 +**Status**: Draft +**Scope**: All 30 example JSON files in `docs/paper/examples/` + +## Problem + +Three inconsistent JSON schemas exist across 30 example files: + +1. **Flat schema** (e.g., `sat_to_ksat.json`): `source_num_variables`, `target_num_variables`, `source_solution`, `target_solution` at top level +2. **QUBO rich schema** (e.g., `is_to_qubo.json`): `source_instance`, `qubo`, `optimal_solutions` with domain-specific details +3. **Nested schema** (e.g., `matching_to_setpacking.json`): `source`, `target`, `solution` objects + +This forces Typst's `load-example()` to normalize across all three formats at load time. + +## Design Decisions (from brainstorming) + +1. **Two files per example**: `.json` (reduction structure) and `.result.json` (runtime solutions) +2. **Typed instance objects**: Each problem type has its own natural fields (`num_vertices`/`edges` for graphs, `num_vars`/`matrix` for QUBO, `clauses` for SAT) +3. **No field duplication**: No synthetic `num_variables` when the instance already has `num_vertices` or `num_vars` +4. **Raw configs only**: Solution arrays are `[0,1,0,1]`, no interpretation strings like `"V0=Red"` +5. **Polynomial overhead**: Matches `ReductionOverhead` from `src/rules/registry.rs` +6. 
**Variant dict**: Matches `reduction_graph.json` node format + +## Schema: `.json` (Reduction File) + +```json +{ + "source": { + "problem": "IndependentSet", + "variant": { "graph": "SimpleGraph", "weight": "Unweighted" }, + "instance": { + "num_vertices": 4, + "num_edges": 3, + "edges": [[0,1], [1,2], [2,3]] + } + }, + "target": { + "problem": "QUBO", + "variant": { "graph": "SimpleGraph", "weight": "f64" }, + "instance": { + "num_vars": 4, + "matrix": [[-1.0, 5.0, 0.0, 0.0], ...] + } + }, + "overhead": [ + { "field": "num_vars", "polynomial": [{ "coefficient": 1.0, "variables": [["num_vertices", 1]] }] } + ] +} +``` + +### `source` / `target` object + +| Field | Type | Description | +|-------|------|-------------| +| `problem` | string | Problem name matching `Problem::NAME` (e.g., `"IndependentSet"`, `"QUBO"`, `"KSatisfiability<3>"`) | +| `variant` | object | Key-value pairs matching `reduction_graph.json` node variant (e.g., `{"graph": "SimpleGraph", "weight": "Unweighted"}`) | +| `instance` | object | Problem-specific fields (see Instance Schemas below) | + +### `overhead` array + +Directly mirrors `ReductionOverhead::output_size: Vec<(&str, Polynomial)>` from `src/rules/registry.rs`. + +Each element maps one output size field to a polynomial of input size variables: + +```json +{ + "field": "num_vars", + "polynomial": [ + { "coefficient": 1.0, "variables": [["num_vertices", 1]] } + ] +} +``` + +Each polynomial entry is a monomial (mirrors `Monomial` from `src/polynomial.rs`): + +| Field | Type | Description | +|-------|------|-------------| +| `coefficient` | float | Scalar multiplier | +| `variables` | array of `[name, exponent]` | Variable-exponent pairs (empty = constant) | + +The polynomial is the sum of all monomials: `Σ (coefficient × Π variable^exponent)`. 
+ +**Examples matching actual code declarations:** + +| Code (`#[reduction(overhead = ...)]`) | JSON `overhead` | +|---------------------------------------|-----------------| +| `poly!(num_vertices)` | `[{"coefficient": 1.0, "variables": [["num_vertices", 1]]}]` | +| `poly!(7 * num_clauses)` | `[{"coefficient": 7.0, "variables": [["num_clauses", 1]]}]` | +| `poly!(num_bits_first^2)` | `[{"coefficient": 1.0, "variables": [["num_bits_first", 2]]}]` | +| `poly!(3 * num_vars)` | `[{"coefficient": 3.0, "variables": [["num_vars", 1]]}]` | +| `poly!(3)` (constant) | `[{"coefficient": 3.0, "variables": []}]` | + +**Multi-field overhead** (e.g., SAT → IS has both `num_vertices` and `num_edges`): + +```json +"overhead": [ + { "field": "num_vertices", "polynomial": [{ "coefficient": 7.0, "variables": [["num_clauses", 1]] }] }, + { "field": "num_edges", "polynomial": [{ "coefficient": 21.0, "variables": [["num_clauses", 1]] }] } +] +``` + +### Instance Schemas (by problem type) + +Each problem type uses its natural fields from `problem_size()`. No generic `num_variables` wrapper. + +**Graph problems** (IndependentSet, VertexCovering, DominatingSet, MaxCut, Clique, Matching): +```json +{ "num_vertices": 4, "num_edges": 3, "edges": [[0,1], [1,2], [2,3]] } +``` +Optional: `"weights": [1, 2, 3, 4]` for weighted variants. + +**KColoring**: +```json +{ "num_vertices": 3, "num_edges": 3, "num_colors": 3, "edges": [[0,1], [1,2], [0,2]] } +``` + +**SAT / Satisfiability**: +```json +{ "num_vars": 4, "num_clauses": 2, "clauses": [[1, -2, 3], [-1, 4]] } +``` + +**KSatisfiability**: +```json +{ "num_vars": 8, "num_clauses": 6, "k": 3, "clauses": [[1, -2, 3], ...] } +``` + +**QUBO**: +```json +{ "num_vars": 4, "matrix": [[-1.0, 5.0, 0.0, 0.0], ...] } +``` + +**SpinGlass**: +```json +{ "num_spins": 4, "num_interactions": 3, "interactions": [[0,1,1.0], [1,2,-1.0], ...] 
} +``` + +**SetPacking / SetCovering**: +```json +{ "num_sets": 4, "num_elements": 3, "sets": [[0], [0,1], [1,2], [2]] } +``` +Optional: `"weights": [1, 1, 1, 1]` for weighted variants. + +**ILP**: +```json +{ "num_vars": 4, "num_constraints": 2, "objective": [1.0, 2.0, 3.0, 4.0], "constraints": [...] } +``` + +**CircuitSAT**: +```json +{ "num_gates": 5, "num_assignments": 8, "gates": [...] } +``` + +**Factoring**: +```json +{ "number": 15, "num_bits_first": 2, "num_bits_second": 2 } +``` + +## Schema: `.result.json` (Results File) + +```json +{ + "solutions": [ + { + "source_config": [1, 0, 1, 0], + "target_config": [1, 0, 1, 0] + } + ] +} +``` + +| Field | Type | Description | +|-------|------|-------------| +| `solutions` | array | One or more optimal solution pairs | +| `solutions[].source_config` | array of int | Raw variable assignment for source problem | +| `solutions[].target_config` | array of int | Raw variable assignment for target problem | + +No interpretation fields (no `"coloring": ["V0=Red", ...]`, no `"selected_vertices": [1,3]`). Consumers derive meaning from config + instance. 
+ +## Implementation Plan + +### Step 1: Create `ExampleData` Rust struct + +Add a shared serialization module (e.g., `examples/shared/schema.rs` or a helper in `src/`) that all examples import: + +```rust +#[derive(Serialize)] +struct ProblemSide { + problem: String, + variant: HashMap, + instance: serde_json::Value, +} + +#[derive(Serialize)] +struct OverheadEntry { + field: String, + polynomial: Vec, +} + +#[derive(Serialize)] +struct MonomialJson { + coefficient: f64, + variables: Vec<(String, u8)>, +} + +#[derive(Serialize)] +struct ReductionData { + source: ProblemSide, + target: ProblemSide, + overhead: Vec, +} + +#[derive(Serialize)] +struct SolutionPair { + source_config: Vec, + target_config: Vec, +} + +#[derive(Serialize)] +struct ResultData { + solutions: Vec, +} +``` + +Add helper methods to build `ProblemSide` from any `Problem` impl and `OverheadEntry` from `ReductionOverhead`. + +### Step 2: Update all 30 example files + +Replace ad-hoc serialization with the shared struct. Each example: +1. Creates source problem +2. Reduces to target +3. Solves target, extracts solutions +4. Builds `ReductionData` + `ResultData` +5. Writes `.json` and `.result.json` + +### Step 3: Update Typst `load-example()` + +Replace the 3-schema normalization in `reductions.typ` with direct field access. Since all JSON files now share the same schema, `load-example()` becomes trivial: + +```typst +#let load-example(name) = json("examples/" + name + ".json") +#let load-results(name) = json("examples/" + name + ".result.json") +``` + +Update all `reduction-example()` calls and the resource estimation table to use `data.source.instance.num_vertices` etc. + +### Step 4: Update integration test assertions + +If `tests/suites/examples.rs` checks JSON structure, update to match new schema. 
+ +### Step 5: Verify + +```bash +make examples # All 30 examples regenerate both files +make paper # Typst compiles with new schema +make test # All tests pass +make clippy # No warnings +``` + +## File Impact + +| Files | Count | Action | +|-------|-------|--------| +| `examples/shared/schema.rs` (or similar) | 1 | New: shared serialization structs | +| `examples/reduction_*.rs` | 30 | Update: use shared schema | +| `docs/paper/examples/*.json` | 30 | Regenerated: unified schema | +| `docs/paper/examples/*.result.json` | 30 | New: split solution data | +| `docs/paper/reductions.typ` | 1 | Update: simplify `load-example()` | + +## Migration Notes + +- Old JSON files are fully replaced (not backwards-compatible) +- The `overhead` field is new — sourced from each reduction's `#[reduction(overhead = ...)]` macro +- Polynomial serialization is a 1:1 mapping from `Polynomial { terms: Vec }` in Rust to `[{coefficient, variables}]` in JSON +- Problem names use `Problem::NAME` exactly (e.g., `"KSatisfiability<3>"` not `"3-SAT"`) +- Variant dicts match `reduction_graph.json` nodes diff --git a/examples/export_petersen_mapping.rs b/examples/export_petersen_mapping.rs index 85aec23d..62ea9706 100644 --- a/examples/export_petersen_mapping.rs +++ b/examples/export_petersen_mapping.rs @@ -1,12 +1,34 @@ -//! Export Petersen graph and its grid mapping to JSON files for visualization. +//! # Independent Set to Grid Graph IS Reduction (Unit Disk Mapping) //! -//! Run with: `cargo run --example export_petersen_mapping` +//! ## Mathematical Equivalence +//! Any Maximum Independent Set (MIS) problem on a general graph G can be reduced to +//! MIS on a unit disk graph (King's subgraph or triangular lattice) with polynomial +//! overhead. The copy-line method creates L-shaped "copy lines" for each vertex, with +//! crossing gadgets enforcing edge constraints. The grid MIS size equals the source +//! MIS plus a constant overhead: MIS(G_grid) = MIS(G) + Δ. //! -//! Outputs: -//! 
- docs/paper/petersen_source.json - The original Petersen graph -//! - docs/paper/petersen_square_weighted.json - Weighted square lattice (King's subgraph) -//! - docs/paper/petersen_square_unweighted.json - Unweighted square lattice -//! - docs/paper/petersen_triangular.json - Weighted triangular lattice +//! ## This Example +//! Demonstrates the unit disk mapping using the Petersen graph: +//! - Instance: Petersen graph (10 vertices, 15 edges, MIS = 4) +//! - King's subgraph (weighted): 30×42 grid, 219 nodes, overhead Δ = 89 +//! - King's subgraph (unweighted): 30×42 grid, 219 nodes, overhead Δ = 89 +//! - Triangular lattice (weighted): 42×60 grid, 395 nodes, overhead Δ = 375 +//! - Reference: Based on UnitDiskMapping.jl Petersen graph example +//! +//! This example also exports JSON files for paper visualization (Figure: Unit Disk Mappings). +//! +//! ## Usage +//! ```bash +//! cargo run --example export_petersen_mapping +//! ``` +//! +//! ## Outputs +//! - `docs/paper/petersen_source.json` - The original Petersen graph +//! - `docs/paper/petersen_square_weighted.json` - Weighted King's subgraph +//! - `docs/paper/petersen_square_unweighted.json` - Unweighted King's subgraph +//! - `docs/paper/petersen_triangular.json` - Weighted triangular lattice +//! +//! See docs/paper/reductions.typ for the full reduction specification. use problemreductions::rules::unitdiskmapping::{ksg, triangular}; use problemreductions::topology::{Graph, GridGraph}; @@ -40,7 +62,7 @@ fn write_json(data: &T, path: &Path) { fs::create_dir_all(parent).expect("Failed to create output directory"); } fs::write(path, json).expect("Failed to write JSON file"); - println!("Wrote: {}", path.display()); + println!(" Wrote: {}", path.display()); } /// Create a GridMapping from a MappingResult by using the actual grid_graph. 
@@ -55,6 +77,8 @@ fn make_grid_mapping(result: &ksg::MappingResult, weighted: bool) -> GridM } fn main() { + println!("\n=== Independent Set to Grid Graph IS (Unit Disk Mapping) ===\n"); + // Petersen graph: n=10, MIS=4 // Outer pentagon: 0-1-2-3-4-0 // Inner star: 5-7-9-6-8-5 @@ -82,6 +106,12 @@ fn main() { let num_vertices = 10; let petersen_mis = 4; + println!("Source Problem: Independent Set"); + println!(" Graph: Petersen graph"); + println!(" Vertices: {}", num_vertices); + println!(" Edges: {}", petersen_edges.len()); + println!(" MIS size: {}", petersen_mis); + // Export source graph let source = SourceGraph { name: "Petersen".to_string(), @@ -91,16 +121,28 @@ fn main() { }; write_json(&source, Path::new("docs/paper/petersen_source.json")); + println!("\n=== Mapping to Grid Graphs ===\n"); + // Map to weighted King's subgraph (square lattice) + println!("1. King's Subgraph (Weighted)"); let square_weighted_result = ksg::map_weighted(num_vertices, &petersen_edges); let square_weighted = make_grid_mapping(&square_weighted_result, true); println!( - "Square weighted: {}x{}, {} nodes, {} edges, overhead={}", + " Grid size: {}×{}", square_weighted.grid_graph.size().0, - square_weighted.grid_graph.size().1, + square_weighted.grid_graph.size().1 + ); + println!( + " Vertices: {}, Edges: {}", square_weighted.grid_graph.num_vertices(), - square_weighted.grid_graph.num_edges(), - square_weighted.mis_overhead + square_weighted.grid_graph.num_edges() + ); + println!(" MIS overhead Δ: {}", square_weighted.mis_overhead); + println!( + " MIS(grid) = MIS(source) + Δ = {} + {} = {}", + petersen_mis, + square_weighted.mis_overhead, + petersen_mis as i32 + square_weighted.mis_overhead ); write_json( &square_weighted, @@ -108,15 +150,25 @@ fn main() { ); // Map to unweighted King's subgraph (square lattice) + println!("\n2. 
King's Subgraph (Unweighted)"); let square_unweighted_result = ksg::map_unweighted(num_vertices, &petersen_edges); let square_unweighted = make_grid_mapping(&square_unweighted_result, false); println!( - "Square unweighted: {}x{}, {} nodes, {} edges, overhead={}", + " Grid size: {}×{}", square_unweighted.grid_graph.size().0, - square_unweighted.grid_graph.size().1, + square_unweighted.grid_graph.size().1 + ); + println!( + " Vertices: {}, Edges: {}", square_unweighted.grid_graph.num_vertices(), - square_unweighted.grid_graph.num_edges(), - square_unweighted.mis_overhead + square_unweighted.grid_graph.num_edges() + ); + println!(" MIS overhead Δ: {}", square_unweighted.mis_overhead); + println!( + " MIS(grid) = MIS(source) + Δ = {} + {} = {}", + petersen_mis, + square_unweighted.mis_overhead, + petersen_mis as i32 + square_unweighted.mis_overhead ); write_json( &square_unweighted, @@ -124,45 +176,53 @@ fn main() { ); // Map to weighted triangular lattice + println!("\n3. Triangular Lattice (Weighted)"); let triangular_result = triangular::map_weighted(num_vertices, &petersen_edges); let triangular_weighted = make_grid_mapping(&triangular_result, true); println!( - "Triangular weighted: {}x{}, {} nodes, {} edges, overhead={}", + " Grid size: {}×{}", triangular_weighted.grid_graph.size().0, - triangular_weighted.grid_graph.size().1, + triangular_weighted.grid_graph.size().1 + ); + println!( + " Vertices: {}, Edges: {}", triangular_weighted.grid_graph.num_vertices(), - triangular_weighted.grid_graph.num_edges(), - triangular_weighted.mis_overhead + triangular_weighted.grid_graph.num_edges() + ); + println!(" MIS overhead Δ: {}", triangular_weighted.mis_overhead); + println!( + " MIS(grid) = MIS(source) + Δ = {} + {} = {}", + petersen_mis, + triangular_weighted.mis_overhead, + petersen_mis as i32 + triangular_weighted.mis_overhead ); write_json( &triangular_weighted, Path::new("docs/paper/petersen_triangular.json"), ); - println!("\nSummary:"); - println!( - " Source: 
Petersen graph, n={}, MIS={}", - num_vertices, petersen_mis - ); + println!("\n=== Summary ===\n"); + println!("Source: Petersen graph (10 vertices, MIS = 4)"); + println!(); println!( - " Square weighted: {} nodes, MIS = {} + {} = {}", + "King's subgraph (weighted): {} vertices, MIS = {} (overhead Δ = {})", square_weighted.grid_graph.num_vertices(), - petersen_mis, - square_weighted.mis_overhead, - petersen_mis as i32 + square_weighted.mis_overhead + petersen_mis as i32 + square_weighted.mis_overhead, + square_weighted.mis_overhead ); println!( - " Square unweighted: {} nodes, MIS = {} + {} = {}", + "King's subgraph (unweighted): {} vertices, MIS = {} (overhead Δ = {})", square_unweighted.grid_graph.num_vertices(), - petersen_mis, - square_unweighted.mis_overhead, - petersen_mis as i32 + square_unweighted.mis_overhead + petersen_mis as i32 + square_unweighted.mis_overhead, + square_unweighted.mis_overhead ); println!( - " Triangular weighted: {} nodes, MIS = {} + {} = {}", + "Triangular lattice (weighted): {} vertices, MIS = {} (overhead Δ = {})", triangular_weighted.grid_graph.num_vertices(), - petersen_mis, - triangular_weighted.mis_overhead, - petersen_mis as i32 + triangular_weighted.mis_overhead + petersen_mis as i32 + triangular_weighted.mis_overhead, + triangular_weighted.mis_overhead ); + + println!("\n✓ Unit disk mapping demonstrated successfully"); + println!(" JSON files exported for paper visualization"); } diff --git a/examples/reduction_circuit_to_spinglass.rs b/examples/reduction_circuit_to_spinglass.rs new file mode 100644 index 00000000..94f08cb0 --- /dev/null +++ b/examples/reduction_circuit_to_spinglass.rs @@ -0,0 +1,125 @@ +//! # Circuit-SAT to Spin Glass Reduction +//! +//! ## Mathematical Equivalence +//! Each logic gate (AND, OR, NOT, XOR) maps to a spin glass gadget whose ground +//! states encode valid input-output combinations. The full circuit becomes a sum +//! of gadget Hamiltonians; ground states correspond to satisfying assignments. 
+//! +//! ## This Example +//! - Instance: Simple AND gate circuit (c = x AND y) +//! - Source: CircuitSAT with 2 inputs +//! - Target: SpinGlass +//! +//! ## Output +//! Exports `docs/paper/examples/circuit_to_spinglass.json` for use in paper code blocks. + +use problemreductions::export::*; +use problemreductions::models::specialized::{Assignment, BooleanExpr, Circuit}; +use problemreductions::prelude::*; +use problemreductions::topology::{Graph, SimpleGraph}; + +fn main() { + // 1. Create CircuitSAT instance: c = x AND y + // This is a simple circuit with one AND gate. + let circuit = Circuit::new(vec![ + Assignment::new( + vec!["c".to_string()], + BooleanExpr::and(vec![BooleanExpr::var("x"), BooleanExpr::var("y")]), + ), + ]); + let circuit_sat = CircuitSAT::::new(circuit); + + println!("=== Circuit-SAT to Spin Glass Reduction ===\n"); + println!("Source circuit: c = x AND y"); + println!( + " {} variables: {:?}", + circuit_sat.num_variables(), + circuit_sat.variable_names() + ); + + // 2. Reduce to SpinGlass + let reduction = ReduceTo::>::reduce_to(&circuit_sat); + let sg = reduction.target_problem(); + + println!("\n=== Problem Transformation ==="); + println!( + "Source: CircuitSAT with {} variables", + circuit_sat.num_variables() + ); + println!( + "Target: SpinGlass with {} spins, {} interactions", + sg.num_spins(), + sg.graph().num_edges() + ); + println!(" Each logic gate becomes a spin glass gadget."); + println!(" AND gadget: 3 spins with J=[1,-2,-2], h=[-1,-1,2]"); + println!(" Ground states encode valid truth table entries."); + + // 3. Solve the target SpinGlass problem + let solver = BruteForce::new(); + let sg_solutions = solver.find_best(sg); + println!("\n=== Solution ==="); + println!("Target SpinGlass ground states found: {}", sg_solutions.len()); + + // 4. 
Extract and verify source solutions + println!("\nAll extracted CircuitSAT solutions:"); + let mut valid_count = 0; + let mut solutions = Vec::new(); + for sg_sol in &sg_solutions { + let circuit_sol = reduction.extract_solution(sg_sol); + let size = circuit_sat.solution_size(&circuit_sol); + let var_names = circuit_sat.variable_names(); + let assignment_str: Vec = var_names + .iter() + .zip(circuit_sol.iter()) + .map(|(name, &val)| format!("{}={}", name, val)) + .collect(); + println!( + " SG config {:?} -> Circuit: [{}], valid: {}", + sg_sol, + assignment_str.join(", "), + size.is_valid + ); + if size.is_valid { + valid_count += 1; + solutions.push(SolutionPair { + source_config: circuit_sol, + target_config: sg_sol.clone(), + }); + } + } + println!( + "\n{}/{} SpinGlass ground states map to valid circuit assignments", + valid_count, + sg_solutions.len() + ); + assert!(valid_count > 0, "At least one ground state must be a valid circuit assignment"); + + println!("\nReduction verified successfully"); + + // 5. 
Export JSON + let overhead = lookup_overhead("CircuitSAT", "SpinGlass") + .expect("CircuitSAT -> SpinGlass overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: CircuitSAT::::NAME.to_string(), + variant: variant_to_map(CircuitSAT::::variant()), + instance: serde_json::json!({ + "num_gates": circuit_sat.circuit().num_assignments(), + "num_variables": circuit_sat.num_variables(), + }), + }, + target: ProblemSide { + problem: SpinGlass::::NAME.to_string(), + variant: variant_to_map(SpinGlass::::variant()), + instance: serde_json::json!({ + "num_spins": sg.num_variables(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("circuit_to_spinglass", &data, &results); +} diff --git a/examples/reduction_clique_to_ilp.rs b/examples/reduction_clique_to_ilp.rs new file mode 100644 index 00000000..9f78f0ca --- /dev/null +++ b/examples/reduction_clique_to_ilp.rs @@ -0,0 +1,92 @@ +//! # Clique to ILP Reduction +//! +//! ## Mathematical Formulation +//! Variables: x_v in {0,1} for each vertex v. +//! Constraints: x_u + x_v <= 1 for each non-edge (u,v) not in E. +//! Objective: maximize sum of w_v * x_v. +//! +//! ## This Example +//! - Instance: 4-vertex graph with a triangle subgraph on {0,1,2} plus vertex 3 +//! connected only to vertex 2. Edges: 0-1, 0-2, 1-2, 2-3. +//! - Source Clique: max clique is {0,1,2} (size 3) +//! - Target ILP: 4 binary variables, 3 non-edge constraints +//! (non-edges: (0,3), (1,3)) +//! +//! ## Output +//! Exports `docs/paper/examples/clique_to_ilp.json` for use in paper code blocks. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::solvers::BruteForceFloat; +use problemreductions::topology::SimpleGraph; + +fn main() { + // 1. Create Clique instance: 4 vertices, triangle {0,1,2} plus vertex 3 connected to 2 + let clique = Clique::::new(4, vec![(0, 1), (0, 2), (1, 2), (2, 3)]); + + // 2. 
Reduce to ILP + let reduction = ReduceTo::::reduce_to(&clique); + let ilp = reduction.target_problem(); + + // 3. Print transformation + println!("\n=== Problem Transformation ==="); + println!("Source: Clique with {} variables", clique.num_variables()); + println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); + + // 4. Solve target ILP + let solver = BruteForce::new(); + let ilp_solutions = solver.find_best_float(ilp); + println!("\n=== Solution ==="); + println!("ILP solutions found: {}", ilp_solutions.len()); + + let ilp_solution = &ilp_solutions[0].0; + println!("ILP solution: {:?}", ilp_solution); + + // 5. Extract source solution + let clique_solution = reduction.extract_solution(ilp_solution); + println!("Source Clique solution: {:?}", clique_solution); + + // 6. Verify + let size = clique.solution_size(&clique_solution); + println!("Solution valid: {}, size: {:?}", size.is_valid, size.size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // 7. 
Collect solutions and export JSON + let mut solutions = Vec::new(); + for (target_config, _score) in &ilp_solutions { + let source_sol = reduction.extract_solution(target_config); + let s = clique.solution_size(&source_sol); + assert!(s.is_valid); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_config.clone(), + }); + } + + let overhead = lookup_overhead_or_empty("Clique", "ILP"); + + let data = ReductionData { + source: ProblemSide { + problem: Clique::::NAME.to_string(), + variant: variant_to_map(Clique::::variant()), + instance: serde_json::json!({ + "num_vertices": clique.num_vertices(), + "num_edges": clique.num_edges(), + "edges": clique.edges(), + }), + }, + target: ProblemSide { + problem: ILP::NAME.to_string(), + variant: variant_to_map(ILP::variant()), + instance: serde_json::json!({ + "num_vars": ilp.num_vars, + "num_constraints": ilp.constraints.len(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("clique_to_ilp", &data, &results); +} diff --git a/examples/reduction_coloring_to_ilp.rs b/examples/reduction_coloring_to_ilp.rs new file mode 100644 index 00000000..b5daaf80 --- /dev/null +++ b/examples/reduction_coloring_to_ilp.rs @@ -0,0 +1,93 @@ +//! # K-Coloring to ILP Reduction +//! +//! ## Mathematical Formulation +//! Variables: x_{v,c} in {0,1} for each vertex v and color c. +//! Constraints: +//! (1) sum_c x_{v,c} = 1 for each vertex v (exactly one color). +//! (2) x_{u,c} + x_{v,c} <= 1 for each edge (u,v) and color c (different colors on adjacent). +//! Objective: feasibility (minimize 0). +//! +//! ## This Example +//! - Instance: Triangle K3 (3 vertices, 3 edges) with 3 colors +//! - Source KColoring: feasible, each vertex gets a distinct color +//! - Target ILP: 9 binary variables (3 vertices * 3 colors), 12 constraints +//! +//! ## Output +//! Exports `docs/paper/examples/coloring_to_ilp.json` for use in paper code blocks. 
+ +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::solvers::BruteForceFloat; +use problemreductions::topology::SimpleGraph; + +fn main() { + // 1. Create KColoring instance: triangle K3 with 3 colors + let coloring = KColoring::<3, SimpleGraph, i32>::new(3, vec![(0, 1), (1, 2), (0, 2)]); + + // 2. Reduce to ILP + let reduction = ReduceTo::::reduce_to(&coloring); + let ilp = reduction.target_problem(); + + // 3. Print transformation + println!("\n=== Problem Transformation ==="); + println!("Source: KColoring<3> with {} variables", coloring.num_variables()); + println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); + + // 4. Solve target ILP + let solver = BruteForce::new(); + let ilp_solutions = solver.find_best_float(ilp); + println!("\n=== Solution ==="); + println!("ILP solutions found: {}", ilp_solutions.len()); + + let ilp_solution = &ilp_solutions[0].0; + println!("ILP solution: {:?}", ilp_solution); + + // 5. Extract source solution + let coloring_solution = reduction.extract_solution(ilp_solution); + println!("Source Coloring solution: {:?}", coloring_solution); + + // 6. Verify + let size = coloring.solution_size(&coloring_solution); + println!("Solution valid: {}, size: {:?}", size.is_valid, size.size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // 7. 
Collect solutions and export JSON + let mut solutions = Vec::new(); + for (target_config, _score) in &ilp_solutions { + let source_sol = reduction.extract_solution(target_config); + let s = coloring.solution_size(&source_sol); + assert!(s.is_valid); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_config.clone(), + }); + } + + let overhead = lookup_overhead("KColoring", "ILP") + .expect("KColoring -> ILP overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: KColoring::<3, SimpleGraph, i32>::NAME.to_string(), + variant: variant_to_map(KColoring::<3, SimpleGraph, i32>::variant()), + instance: serde_json::json!({ + "num_vertices": coloring.num_vertices(), + "num_edges": coloring.num_edges(), + "num_colors": 3, + }), + }, + target: ProblemSide { + problem: ILP::NAME.to_string(), + variant: variant_to_map(ILP::variant()), + instance: serde_json::json!({ + "num_vars": ilp.num_vars, + "num_constraints": ilp.constraints.len(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("coloring_to_ilp", &data, &results); +} diff --git a/examples/reduction_coloring_to_qubo.rs b/examples/reduction_coloring_to_qubo.rs new file mode 100644 index 00000000..4427745b --- /dev/null +++ b/examples/reduction_coloring_to_qubo.rs @@ -0,0 +1,119 @@ +//! # K-Coloring to QUBO Reduction (Penalty Method) +//! +//! ## Mathematical Relationship +//! The K-Coloring problem on a graph G = (V, E) with K colors is mapped to QUBO +//! using a one-hot encoding. Each vertex i has K binary variables x_{i,c} for +//! c = 0..K-1, with penalties enforcing: +//! +//! 1. One-hot constraint: each vertex gets exactly one color +//! P1 * sum_i (1 - sum_c x_{i,c})^2 +//! +//! 2. Edge constraint: adjacent vertices get different colors +//! P2 * sum_{(i,j) in E} sum_c x_{i,c} * x_{j,c} +//! +//! The QUBO has n*K variables (n vertices, K colors). +//! +//! ## This Example +//! 
- Instance: Complete graph K3 (triangle) with 3 colors +//! - Source: KColoring<3> on 3 vertices, 3 edges +//! - QUBO variables: 9 (3 vertices x 3 colors, one-hot encoding) +//! - Expected: 6 valid 3-colorings (3! = 6 permutations of 3 colors on 3 vertices) +//! +//! ## Outputs +//! - `docs/paper/examples/coloring_to_qubo.json` — reduction structure +//! - `docs/paper/examples/coloring_to_qubo.result.json` — solutions +//! +//! ## Usage +//! ```bash +//! cargo run --example reduction_coloring_to_qubo +//! ``` + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + println!("=== K-Coloring -> QUBO Reduction ===\n"); + + // Triangle K3: all 3 vertices are adjacent + let edges = vec![(0, 1), (1, 2), (0, 2)]; + let kc = KColoring::<3, SimpleGraph, i32>::new(3, edges.clone()); + + // Reduce to QUBO + let reduction = ReduceTo::::reduce_to(&kc); + let qubo = reduction.target_problem(); + + let colors = ["Red", "Green", "Blue"]; + println!("Source: KColoring<3> on triangle K3 (3 vertices, 3 edges)"); + println!( + "Target: QUBO with {} variables (one-hot: 3 vertices x 3 colors)", + qubo.num_variables() + ); + println!("Q matrix:"); + for row in qubo.matrix() { + let formatted: Vec = row.iter().map(|v| format!("{:6.1}", v)).collect(); + println!(" [{}]", formatted.join(", ")); + } + + // Solve QUBO with brute force + let solver = BruteForce::new(); + let qubo_solutions = solver.find_best(qubo); + + // Extract and verify solutions + println!("\nValid 3-colorings: {}", qubo_solutions.len()); + let mut solutions = Vec::new(); + for sol in &qubo_solutions { + let extracted = reduction.extract_solution(sol); + let coloring: Vec = extracted + .iter() + .enumerate() + .map(|(i, &c)| format!("V{}={}", i, colors[c])) + .collect(); + println!(" {}", coloring.join(", ")); + + // Closed-loop verification: check solution is valid in original problem + let sol_size = kc.solution_size(&extracted); + 
assert!(sol_size.is_valid, "Coloring must be valid in source problem"); + + solutions.push(SolutionPair { + source_config: extracted, + target_config: sol.clone(), + }); + } + + // K3 with 3 colors has exactly 3! = 6 valid colorings + assert_eq!( + qubo_solutions.len(), + 6, + "Triangle K3 with 3 colors should have exactly 6 valid colorings" + ); + println!("\nVerification passed: 6 valid colorings found"); + + // Export JSON + let overhead = lookup_overhead("KColoring", "QUBO") + .expect("KColoring -> QUBO overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: KColoring::<3, SimpleGraph, i32>::NAME.to_string(), + variant: variant_to_map(KColoring::<3, SimpleGraph, i32>::variant()), + instance: serde_json::json!({ + "num_vertices": kc.num_vertices(), + "num_edges": kc.num_edges(), + "num_colors": 3, + }), + }, + target: ProblemSide { + problem: QUBO::::NAME.to_string(), + variant: variant_to_map(QUBO::::variant()), + instance: serde_json::json!({ + "num_vars": qubo.num_vars(), + "matrix": qubo.matrix(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("coloring_to_qubo", &data, &results); +} diff --git a/examples/reduction_dominatingset_to_ilp.rs b/examples/reduction_dominatingset_to_ilp.rs new file mode 100644 index 00000000..52fb88ba --- /dev/null +++ b/examples/reduction_dominatingset_to_ilp.rs @@ -0,0 +1,90 @@ +//! # Dominating Set to ILP Reduction +//! +//! ## Mathematical Formulation +//! Variables: x_v in {0,1} for each vertex v. +//! Constraints: x_v + sum_{u in N(v)} x_u >= 1 for each vertex v. +//! Objective: minimize sum of w_v * x_v. +//! +//! ## This Example +//! - Instance: Path graph P4 (4 vertices, 3 edges: 0-1-2-3) +//! - Source DominatingSet: min dominating set size 2 (e.g., {1,2}) +//! - Target ILP: 4 binary variables, 4 domination constraints +//! +//! ## Output +//! 
Exports `docs/paper/examples/dominatingset_to_ilp.json` for use in paper code blocks. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::solvers::BruteForceFloat; +use problemreductions::topology::SimpleGraph; + +fn main() { + // 1. Create DominatingSet instance: path graph P4 + let ds = DominatingSet::::new(4, vec![(0, 1), (1, 2), (2, 3)]); + + // 2. Reduce to ILP + let reduction = ReduceTo::::reduce_to(&ds); + let ilp = reduction.target_problem(); + + // 3. Print transformation + println!("\n=== Problem Transformation ==="); + println!("Source: DominatingSet with {} variables", ds.num_variables()); + println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); + + // 4. Solve target ILP + let solver = BruteForce::new(); + let ilp_solutions = solver.find_best_float(ilp); + println!("\n=== Solution ==="); + println!("ILP solutions found: {}", ilp_solutions.len()); + + let ilp_solution = &ilp_solutions[0].0; + println!("ILP solution: {:?}", ilp_solution); + + // 5. Extract source solution + let ds_solution = reduction.extract_solution(ilp_solution); + println!("Source DominatingSet solution: {:?}", ds_solution); + + // 6. Verify + let size = ds.solution_size(&ds_solution); + println!("Solution valid: {}, size: {:?}", size.is_valid, size.size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // 7. 
Collect solutions and export JSON + let mut solutions = Vec::new(); + for (target_config, _score) in &ilp_solutions { + let source_sol = reduction.extract_solution(target_config); + let s = ds.solution_size(&source_sol); + assert!(s.is_valid); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_config.clone(), + }); + } + + let overhead = lookup_overhead_or_empty("DominatingSet", "ILP"); + + let data = ReductionData { + source: ProblemSide { + problem: DominatingSet::::NAME.to_string(), + variant: variant_to_map(DominatingSet::::variant()), + instance: serde_json::json!({ + "num_vertices": ds.num_vertices(), + "num_edges": ds.num_edges(), + "edges": ds.edges(), + }), + }, + target: ProblemSide { + problem: ILP::NAME.to_string(), + variant: variant_to_map(ILP::variant()), + instance: serde_json::json!({ + "num_vars": ilp.num_vars, + "num_constraints": ilp.constraints.len(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("dominatingset_to_ilp", &data, &results); +} diff --git a/examples/reduction_factoring_to_circuit.rs b/examples/reduction_factoring_to_circuit.rs new file mode 100644 index 00000000..37cb5567 --- /dev/null +++ b/examples/reduction_factoring_to_circuit.rs @@ -0,0 +1,193 @@ +//! # Factoring to Circuit-SAT Reduction +//! +//! ## Mathematical Equivalence +//! Builds an array multiplier circuit for p * q = N. The circuit is satisfiable +//! iff N can be factored within the given bit bounds. +//! +//! ## This Example +//! - Instance: Factor 6 = 2 * 3 (m=2 bits, n=2 bits) +//! - Reference: Based on ProblemReductions.jl factoring example +//! - Source: Factoring(2, 2, 6) +//! - Target: CircuitSAT +//! +//! We solve the source Factoring problem directly with BruteForce (only 4 binary +//! variables), then verify the reduction produces a valid CircuitSAT encoding by +//! simulating the circuit forward from a known factorization to build a complete +//! 
satisfying assignment. +//! +//! ## Output +//! Exports `docs/paper/examples/factoring_to_circuit.json` for use in paper code blocks. + +use problemreductions::export::*; +use problemreductions::models::specialized::Circuit; +use problemreductions::prelude::*; +use std::collections::HashMap; + +/// Simulate a circuit forward: given input variable values, compute all internal +/// variable values by evaluating each assignment in order. +fn simulate_circuit( + circuit: &Circuit, + initial_assignments: &HashMap, +) -> HashMap { + let mut values = initial_assignments.clone(); + for assignment in &circuit.assignments { + let result = assignment.expr.evaluate(&values); + for output in &assignment.outputs { + values.insert(output.clone(), result); + } + } + values +} + +fn main() { + // 1. Create Factoring instance: factor 6 with 2-bit factors + // Possible: 2*3=6 or 3*2=6 + let factoring = Factoring::new(2, 2, 6); + + println!("=== Factoring to Circuit-SAT Reduction ===\n"); + println!( + "Source: Factor {} with {}-bit * {}-bit factors", + factoring.target(), + factoring.m(), + factoring.n() + ); + println!( + " {} total variables ({} bits for p, {} bits for q)", + factoring.num_variables(), + factoring.m(), + factoring.n() + ); + + // 2. Solve the source Factoring problem directly (only 4 binary variables) + let solver = BruteForce::new(); + let factoring_solutions = solver.find_best(&factoring); + println!("\nFactoring solutions found: {}", factoring_solutions.len()); + for sol in &factoring_solutions { + let (a, b) = factoring.read_factors(sol); + println!(" p={}, q={} -> {} * {} = {}", a, b, a, b, a * b); + } + + // 3. 
Reduce Factoring -> CircuitSAT + let reduction = ReduceTo::>::reduce_to(&factoring); + let circuit_sat = reduction.target_problem(); + + println!("\n=== Factoring -> CircuitSAT ==="); + println!( + "CircuitSAT: {} variables, {} assignments (gates)", + circuit_sat.num_variables(), + circuit_sat.circuit().num_assignments() + ); + println!( + " The multiplier circuit computes p * q and constrains output = {}.", + factoring.target() + ); + + // 4. Verify using forward simulation + // Take a known valid factorization, set the input variables (p and q bits), + // and simulate the circuit to get all internal variable values. + let factoring_sol = &factoring_solutions[0]; + let (a, b) = factoring.read_factors(factoring_sol); + println!("\n=== Forward Simulation Verification ==="); + println!( + "Known factorization: {} * {} = {} (bits: {:?})", + a, b, a * b, factoring_sol + ); + + // Set input variables: p1, p2 for first factor, q1, q2 for second factor + let mut input_values: HashMap = HashMap::new(); + for (i, &bit) in factoring_sol.iter().enumerate().take(factoring.m()) { + input_values.insert(format!("p{}", i + 1), bit == 1); + } + for (i, &bit) in factoring_sol[factoring.m()..].iter().enumerate().take(factoring.n()) { + input_values.insert(format!("q{}", i + 1), bit == 1); + } + println!("Input variables: {:?}", input_values); + + // Simulate the circuit forward + let all_values = simulate_circuit(circuit_sat.circuit(), &input_values); + + // Convert to a config vector matching CircuitSAT variable order + let var_names = circuit_sat.variable_names(); + let circuit_config: Vec = var_names + .iter() + .map(|name| { + if *all_values.get(name).unwrap_or(&false) { + 1 + } else { + 0 + } + }) + .collect(); + + // Verify the circuit is satisfied + let circuit_size = circuit_sat.solution_size(&circuit_config); + println!("Circuit satisfied: {}", circuit_size.is_valid); + assert!( + circuit_size.is_valid, + "Forward-simulated circuit assignment must satisfy all gates" + ); + + 
// Verify extraction round-trips correctly + let extracted = reduction.extract_solution(&circuit_config); + println!("Extracted factoring solution: {:?}", extracted); + let (ea, eb) = factoring.read_factors(&extracted); + println!("Extracted factors: {} * {} = {}", ea, eb, ea * eb); + assert_eq!(ea * eb, factoring.target(), "Round-trip must preserve factorization"); + + // 5. Verify all factoring solutions can be simulated through the circuit + println!("\nVerifying all {} factoring solutions through circuit:", factoring_solutions.len()); + let mut solutions = Vec::new(); + for sol in &factoring_solutions { + let (fa, fb) = factoring.read_factors(sol); + let mut inputs: HashMap = HashMap::new(); + for (i, &bit) in sol.iter().enumerate().take(factoring.m()) { + inputs.insert(format!("p{}", i + 1), bit == 1); + } + for (i, &bit) in sol[factoring.m()..].iter().enumerate().take(factoring.n()) { + inputs.insert(format!("q{}", i + 1), bit == 1); + } + let vals = simulate_circuit(circuit_sat.circuit(), &inputs); + let config: Vec = var_names + .iter() + .map(|name| if *vals.get(name).unwrap_or(&false) { 1 } else { 0 }) + .collect(); + let sz = circuit_sat.solution_size(&config); + println!(" {} * {} = {}: circuit satisfied = {}", fa, fb, fa * fb, sz.is_valid); + assert!(sz.is_valid); + + solutions.push(SolutionPair { + source_config: sol.clone(), + target_config: config, + }); + } + + println!("\nReduction verified successfully: {} = {} * {}", factoring.target(), a, b); + + // 6. 
Export JSON + let overhead = lookup_overhead("Factoring", "CircuitSAT") + .expect("Factoring -> CircuitSAT overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: Factoring::NAME.to_string(), + variant: variant_to_map(Factoring::variant()), + instance: serde_json::json!({ + "number": factoring.target(), + "num_bits_first": factoring.m(), + "num_bits_second": factoring.n(), + }), + }, + target: ProblemSide { + problem: CircuitSAT::::NAME.to_string(), + variant: variant_to_map(CircuitSAT::::variant()), + instance: serde_json::json!({ + "num_variables": circuit_sat.num_variables(), + "num_gates": circuit_sat.circuit().num_assignments(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("factoring_to_circuit", &data, &results); +} diff --git a/examples/reduction_factoring_to_ilp.rs b/examples/reduction_factoring_to_ilp.rs new file mode 100644 index 00000000..00624d1d --- /dev/null +++ b/examples/reduction_factoring_to_ilp.rs @@ -0,0 +1,85 @@ +//! # Factoring to ILP Reduction +//! +//! ## Mathematical Formulation +//! Uses McCormick linearization for binary products with carry propagation. +//! Variables: p_i, q_j (factor bits), z_ij (product bits), c_k (carries). +//! Constraints: +//! (1) McCormick: z_ij <= p_i, z_ij <= q_j, z_ij >= p_i + q_j - 1 +//! (2) Bit equations: sum_{i+j=k} z_ij + c_{k-1} = N_k + 2*c_k +//! (3) No overflow: c_{m+n-1} = 0 +//! Objective: feasibility (minimize 0). +//! +//! ## This Example +//! - Instance: Factor 15 = 3 * 5 (m=4 bits, n=4 bits) +//! - NOTE: Uses ILPSolver (not BruteForce) since the ILP has many variables +//! - Target ILP: 4+4+16+8 = 32 variables +//! +//! ## Output +//! Exports `docs/paper/examples/factoring_to_ilp.json` for use in paper code blocks. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::solvers::ILPSolver; + +fn main() { + // 1. 
Create Factoring instance: find p (4-bit) x q (4-bit) = 15 + let problem = Factoring::new(4, 4, 15); + + // 2. Reduce to ILP + let reduction = ReduceTo::::reduce_to(&problem); + let ilp = reduction.target_problem(); + + // 3. Print transformation + println!("\n=== Problem Transformation ==="); + println!("Source: Factoring with {} variables ({}+{} bits)", problem.num_variables(), problem.m(), problem.n()); + println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); + + // 4. Solve ILP using ILPSolver (too many variables for BruteForce) + let solver = ILPSolver::new(); + let ilp_solution = solver.solve(ilp).expect("ILP should be feasible for 15 = 3 * 5"); + println!("\n=== Solution ==="); + println!("ILP solution found (first 8 vars): {:?}", &ilp_solution[..8]); + + // 5. Extract factoring solution + let extracted = reduction.extract_solution(&ilp_solution); + println!("Source Factoring solution: {:?}", extracted); + + // 6. Verify: read factors and confirm p * q = 15 + let (p, q) = problem.read_factors(&extracted); + println!("Factors: {} x {} = {}", p, q, p * q); + assert_eq!(p * q, 15); + println!("\nReduction verified successfully"); + + // 7. 
Collect solutions and export JSON + let solutions = vec![SolutionPair { + source_config: extracted, + target_config: ilp_solution, + }]; + + let overhead = lookup_overhead("Factoring", "ILP") + .expect("Factoring -> ILP overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: Factoring::NAME.to_string(), + variant: variant_to_map(Factoring::variant()), + instance: serde_json::json!({ + "number": problem.target(), + "num_bits_first": problem.m(), + "num_bits_second": problem.n(), + }), + }, + target: ProblemSide { + problem: ILP::NAME.to_string(), + variant: variant_to_map(ILP::variant()), + instance: serde_json::json!({ + "num_vars": ilp.num_vars, + "num_constraints": ilp.constraints.len(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("factoring_to_ilp", &data, &results); +} diff --git a/examples/reduction_ilp_to_qubo.rs b/examples/reduction_ilp_to_qubo.rs new file mode 100644 index 00000000..410e3f45 --- /dev/null +++ b/examples/reduction_ilp_to_qubo.rs @@ -0,0 +1,130 @@ +//! # Integer Linear Programming (Binary) to QUBO Reduction (Penalty Method) +//! +//! ## Mathematical Relationship +//! A binary ILP problem: +//! +//! maximize c^T x +//! subject to A x <= b +//! x_i in {0, 1} +//! +//! is mapped to QUBO by introducing slack variables to convert inequality +//! constraints into equalities, then penalizing constraint violations: +//! +//! H(x, s) = -c^T x + P * sum_j (a_j^T x + s_j - b_j)^2 +//! +//! where s_j are slack variables encoded in binary. The penalty P is chosen +//! large enough to ensure feasibility is always preferred over infeasible +//! solutions with better objective values. +//! +//! ## This Example +//! - Instance: 3 binary projects with values [1, 2, 3] +//! - Constraint 1: x0 + x1 <= 1 (projects 0 and 1 share a team) +//! - Constraint 2: x1 + x2 <= 1 (projects 1 and 2 share equipment) +//! - Objective: maximize 1*x0 + 2*x1 + 3*x2 +//! 
- Expected: Select projects {0, 2} for total value 4 (x0 and x2 don't +//! share resources) +//! +//! ## Outputs +//! - `docs/paper/examples/ilp_to_qubo.json` — reduction structure +//! - `docs/paper/examples/ilp_to_qubo.result.json` — solutions +//! +//! ## Usage +//! ```bash +//! cargo run --example reduction_ilp_to_qubo +//! ``` + +use problemreductions::export::*; +use problemreductions::prelude::*; + +fn main() { + println!("=== ILP (Binary) -> QUBO Reduction ===\n"); + + // 3 projects with values 1, 2, 3 + // Constraint 1: x0 + x1 <= 1 (shared team) + // Constraint 2: x1 + x2 <= 1 (shared equipment) + let ilp = ILP::binary( + 3, + vec![ + LinearConstraint::le(vec![(0, 1.0), (1, 1.0)], 1.0), + LinearConstraint::le(vec![(1, 1.0), (2, 1.0)], 1.0), + ], + vec![(0, 1.0), (1, 2.0), (2, 3.0)], + ObjectiveSense::Maximize, + ); + + let project_names = ["Alpha", "Beta", "Gamma"]; + + // Reduce to QUBO + let reduction = ReduceTo::::reduce_to(&ilp); + let qubo = reduction.target_problem(); + + println!("Source: ILP (binary) with 3 variables, 2 constraints"); + println!(" Objective: maximize 1*x0 + 2*x1 + 3*x2"); + println!(" Constraint 1: x0 + x1 <= 1 (shared team)"); + println!(" Constraint 2: x1 + x2 <= 1 (shared equipment)"); + println!("Target: QUBO with {} variables", qubo.num_variables()); + println!("Q matrix ({}x{}):", qubo.matrix().len(), qubo.matrix().len()); + for row in qubo.matrix() { + let formatted: Vec = row.iter().map(|v| format!("{:7.1}", v)).collect(); + println!(" [{}]", formatted.join(", ")); + } + + // Solve QUBO with brute force + let solver = BruteForce::new(); + let qubo_solutions = solver.find_best(qubo); + + // Extract and verify solutions + println!("\nOptimal solutions:"); + let mut solutions = Vec::new(); + for sol in &qubo_solutions { + let extracted = reduction.extract_solution(sol); + let selected: Vec = extracted + .iter() + .enumerate() + .filter(|(_, &x)| x == 1) + .map(|(i, _)| project_names[i].to_string()) + .collect(); + let value = 
ilp.solution_size(&extracted).size; + println!( + " Selected projects: {:?} (total value: {:.0})", + selected, value + ); + + // Closed-loop verification: check solution is valid in original problem + let sol_size = ilp.solution_size(&extracted); + assert!(sol_size.is_valid, "Solution must be valid in source problem"); + + solutions.push(SolutionPair { + source_config: extracted, + target_config: sol.clone(), + }); + } + + println!("\nVerification passed: all solutions are feasible and optimal"); + + // Export JSON + let overhead = lookup_overhead("ILP", "QUBO") + .expect("ILP -> QUBO overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: ILP::NAME.to_string(), + variant: variant_to_map(ILP::variant()), + instance: serde_json::json!({ + "num_vars": ilp.num_vars, + }), + }, + target: ProblemSide { + problem: QUBO::::NAME.to_string(), + variant: variant_to_map(QUBO::::variant()), + instance: serde_json::json!({ + "num_vars": qubo.num_vars(), + "matrix": qubo.matrix(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("ilp_to_qubo", &data, &results); +} diff --git a/examples/reduction_is_to_ilp.rs b/examples/reduction_is_to_ilp.rs new file mode 100644 index 00000000..e6f1cb09 --- /dev/null +++ b/examples/reduction_is_to_ilp.rs @@ -0,0 +1,91 @@ +//! # Independent Set to ILP Reduction +//! +//! ## Mathematical Formulation +//! Variables: x_v in {0,1} for each vertex v. +//! Constraints: x_u + x_v <= 1 for each edge (u,v). +//! Objective: maximize sum of w_v * x_v. +//! +//! ## This Example +//! - Instance: Path graph P4 (4 vertices, 3 edges: 0-1-2-3) +//! - Source IS: max size 2 (e.g., {0,2}, {0,3}, {1,3}) +//! - Target ILP: 4 binary variables, 3 constraints +//! +//! ## Output +//! Exports `docs/paper/examples/is_to_ilp.json` for use in paper code blocks. 
+ +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::solvers::BruteForceFloat; +use problemreductions::topology::SimpleGraph; + +fn main() { + // 1. Create IS instance: path graph P4 + let edges = vec![(0, 1), (1, 2), (2, 3)]; + let is = IndependentSet::::new(4, edges.clone()); + + // 2. Reduce to ILP + let reduction = ReduceTo::::reduce_to(&is); + let ilp = reduction.target_problem(); + + // 3. Print transformation + println!("\n=== Problem Transformation ==="); + println!("Source: IndependentSet with {} variables", is.num_variables()); + println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); + + // 4. Solve target ILP (uses BruteForceFloat since ILP has f64 objective) + let solver = BruteForce::new(); + let ilp_solutions = solver.find_best_float(ilp); + println!("\n=== Solution ==="); + println!("ILP solutions found: {}", ilp_solutions.len()); + + let ilp_solution = &ilp_solutions[0].0; + println!("ILP solution: {:?}", ilp_solution); + + // 5. Extract source solution + let is_solution = reduction.extract_solution(ilp_solution); + println!("Source IS solution: {:?}", is_solution); + + // 6. Verify + let size = is.solution_size(&is_solution); + println!("Solution valid: {}, size: {:?}", size.is_valid, size.size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // 7. 
Collect solutions and export JSON + let mut solutions = Vec::new(); + for (target_config, _score) in &ilp_solutions { + let source_sol = reduction.extract_solution(target_config); + let s = is.solution_size(&source_sol); + assert!(s.is_valid); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_config.clone(), + }); + } + + let overhead = lookup_overhead_or_empty("IndependentSet", "ILP"); + + let data = ReductionData { + source: ProblemSide { + problem: IndependentSet::::NAME.to_string(), + variant: variant_to_map(IndependentSet::::variant()), + instance: serde_json::json!({ + "num_vertices": is.num_vertices(), + "num_edges": is.num_edges(), + "edges": edges, + }), + }, + target: ProblemSide { + problem: ILP::NAME.to_string(), + variant: variant_to_map(ILP::variant()), + instance: serde_json::json!({ + "num_vars": ilp.num_vars, + "num_constraints": ilp.constraints.len(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("is_to_ilp", &data, &results); +} diff --git a/examples/reduction_is_to_qubo.rs b/examples/reduction_is_to_qubo.rs new file mode 100644 index 00000000..0d51f0be --- /dev/null +++ b/examples/reduction_is_to_qubo.rs @@ -0,0 +1,105 @@ +//! # Independent Set to QUBO Reduction (Penalty Method) +//! +//! ## Mathematical Relationship +//! The Maximum Independent Set (MIS) problem on a graph G = (V, E) is mapped to +//! QUBO by constructing a penalty Hamiltonian: +//! +//! H(x) = -sum_{i in V} x_i + P * sum_{(i,j) in E} x_i * x_j +//! +//! where P > 1 is a penalty weight ensuring no two adjacent vertices are both +//! selected. The QUBO minimization finds configurations that maximize the +//! independent set size while respecting adjacency constraints. +//! +//! ## This Example +//! - Instance: Path graph P4 with 4 vertices and 3 edges (0-1-2-3) +//! - Source: IndependentSet with maximum size 2 (e.g., {0,2} or {1,3}) +//! 
- QUBO variables: 4 (one per vertex) +//! - Expected: Two optimal solutions of size 2: vertices {0,2} and {1,3} +//! +//! ## Outputs +//! - `docs/paper/examples/is_to_qubo.json` — reduction structure +//! - `docs/paper/examples/is_to_qubo.result.json` — solutions +//! +//! ## Usage +//! ```bash +//! cargo run --example reduction_is_to_qubo +//! ``` + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + println!("=== Independent Set -> QUBO Reduction ===\n"); + + // Path graph P4: 0-1-2-3 + let edges = vec![(0, 1), (1, 2), (2, 3)]; + let is = IndependentSet::::new(4, edges.clone()); + + // Reduce to QUBO + let reduction = ReduceTo::::reduce_to(&is); + let qubo = reduction.target_problem(); + + println!("Source: IndependentSet on path P4 (4 vertices, 3 edges)"); + println!("Target: QUBO with {} variables", qubo.num_variables()); + println!("Q matrix:"); + for row in qubo.matrix() { + println!(" {:?}", row); + } + + // Solve QUBO with brute force + let solver = BruteForce::new(); + let qubo_solutions = solver.find_best(qubo); + + // Extract and verify solutions + println!("\nOptimal solutions:"); + let mut solutions = Vec::new(); + for sol in &qubo_solutions { + let extracted = reduction.extract_solution(sol); + let sol_size = is.solution_size(&extracted); + assert!(sol_size.is_valid, "Solution must be valid in source problem"); + + let selected: Vec = extracted + .iter() + .enumerate() + .filter(|(_, &x)| x == 1) + .map(|(i, _)| i) + .collect(); + println!(" Vertices: {:?} (size {})", selected, selected.len()); + + solutions.push(SolutionPair { + source_config: extracted, + target_config: sol.clone(), + }); + } + + println!("\nVerification passed: all solutions are valid"); + + // Export JSON + let overhead = lookup_overhead("IndependentSet", "QUBO") + .expect("IndependentSet -> QUBO overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: 
IndependentSet::::NAME.to_string(), + variant: variant_to_map(IndependentSet::::variant()), + instance: serde_json::json!({ + "num_vertices": is.num_vertices(), + "num_edges": is.num_edges(), + "edges": edges, + }), + }, + target: ProblemSide { + problem: QUBO::::NAME.to_string(), + variant: variant_to_map(QUBO::::variant()), + instance: serde_json::json!({ + "num_vars": qubo.num_vars(), + "matrix": qubo.matrix(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("is_to_qubo", &data, &results); +} diff --git a/examples/reduction_is_to_setpacking.rs b/examples/reduction_is_to_setpacking.rs new file mode 100644 index 00000000..9b758813 --- /dev/null +++ b/examples/reduction_is_to_setpacking.rs @@ -0,0 +1,110 @@ +//! # Independent Set to Set Packing Reduction +//! +//! ## Mathematical Equivalence +//! For each vertex v, create a set S_v of edges incident to v. Universe U = E. +//! Selecting vertex v means selecting S_v. Independent vertices have disjoint +//! incident edge sets, so IS maps to set packing with identical optimal value. +//! +//! ## This Example +//! - Instance: Path graph P4 (4 vertices, 3 edges: (0,1), (1,2), (2,3)) +//! - Source IS: max size 2 +//! - Target SetPacking: max packing 2 +//! +//! ## Output +//! Exports `docs/paper/examples/is_to_setpacking.json` and `is_to_setpacking.result.json`. +//! +//! See docs/paper/reductions.typ for the full reduction specification. 
+ +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + println!("\n=== Independent Set -> Set Packing Reduction ===\n"); + + // Path graph P4: 0-1-2-3 + let edges = vec![(0, 1), (1, 2), (2, 3)]; + let source = IndependentSet::::new(4, edges.clone()); + + println!("Source: IndependentSet on P4"); + println!(" Vertices: 4"); + println!(" Edges: {:?}", edges); + + // Reduce to SetPacking + let reduction = ReduceTo::>::reduce_to(&source); + let target = reduction.target_problem(); + + println!("\nTarget: SetPacking"); + println!(" Sets: {} sets", target.num_sets()); + for (i, set) in target.sets().iter().enumerate() { + println!(" S_{} = {:?}", i, set); + } + + // Solve the target problem + let solver = BruteForce::new(); + let target_solutions = solver.find_best(target); + + println!("\nBest target solutions: {}", target_solutions.len()); + + // Extract and verify each solution + let mut solutions = Vec::new(); + for (i, target_sol) in target_solutions.iter().enumerate() { + let source_sol = reduction.extract_solution(target_sol); + let source_size = source.solution_size(&source_sol); + let target_size = target.solution_size(target_sol); + + println!( + " Solution {}: target={:?} (size={}), source={:?} (size={}, valid={})", + i, target_sol, target_size.size, source_sol, source_size.size, source_size.is_valid + ); + + assert!( + source_size.is_valid, + "Extracted source solution must be valid" + ); + + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_sol.clone(), + }); + } + + // Use the first solution for additional assertions + let target_sol = &target_solutions[0]; + let source_sol = reduction.extract_solution(target_sol); + let source_size = source.solution_size(&source_sol); + let target_size = target.solution_size(target_sol); + + assert_eq!(source_size.size, 2, "IS on P4 has optimal size 2"); + assert_eq!(target_size.size, 2, "SetPacking should 
also have size 2"); + + // Export JSON + let overhead = lookup_overhead("IndependentSet", "SetPacking") + .expect("IndependentSet -> SetPacking overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: IndependentSet::::NAME.to_string(), + variant: variant_to_map(IndependentSet::::variant()), + instance: serde_json::json!({ + "num_vertices": source.num_vertices(), + "num_edges": source.num_edges(), + "edges": edges, + }), + }, + target: ProblemSide { + problem: SetPacking::::NAME.to_string(), + variant: variant_to_map(SetPacking::::variant()), + instance: serde_json::json!({ + "num_sets": target.num_sets(), + "sets": target.sets(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("is_to_setpacking", &data, &results); + + println!("\nDone: IS(P4) optimal=2 maps to SetPacking optimal=2"); +} diff --git a/examples/reduction_is_to_vc.rs b/examples/reduction_is_to_vc.rs new file mode 100644 index 00000000..318f7049 --- /dev/null +++ b/examples/reduction_is_to_vc.rs @@ -0,0 +1,83 @@ +//! # Independent Set to Vertex Cover Reduction +//! +//! ## Mathematical Equivalence +//! S ⊆ V is an independent set iff V \ S is a vertex cover. The complement +//! operation preserves optimality since |IS| + |VC| = |V| is constant. +//! +//! ## This Example +//! - Instance: Path graph P4 (4 vertices, 3 edges) +//! - Source IS: max size 2 (e.g., {0, 2} or {0, 3} or {1, 3}) +//! - Target VC: min size 2 +//! +//! ## Output +//! Exports `docs/paper/examples/is_to_vc.json` and `is_to_vc.result.json`. +//! +//! See docs/paper/reductions.typ for the full reduction specification. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + // 1. Create IS instance: path graph P4 + let edges = vec![(0, 1), (1, 2), (2, 3)]; + let is = IndependentSet::::new(4, edges.clone()); + + // 2. 
Reduce to VC + let reduction = ReduceTo::>::reduce_to(&is); + let vc = reduction.target_problem(); + + // 3. Print transformation + println!("\n=== Problem Transformation ==="); + println!("Source: IndependentSet with {} variables", is.num_variables()); + println!("Target: VertexCovering with {} variables", vc.num_variables()); + + // 4. Solve target + let solver = BruteForce::new(); + let vc_solutions = solver.find_best(vc); + println!("\n=== Solution ==="); + println!("Target solutions found: {}", vc_solutions.len()); + + // 5. Extract and verify solutions + let mut solutions = Vec::new(); + for target_sol in &vc_solutions { + let source_sol = reduction.extract_solution(target_sol); + let size = is.solution_size(&source_sol); + assert!(size.is_valid); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_sol.clone(), + }); + } + println!("Reduction verified successfully"); + + // 6. Export JSON + let overhead = lookup_overhead("IndependentSet", "VertexCovering") + .expect("IndependentSet -> VertexCovering overhead not found"); + let vc_edges = vc.edges(); + + let data = ReductionData { + source: ProblemSide { + problem: IndependentSet::::NAME.to_string(), + variant: variant_to_map(IndependentSet::::variant()), + instance: serde_json::json!({ + "num_vertices": is.num_vertices(), + "num_edges": is.num_edges(), + "edges": edges, + }), + }, + target: ProblemSide { + problem: VertexCovering::::NAME.to_string(), + variant: variant_to_map(VertexCovering::::variant()), + instance: serde_json::json!({ + "num_vertices": vc.num_vertices(), + "num_edges": vc.num_edges(), + "edges": vc_edges, + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("is_to_vc", &data, &results); +} diff --git a/examples/reduction_ksatisfiability_to_qubo.rs b/examples/reduction_ksatisfiability_to_qubo.rs new file mode 100644 index 00000000..c377de1e --- /dev/null +++ 
b/examples/reduction_ksatisfiability_to_qubo.rs @@ -0,0 +1,126 @@ +//! # K-Satisfiability (2-SAT) to QUBO Reduction (Penalty Method) +//! +//! ## Mathematical Relationship +//! The Maximum K-Satisfiability problem maps a CNF formula with k-literal clauses +//! to QUBO. Each clause C_j = (l_1 OR l_2 OR ... OR l_k) contributes a penalty +//! term that is minimized when the clause is satisfied: +//! +//! H_j(x) = product_{l in C_j} (1 - l) +//! +//! where l = x_i for positive literal and l = (1 - x_i) for negated literal. +//! The total QUBO Hamiltonian H = -sum_j H_j is minimized when the maximum +//! number of clauses is satisfied. +//! +//! ## This Example +//! - Instance: 2-SAT with 3 variables and 4 clauses +//! - C1: x1 OR x2 +//! - C2: NOT x1 OR x3 +//! - C3: x2 OR NOT x3 +//! - C4: NOT x2 OR NOT x3 +//! - QUBO variables: 3 (one per Boolean variable) +//! - Expected: Assignments satisfying all 4 clauses (if possible) or +//! maximizing satisfied clauses +//! +//! ## Outputs +//! - `docs/paper/examples/ksatisfiability_to_qubo.json` — reduction structure +//! - `docs/paper/examples/ksatisfiability_to_qubo.result.json` — solutions +//! +//! ## Usage +//! ```bash +//! cargo run --example reduction_ksatisfiability_to_qubo +//! 
``` + +use problemreductions::export::*; +use problemreductions::prelude::*; + +fn main() { + println!("=== K-Satisfiability (2-SAT) -> QUBO Reduction ===\n"); + + // 4 clauses over 3 variables + let clauses = vec![ + CNFClause::new(vec![1, 2]), // x1 OR x2 + CNFClause::new(vec![-1, 3]), // NOT x1 OR x3 + CNFClause::new(vec![2, -3]), // x2 OR NOT x3 + CNFClause::new(vec![-2, -3]), // NOT x2 OR NOT x3 + ]; + let clause_strings = [ + "x1 OR x2".to_string(), + "NOT x1 OR x3".to_string(), + "x2 OR NOT x3".to_string(), + "NOT x2 OR NOT x3".to_string(), + ]; + + let ksat = KSatisfiability::<2, i32>::new(3, clauses); + + // Reduce to QUBO + let reduction = ReduceTo::::reduce_to(&ksat); + let qubo = reduction.target_problem(); + + println!("Source: KSatisfiability<2> with 3 variables, 4 clauses"); + for (i, c) in clause_strings.iter().enumerate() { + println!(" C{}: {}", i + 1, c); + } + println!("Target: QUBO with {} variables", qubo.num_variables()); + println!("Q matrix:"); + for row in qubo.matrix() { + println!(" {:?}", row); + } + + // Solve QUBO with brute force + let solver = BruteForce::new(); + let qubo_solutions = solver.find_best(qubo); + + // Extract and verify solutions + println!("\nOptimal solutions:"); + let num_clauses = ksat.clauses().len(); + let mut solutions = Vec::new(); + for sol in &qubo_solutions { + let extracted = reduction.extract_solution(sol); + let assignment: Vec = extracted + .iter() + .map(|&x| if x == 1 { "ON".to_string() } else { "OFF".to_string() }) + .collect(); + let satisfied = ksat.solution_size(&extracted).size; + println!( + " Switches: [{}] -> {}/{} clauses satisfied", + assignment.join(", "), + satisfied, + num_clauses + ); + + solutions.push(SolutionPair { + source_config: extracted, + target_config: sol.clone(), + }); + } + + println!("\nVerification passed: all solutions maximize satisfied clauses"); + + // Export JSON + let overhead = lookup_overhead("KSatisfiability", "QUBO") + .expect("KSatisfiability -> QUBO overhead not 
found"); + + let data = ReductionData { + source: ProblemSide { + problem: KSatisfiability::<2, i32>::NAME.to_string(), + variant: variant_to_map(KSatisfiability::<2, i32>::variant()), + instance: serde_json::json!({ + "num_vars": ksat.num_vars(), + "num_clauses": ksat.clauses().len(), + "k": 2, + }), + }, + target: ProblemSide { + problem: QUBO::::NAME.to_string(), + variant: variant_to_map(QUBO::::variant()), + instance: serde_json::json!({ + "num_vars": qubo.num_vars(), + "matrix": qubo.matrix(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("ksatisfiability_to_qubo", &data, &results); +} diff --git a/examples/reduction_matching_to_ilp.rs b/examples/reduction_matching_to_ilp.rs new file mode 100644 index 00000000..d67e9ace --- /dev/null +++ b/examples/reduction_matching_to_ilp.rs @@ -0,0 +1,91 @@ +//! # Matching to ILP Reduction +//! +//! ## Mathematical Formulation +//! Variables: x_e in {0,1} for each edge e. +//! Constraints: sum_{e incident to v} x_e <= 1 for each vertex v. +//! Objective: maximize sum of w_e * x_e. +//! +//! ## This Example +//! - Instance: Path graph P4 (4 vertices, 3 edges: 0-1, 1-2, 2-3) +//! - Source Matching: max matching size 2 (e.g., {0-1, 2-3}) +//! - Target ILP: 3 binary variables (one per edge), 4 vertex constraints +//! +//! ## Output +//! Exports `docs/paper/examples/matching_to_ilp.json` for use in paper code blocks. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::solvers::BruteForceFloat; +use problemreductions::topology::SimpleGraph; + +fn main() { + // 1. Create Matching instance: path graph P4 with unit weights + let edges = vec![(0, 1), (1, 2), (2, 3)]; + let matching = Matching::::unweighted(4, edges.clone()); + + // 2. Reduce to ILP + let reduction = ReduceTo::::reduce_to(&matching); + let ilp = reduction.target_problem(); + + // 3. 
Print transformation + println!("\n=== Problem Transformation ==="); + println!("Source: Matching with {} variables (edges)", matching.num_variables()); + println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); + + // 4. Solve target ILP + let solver = BruteForce::new(); + let ilp_solutions = solver.find_best_float(ilp); + println!("\n=== Solution ==="); + println!("ILP solutions found: {}", ilp_solutions.len()); + + let ilp_solution = &ilp_solutions[0].0; + println!("ILP solution: {:?}", ilp_solution); + + // 5. Extract source solution + let matching_solution = reduction.extract_solution(ilp_solution); + println!("Source Matching solution: {:?}", matching_solution); + + // 6. Verify + let size = matching.solution_size(&matching_solution); + println!("Solution valid: {}, size: {:?}", size.is_valid, size.size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // 7. Collect solutions and export JSON + let mut solutions = Vec::new(); + for (target_config, _score) in &ilp_solutions { + let source_sol = reduction.extract_solution(target_config); + let s = matching.solution_size(&source_sol); + assert!(s.is_valid); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_config.clone(), + }); + } + + let overhead = lookup_overhead_or_empty("Matching", "ILP"); + + let data = ReductionData { + source: ProblemSide { + problem: Matching::::NAME.to_string(), + variant: variant_to_map(Matching::::variant()), + instance: serde_json::json!({ + "num_vertices": matching.num_vertices(), + "num_edges": matching.num_edges(), + "edges": edges, + }), + }, + target: ProblemSide { + problem: ILP::NAME.to_string(), + variant: variant_to_map(ILP::variant()), + instance: serde_json::json!({ + "num_vars": ilp.num_vars, + "num_constraints": ilp.constraints.len(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("matching_to_ilp", 
&data, &results); +} diff --git a/examples/reduction_matching_to_setpacking.rs b/examples/reduction_matching_to_setpacking.rs new file mode 100644 index 00000000..ee19de4a --- /dev/null +++ b/examples/reduction_matching_to_setpacking.rs @@ -0,0 +1,97 @@ +//! # Matching to Set Packing Reduction +//! +//! ## Mathematical Equivalence +//! Each edge e = (u,v) becomes a set S_e = {u, v}. Universe U = V. +//! A matching (edges with no shared vertices) maps to a packing (sets with +//! no shared elements) with the same weight. +//! +//! ## This Example +//! - Instance: Path graph P4 (4 vertices, 3 edges) with unit weights +//! - Source matching: max size 2 (e.g., edges {(0,1), (2,3)}) +//! - Target SetPacking: max packing 2 +//! +//! ## Output +//! Exports `docs/paper/examples/matching_to_setpacking.json` and `.result.json`. +//! +//! See docs/paper/reductions.typ for the full reduction specification. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + println!("\n=== Matching -> Set Packing Reduction ===\n"); + + // Path graph P4: 0-1-2-3 with unit weights + let edges = vec![(0, 1), (1, 2), (2, 3)]; + let source = Matching::::unweighted(4, edges.clone()); + + println!("Source: Matching on P4"); + println!(" Vertices: 4"); + println!(" Edges: {:?}", edges); + + // Reduce to SetPacking + let reduction = ReduceTo::>::reduce_to(&source); + let target = reduction.target_problem(); + + println!("\nTarget: SetPacking"); + println!(" Sets: {} sets", target.num_sets()); + for (i, set) in target.sets().iter().enumerate() { + println!(" S_{} = {:?}", i, set); + } + + // Solve the target problem + let solver = BruteForce::new(); + let target_solutions = solver.find_best(target); + + println!("\nBest target solutions: {}", target_solutions.len()); + + // Extract and verify each solution + let mut solutions = Vec::new(); + for (i, target_sol) in target_solutions.iter().enumerate() { + let source_sol = 
reduction.extract_solution(target_sol); + let source_size = source.solution_size(&source_sol); + let target_size = target.solution_size(target_sol); + + println!( + " Solution {}: target={:?} (size={}), source={:?} (size={}, valid={})", + i, target_sol, target_size.size, source_sol, source_size.size, source_size.is_valid + ); + assert!(source_size.is_valid, "Extracted source solution must be valid"); + + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_sol.clone(), + }); + } + + // Export JSON + let overhead = lookup_overhead("Matching", "SetPacking") + .expect("Matching -> SetPacking overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: Matching::::NAME.to_string(), + variant: variant_to_map(Matching::::variant()), + instance: serde_json::json!({ + "num_vertices": source.num_vertices(), + "num_edges": source.num_edges(), + "edges": edges, + }), + }, + target: ProblemSide { + problem: SetPacking::::NAME.to_string(), + variant: variant_to_map(SetPacking::::variant()), + instance: serde_json::json!({ + "num_sets": target.num_sets(), + "sets": target.sets(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("matching_to_setpacking", &data, &results); + + println!("\nDone: Matching(P4) optimal=2 maps to SetPacking optimal=2"); +} diff --git a/examples/reduction_maxcut_to_spinglass.rs b/examples/reduction_maxcut_to_spinglass.rs new file mode 100644 index 00000000..cddbfaaf --- /dev/null +++ b/examples/reduction_maxcut_to_spinglass.rs @@ -0,0 +1,84 @@ +//! # Max-Cut to Spin Glass Reduction +//! +//! ## Mathematical Equivalence +//! Max-Cut maps to Ising by setting J_{ij} = w_{ij} and h_i = 0. Maximizing the +//! cut value sum w_{ij} (for i,j on different sides) equals minimizing the Ising +//! energy -sum J_{ij} s_i s_j since s_i s_j = -1 when vertices are on opposite sides. +//! +//! ## This Example +//! 
- Instance: Triangle K3 with unit edge weights +//! - Source MaxCut: 3 vertices, 3 edges, max cut = 2 +//! - Target SpinGlass: 3 spins +//! +//! ## Output +//! Exports `docs/paper/examples/maxcut_to_spinglass.json` and `maxcut_to_spinglass.result.json`. +//! +//! See docs/paper/reductions.typ for the full reduction specification. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + let maxcut = MaxCut::::new(3, vec![(0, 1, 1), (1, 2, 1), (0, 2, 1)]); + + let reduction = ReduceTo::>::reduce_to(&maxcut); + let sg = reduction.target_problem(); + + println!("\n=== Problem Transformation ==="); + println!("Source: MaxCut with {} variables", maxcut.num_variables()); + println!("Target: SpinGlass with {} variables", sg.num_variables()); + + let solver = BruteForce::new(); + let sg_solutions = solver.find_best(sg); + println!("\n=== Solution ==="); + println!("Target solutions found: {}", sg_solutions.len()); + + // Extract and verify solutions + let mut solutions = Vec::new(); + for target_sol in &sg_solutions { + let source_sol = reduction.extract_solution(target_sol); + let size = maxcut.solution_size(&source_sol); + assert!(size.is_valid); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_sol.clone(), + }); + } + + let maxcut_solution = reduction.extract_solution(&sg_solutions[0]); + println!("Source MaxCut solution: {:?}", maxcut_solution); + + let size = maxcut.solution_size(&maxcut_solution); + println!("Solution size: {:?}", size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // Export JSON + let edges: Vec<(usize, usize, i32)> = maxcut.edges(); + let overhead = lookup_overhead("MaxCut", "SpinGlass") + .expect("MaxCut -> SpinGlass overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: MaxCut::::NAME.to_string(), + variant: variant_to_map(MaxCut::::variant()), + instance: 
serde_json::json!({ + "num_vertices": maxcut.num_vertices(), + "num_edges": maxcut.num_edges(), + "edges": edges, + }), + }, + target: ProblemSide { + problem: SpinGlass::::NAME.to_string(), + variant: variant_to_map(SpinGlass::::variant()), + instance: serde_json::json!({ + "num_spins": sg.num_variables(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("maxcut_to_spinglass", &data, &results); +} diff --git a/examples/reduction_qubo_to_spinglass.rs b/examples/reduction_qubo_to_spinglass.rs new file mode 100644 index 00000000..a0cb7a58 --- /dev/null +++ b/examples/reduction_qubo_to_spinglass.rs @@ -0,0 +1,86 @@ +//! # QUBO to Spin Glass Reduction +//! +//! ## Mathematical Equivalence +//! The reverse substitution x_i = (s_i + 1)/2 transforms binary QUBO variables +//! back to Ising spins. The QUBO matrix Q maps to couplings J and fields h via +//! Q_{ij} = -4J_{ij} for off-diagonal and Q_{ii} = 2*sum_j J_{ij} - 2h_i for diagonal. +//! +//! ## This Example +//! - Instance: 3-variable QUBO with diagonal [-1, -2, -1] and coupling Q_{01} = 3 +//! - Source QUBO: 3 binary variables +//! - Target SpinGlass: 3 spins +//! +//! ## Output +//! Exports `docs/paper/examples/qubo_to_spinglass.json` and +//! `docs/paper/examples/qubo_to_spinglass.result.json` for use in paper code blocks. +//! +//! See docs/paper/reductions.typ for the full reduction specification. 
+ +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + let matrix = vec![ + vec![-1.0, 3.0, 0.0], + vec![0.0, -2.0, 0.0], + vec![0.0, 0.0, -1.0], + ]; + let qubo = QUBO::from_matrix(matrix.clone()); + + let reduction = ReduceTo::>::reduce_to(&qubo); + let sg = reduction.target_problem(); + + println!("\n=== Problem Transformation ==="); + println!("Source: QUBO with {} variables", qubo.num_variables()); + println!("Target: SpinGlass with {} variables", sg.num_variables()); + + let solver = BruteForce::new(); + let sg_solutions = solver.find_best(sg); + println!("\n=== Solution ==="); + println!("Target solutions found: {}", sg_solutions.len()); + + let qubo_solution = reduction.extract_solution(&sg_solutions[0]); + println!("Source QUBO solution: {:?}", qubo_solution); + + let size = qubo.solution_size(&qubo_solution); + println!("Solution size: {:?}", size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // Collect all solutions + let mut solutions = Vec::new(); + for target_sol in &sg_solutions { + let source_sol = reduction.extract_solution(target_sol); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_sol.clone(), + }); + } + + // Export JSON + let overhead = lookup_overhead("QUBO", "SpinGlass") + .expect("QUBO -> SpinGlass overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: QUBO::::NAME.to_string(), + variant: variant_to_map(QUBO::::variant()), + instance: serde_json::json!({ + "num_vars": qubo.num_vars(), + "matrix": matrix, + }), + }, + target: ProblemSide { + problem: SpinGlass::::NAME.to_string(), + variant: variant_to_map(SpinGlass::::variant()), + instance: serde_json::json!({ + "num_spins": sg.num_variables(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("qubo_to_spinglass", &data, &results); +} diff 
--git a/examples/reduction_sat_to_coloring.rs b/examples/reduction_sat_to_coloring.rs new file mode 100644 index 00000000..6bd6942f --- /dev/null +++ b/examples/reduction_sat_to_coloring.rs @@ -0,0 +1,118 @@ +//! # SAT to 3-Coloring Reduction (Garey & Johnson 1979) +//! +//! ## Mathematical Equivalence +//! Creates a graph with a base triangle (TRUE, FALSE, AUX), variable gadgets +//! (pos_i, neg_i connected to AUX), and clause gadgets using OR-gadgets. +//! phi is satisfiable iff the constructed graph is 3-colorable. +//! +//! ## This Example +//! - Instance: phi = (x1 v x2), 2 vars, 1 clause +//! - Source SAT: satisfiable +//! - Target: 3-Coloring with larger graph +//! +//! ## Output +//! Exports `docs/paper/examples/sat_to_coloring.json` and `sat_to_coloring.result.json`. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + // 1. Create SAT instance: phi = (x1 v x2), 2 variables, 1 clause + let sat = Satisfiability::::new( + 2, + vec![ + CNFClause::new(vec![1, 2]), // x1 OR x2 + ], + ); + + println!("=== SAT to 3-Coloring Reduction (Garey & Johnson 1979) ===\n"); + println!("Source SAT formula:"); + println!(" (x1 v x2)"); + println!(" {} variables, {} clauses", sat.num_vars(), sat.num_clauses()); + + // 2. Reduce to 3-Coloring + // SAT reduces to KColoring<3, SimpleGraph, i32> + let reduction = ReduceTo::>::reduce_to(&sat); + let coloring = reduction.target_problem(); + + println!("\n=== Problem Transformation ==="); + println!("Source: Satisfiability with {} variables", sat.num_variables()); + println!( + "Target: 3-Coloring with {} vertices, {} edges", + coloring.num_vertices(), + coloring.num_edges() + ); + println!(" Base triangle: TRUE(0), FALSE(1), AUX(2)"); + println!(" Variable gadgets: pos_i and neg_i vertices connected to AUX"); + println!(" Clause gadgets: OR-gadgets forcing output to TRUE color"); + + // 3. 
Solve the target 3-Coloring problem + let solver = BruteForce::new(); + let coloring_solutions = solver.find_best(coloring); + println!("\n=== Solution ==="); + println!("Target 3-Coloring solutions found: {}", coloring_solutions.len()); + + // 4. Extract and verify source solutions + let sat_solution = reduction.extract_solution(&coloring_solutions[0]); + println!("Extracted SAT solution: {:?}", sat_solution); + println!( + " Interpretation: x1={}, x2={}", + sat_solution[0], sat_solution[1] + ); + + let size = sat.solution_size(&sat_solution); + println!("SAT solution valid: {}", size.is_valid); + assert!(size.is_valid, "Extracted SAT solution must be valid"); + + // Verify all coloring solutions map to valid SAT assignments + let mut valid_count = 0; + let mut solutions = Vec::new(); + for col_sol in &coloring_solutions { + let sat_sol = reduction.extract_solution(col_sol); + let s = sat.solution_size(&sat_sol); + if s.is_valid { + valid_count += 1; + } + solutions.push(SolutionPair { + source_config: sat_sol, + target_config: col_sol.clone(), + }); + } + println!( + "All {} coloring solutions map to valid SAT assignments: {}", + coloring_solutions.len(), + valid_count == coloring_solutions.len() + ); + assert_eq!(valid_count, coloring_solutions.len()); + + println!("\nReduction verified successfully"); + + // 5. 
Export JSON + let overhead = lookup_overhead("Satisfiability", "KColoring") + .expect("Satisfiability -> KColoring overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: Satisfiability::::NAME.to_string(), + variant: variant_to_map(Satisfiability::::variant()), + instance: serde_json::json!({ + "num_vars": sat.num_vars(), + "num_clauses": sat.num_clauses(), + }), + }, + target: ProblemSide { + problem: KColoring::<3, SimpleGraph, i32>::NAME.to_string(), + variant: variant_to_map(KColoring::<3, SimpleGraph, i32>::variant()), + instance: serde_json::json!({ + "num_vertices": coloring.num_vertices(), + "num_edges": coloring.num_edges(), + "num_colors": 3, + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("sat_to_coloring", &data, &results); +} diff --git a/examples/reduction_sat_to_dominatingset.rs b/examples/reduction_sat_to_dominatingset.rs new file mode 100644 index 00000000..60b22154 --- /dev/null +++ b/examples/reduction_sat_to_dominatingset.rs @@ -0,0 +1,128 @@ +//! # SAT to Dominating Set Reduction (Garey & Johnson 1979) +//! +//! ## Mathematical Equivalence +//! For each variable x_i, create a triangle (pos_i, neg_i, dummy_i). For each +//! clause c_j, create a vertex connected to the literals it contains. phi is +//! satisfiable iff the graph has a dominating set of size n. +//! +//! ## This Example +//! - Instance: phi = (x1 v x2) ^ (~x1 v x2), 2 vars, 2 clauses +//! - Source SAT: satisfiable (e.g., x2=1) +//! - Target: Dominating set +//! +//! ## Output +//! Exports `docs/paper/examples/sat_to_dominatingset.json` and +//! `docs/paper/examples/sat_to_dominatingset.result.json` for use in paper code blocks. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + // 1. 
Create SAT instance: phi = (x1 v x2) ^ (~x1 v x2), 2 vars, 2 clauses + let sat = Satisfiability::::new( + 2, + vec![ + CNFClause::new(vec![1, 2]), // x1 OR x2 + CNFClause::new(vec![-1, 2]), // NOT x1 OR x2 + ], + ); + + println!("=== SAT to Dominating Set Reduction (Garey & Johnson 1979) ===\n"); + println!("Source SAT formula:"); + println!(" (x1 v x2) ^ (~x1 v x2)"); + println!(" {} variables, {} clauses", sat.num_vars(), sat.num_clauses()); + + // 2. Reduce to Dominating Set + let reduction = ReduceTo::>::reduce_to(&sat); + let ds = reduction.target_problem(); + + println!("\n=== Problem Transformation ==="); + println!("Source: Satisfiability with {} variables", sat.num_variables()); + println!( + "Target: DominatingSet with {} vertices, {} edges", + ds.num_vertices(), + ds.num_edges() + ); + println!(" Variable gadgets: 3 vertices per variable (pos, neg, dummy) forming triangles"); + println!(" Clause vertices: 1 per clause, connected to relevant literal vertices"); + println!(" Layout: vertices 0-5 are variable gadgets, vertices 6-7 are clause vertices"); + + // 3. Solve the target DS problem + let solver = BruteForce::new(); + let ds_solutions = solver.find_best(ds); + println!("\n=== Solution ==="); + println!("Target DS solutions found: {}", ds_solutions.len()); + + // 4. 
Extract and verify source solutions + let sat_solution = reduction.extract_solution(&ds_solutions[0]); + println!("Extracted SAT solution: {:?}", sat_solution); + println!( + " Interpretation: x1={}, x2={}", + sat_solution[0], sat_solution[1] + ); + + let size = sat.solution_size(&sat_solution); + println!("SAT solution valid: {}", size.is_valid); + assert!(size.is_valid, "Extracted SAT solution must be valid"); + + // Verify all DS solutions map to valid SAT assignments + let mut valid_count = 0; + for ds_sol in &ds_solutions { + let sat_sol = reduction.extract_solution(ds_sol); + let s = sat.solution_size(&sat_sol); + if s.is_valid { + valid_count += 1; + } + } + println!( + "{}/{} DS solutions map to valid SAT assignments", + valid_count, + ds_solutions.len() + ); + // Note: Not all optimal DS solutions necessarily map back to valid SAT solutions + // because some dominating sets may use dummy vertices. The important thing is that + // at least one does, verifying the reduction's correctness. + assert!(valid_count > 0, "At least one DS solution must map to a valid SAT assignment"); + + println!("\nReduction verified successfully"); + + // 5. Collect all valid solutions + let mut solutions = Vec::new(); + for ds_sol in &ds_solutions { + let sat_sol = reduction.extract_solution(ds_sol); + if sat.solution_size(&sat_sol).is_valid { + solutions.push(SolutionPair { + source_config: sat_sol, + target_config: ds_sol.clone(), + }); + } + } + + // 6. 
Export JSON + let overhead = lookup_overhead("Satisfiability", "DominatingSet") + .expect("Satisfiability -> DominatingSet overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: Satisfiability::::NAME.to_string(), + variant: variant_to_map(Satisfiability::::variant()), + instance: serde_json::json!({ + "num_vars": sat.num_vars(), + "num_clauses": sat.num_clauses(), + }), + }, + target: ProblemSide { + problem: DominatingSet::::NAME.to_string(), + variant: variant_to_map(DominatingSet::::variant()), + instance: serde_json::json!({ + "num_vertices": ds.num_vertices(), + "num_edges": ds.num_edges(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("sat_to_dominatingset", &data, &results); +} diff --git a/examples/reduction_sat_to_is.rs b/examples/reduction_sat_to_is.rs new file mode 100644 index 00000000..ca5b096e --- /dev/null +++ b/examples/reduction_sat_to_is.rs @@ -0,0 +1,119 @@ +//! # SAT to Independent Set Reduction (Karp 1972) +//! +//! ## Mathematical Equivalence +//! Given a CNF formula phi with m clauses, construct a graph G where each literal +//! in each clause becomes a vertex. Intra-clause edges form cliques, cross-clause +//! edges connect complementary literals. phi is satisfiable iff G has IS of size m. +//! +//! ## This Example +//! - Instance: phi = (x1 v x2) ^ (~x1 v x3) ^ (x2 v ~x3), 3 vars, 3 clauses +//! - Source SAT: satisfiable (e.g., x1=1, x2=1, x3=1) +//! - Target IS: size 3 (one vertex per clause) +//! +//! ## Output +//! Exports `docs/paper/examples/sat_to_is.json` and `sat_to_is.result.json`. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + // 1. 
Create SAT instance: phi = (x1 v x2) ^ (~x1 v x3) ^ (x2 v ~x3) + // 3 variables, 3 clauses + let sat = Satisfiability::::new( + 3, + vec![ + CNFClause::new(vec![1, 2]), // x1 OR x2 + CNFClause::new(vec![-1, 3]), // NOT x1 OR x3 + CNFClause::new(vec![2, -3]), // x2 OR NOT x3 + ], + ); + + println!("=== SAT to Independent Set Reduction (Karp 1972) ===\n"); + println!("Source SAT formula:"); + println!(" (x1 v x2) ^ (~x1 v x3) ^ (x2 v ~x3)"); + println!(" {} variables, {} clauses", sat.num_vars(), sat.num_clauses()); + + // 2. Reduce to Independent Set + let reduction = ReduceTo::>::reduce_to(&sat); + let is = reduction.target_problem(); + + println!("\n=== Problem Transformation ==="); + println!("Source: Satisfiability with {} variables", sat.num_variables()); + println!( + "Target: IndependentSet with {} vertices, {} edges", + is.num_vertices(), + is.num_edges() + ); + println!(" Each literal occurrence becomes a vertex."); + println!(" Edges connect literals within the same clause (clique)"); + println!(" and complementary literals across clauses."); + + // 3. Solve the target IS problem + let solver = BruteForce::new(); + let is_solutions = solver.find_best(is); + println!("\n=== Solution ==="); + println!("Target IS solutions found: {}", is_solutions.len()); + + // 4. 
Extract and verify source solutions + let sat_solution = reduction.extract_solution(&is_solutions[0]); + println!("Extracted SAT solution: {:?}", sat_solution); + println!( + " Interpretation: x1={}, x2={}, x3={}", + sat_solution[0], sat_solution[1], sat_solution[2] + ); + + let size = sat.solution_size(&sat_solution); + println!("SAT solution valid: {}", size.is_valid); + assert!(size.is_valid, "Extracted SAT solution must be valid"); + + // Verify all IS solutions map to valid SAT assignments + let mut valid_count = 0; + let mut solutions = Vec::new(); + for is_sol in &is_solutions { + let sat_sol = reduction.extract_solution(is_sol); + let s = sat.solution_size(&sat_sol); + if s.is_valid { + valid_count += 1; + } + solutions.push(SolutionPair { + source_config: sat_sol, + target_config: is_sol.clone(), + }); + } + println!( + "All {} IS solutions map to valid SAT assignments: {}", + is_solutions.len(), + valid_count == is_solutions.len() + ); + assert_eq!(valid_count, is_solutions.len()); + + println!("\nReduction verified successfully"); + + // 5. 
Export JSON + let overhead = lookup_overhead("Satisfiability", "IndependentSet") + .expect("Satisfiability -> IndependentSet overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: Satisfiability::::NAME.to_string(), + variant: variant_to_map(Satisfiability::::variant()), + instance: serde_json::json!({ + "num_vars": sat.num_vars(), + "num_clauses": sat.num_clauses(), + }), + }, + target: ProblemSide { + problem: IndependentSet::::NAME.to_string(), + variant: variant_to_map(IndependentSet::::variant()), + instance: serde_json::json!({ + "num_vertices": is.num_vertices(), + "num_edges": is.num_edges(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("sat_to_is", &data, &results); +} diff --git a/examples/reduction_sat_to_ksat.rs b/examples/reduction_sat_to_ksat.rs new file mode 100644 index 00000000..39d513f3 --- /dev/null +++ b/examples/reduction_sat_to_ksat.rs @@ -0,0 +1,129 @@ +//! # SAT to k-SAT Reduction (Cook-Levin) +//! +//! ## Mathematical Equivalence +//! Small clauses (< k literals) are padded with auxiliary variables and their +//! negations. Large clauses (> k literals) are split using auxiliary variables +//! in a chain that preserves satisfiability. +//! +//! ## This Example +//! - Instance: phi = (x1) ^ (x1 v x2 v x3 v x4), 4 vars, 2 clauses +//! - First clause has 1 literal (will be padded to 3) +//! - Second clause has 4 literals (will be split into two 3-literal clauses) +//! - Source SAT: satisfiable +//! - Target: 3-SAT with 3 literals per clause +//! +//! ## Output +//! Exports `docs/paper/examples/sat_to_ksat.json` and `sat_to_ksat.result.json`. + +use problemreductions::export::*; +use problemreductions::prelude::*; + +fn main() { + // 1. 
Create SAT instance with varied clause sizes: + // phi = (x1) ^ (x1 v x2 v x3 v x4) + // - Clause 1 has 1 literal (needs padding to 3) + // - Clause 2 has 4 literals (needs splitting into 3-literal clauses) + let sat = Satisfiability::::new( + 4, + vec![ + CNFClause::new(vec![1]), // x1 (1 literal - will be padded) + CNFClause::new(vec![1, 2, 3, 4]), // x1 v x2 v x3 v x4 (4 literals - will be split) + ], + ); + + println!("=== SAT to 3-SAT Reduction ===\n"); + println!("Source SAT formula:"); + println!(" (x1) ^ (x1 v x2 v x3 v x4)"); + println!(" {} variables, {} clauses", sat.num_vars(), sat.num_clauses()); + println!(" Clause sizes: 1 and 4 (both need transformation for 3-SAT)"); + + // 2. Reduce to 3-SAT (K=3) + let reduction = ReduceTo::>::reduce_to(&sat); + let ksat = reduction.target_problem(); + + println!("\n=== Problem Transformation ==="); + println!("Source: Satisfiability with {} variables, {} clauses", sat.num_vars(), sat.num_clauses()); + println!( + "Target: 3-SAT with {} variables, {} clauses", + ksat.num_vars(), + ksat.num_clauses() + ); + println!(" Additional variables: {} (ancilla/auxiliary)", ksat.num_vars() - sat.num_vars()); + println!(" Clause (x1) padded: (x1 v a v b) ^ (x1 v a v ~b) ^ ... "); + println!(" Clause (x1 v x2 v x3 v x4) split: (x1 v x2 v c) ^ (~c v x3 v x4)"); + + // Print target clauses + println!("\n Target 3-SAT clauses:"); + for (i, clause) in ksat.clauses().iter().enumerate() { + println!(" Clause {}: {:?}", i, clause.literals); + } + + // 3. Solve the target 3-SAT problem + let solver = BruteForce::new(); + let ksat_solutions = solver.find_best(ksat); + println!("\n=== Solution ==="); + println!("Target 3-SAT solutions found: {}", ksat_solutions.len()); + + // 4. 
Extract and verify source solutions + let sat_solution = reduction.extract_solution(&ksat_solutions[0]); + println!("Extracted SAT solution: {:?}", sat_solution); + println!( + " Interpretation: x1={}, x2={}, x3={}, x4={}", + sat_solution[0], sat_solution[1], sat_solution[2], sat_solution[3] + ); + + let size = sat.solution_size(&sat_solution); + println!("SAT solution valid: {}", size.is_valid); + assert!(size.is_valid, "Extracted SAT solution must be valid"); + + // Verify all 3-SAT solutions map to valid SAT assignments + let mut valid_count = 0; + let mut solutions = Vec::new(); + for ks_sol in &ksat_solutions { + let sat_sol = reduction.extract_solution(ks_sol); + let s = sat.solution_size(&sat_sol); + if s.is_valid { + valid_count += 1; + } + solutions.push(SolutionPair { + source_config: sat_sol, + target_config: ks_sol.clone(), + }); + } + println!( + "All {} 3-SAT solutions map to valid SAT assignments: {}", + ksat_solutions.len(), + valid_count == ksat_solutions.len() + ); + assert_eq!(valid_count, ksat_solutions.len()); + + println!("\nReduction verified successfully"); + + // 5. 
Export JSON + let overhead = lookup_overhead("Satisfiability", "KSatisfiability") + .expect("Satisfiability -> KSatisfiability overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: Satisfiability::::NAME.to_string(), + variant: variant_to_map(Satisfiability::::variant()), + instance: serde_json::json!({ + "num_vars": sat.num_vars(), + "num_clauses": sat.num_clauses(), + }), + }, + target: ProblemSide { + problem: KSatisfiability::<3, i32>::NAME.to_string(), + variant: variant_to_map(KSatisfiability::<3, i32>::variant()), + instance: serde_json::json!({ + "num_vars": ksat.num_vars(), + "num_clauses": ksat.num_clauses(), + "k": 3, + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("sat_to_ksat", &data, &results); +} diff --git a/examples/reduction_setcovering_to_ilp.rs b/examples/reduction_setcovering_to_ilp.rs new file mode 100644 index 00000000..305455e7 --- /dev/null +++ b/examples/reduction_setcovering_to_ilp.rs @@ -0,0 +1,96 @@ +//! # Set Covering to ILP Reduction +//! +//! ## Mathematical Formulation +//! Variables: x_i in {0,1} for each set S_i. +//! Constraints: sum_{S_i containing e} x_i >= 1 for each element e in universe. +//! Objective: minimize sum of w_i * x_i. +//! +//! ## This Example +//! - Instance: Universe size 3, sets: S0={0,1}, S1={1,2}, S2={0,2} +//! - Source SetCovering: min cover size 2 (any two sets cover all elements) +//! - Target ILP: 3 binary variables, 3 element-coverage constraints +//! +//! ## Output +//! Exports `docs/paper/examples/setcovering_to_ilp.json` for use in paper code blocks. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::solvers::BruteForceFloat; + +fn main() { + // 1. Create SetCovering instance: universe {0,1,2}, 3 sets + let sc = SetCovering::::new( + 3, + vec![ + vec![0, 1], + vec![1, 2], + vec![0, 2], + ], + ); + + // 2. 
Reduce to ILP + let reduction = ReduceTo::::reduce_to(&sc); + let ilp = reduction.target_problem(); + + // 3. Print transformation + println!("\n=== Problem Transformation ==="); + println!("Source: SetCovering with {} variables", sc.num_variables()); + println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); + + // 4. Solve target ILP + let solver = BruteForce::new(); + let ilp_solutions = solver.find_best_float(ilp); + println!("\n=== Solution ==="); + println!("ILP solutions found: {}", ilp_solutions.len()); + + let ilp_solution = &ilp_solutions[0].0; + println!("ILP solution: {:?}", ilp_solution); + + // 5. Extract source solution + let sc_solution = reduction.extract_solution(ilp_solution); + println!("Source SetCovering solution: {:?}", sc_solution); + + // 6. Verify + let size = sc.solution_size(&sc_solution); + println!("Solution valid: {}, size: {:?}", size.is_valid, size.size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // 7. 
Collect solutions and export JSON + let mut solutions = Vec::new(); + for (target_config, _score) in &ilp_solutions { + let source_sol = reduction.extract_solution(target_config); + let s = sc.solution_size(&source_sol); + assert!(s.is_valid); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_config.clone(), + }); + } + + let overhead = lookup_overhead_or_empty("SetCovering", "ILP"); + + let data = ReductionData { + source: ProblemSide { + problem: SetCovering::::NAME.to_string(), + variant: variant_to_map(SetCovering::::variant()), + instance: serde_json::json!({ + "num_sets": sc.num_sets(), + "sets": sc.sets(), + "universe_size": sc.universe_size(), + }), + }, + target: ProblemSide { + problem: ILP::NAME.to_string(), + variant: variant_to_map(ILP::variant()), + instance: serde_json::json!({ + "num_vars": ilp.num_vars, + "num_constraints": ilp.constraints.len(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("setcovering_to_ilp", &data, &results); +} diff --git a/examples/reduction_setpacking_to_ilp.rs b/examples/reduction_setpacking_to_ilp.rs new file mode 100644 index 00000000..f445aee9 --- /dev/null +++ b/examples/reduction_setpacking_to_ilp.rs @@ -0,0 +1,93 @@ +//! # Set Packing to ILP Reduction +//! +//! ## Mathematical Formulation +//! Variables: x_i in {0,1} for each set S_i. +//! Constraints: x_i + x_j <= 1 for each overlapping pair (i,j). +//! Objective: maximize sum of w_i * x_i. +//! +//! ## This Example +//! - Instance: 3 sets: S0={0,1}, S1={1,2}, S2={2,3,4} +//! Overlapping pairs: (S0,S1) share element 1, (S1,S2) share element 2 +//! - Source SetPacking: max packing size 2 (S0 and S2 are disjoint) +//! - Target ILP: 3 binary variables, 2 overlap constraints +//! +//! ## Output +//! Exports `docs/paper/examples/setpacking_to_ilp.json` for use in paper code blocks. 
+ +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::solvers::BruteForceFloat; + +fn main() { + // 1. Create SetPacking instance: 3 sets + let sp = SetPacking::::new(vec![ + vec![0, 1], + vec![1, 2], + vec![2, 3, 4], + ]); + + // 2. Reduce to ILP + let reduction = ReduceTo::::reduce_to(&sp); + let ilp = reduction.target_problem(); + + // 3. Print transformation + println!("\n=== Problem Transformation ==="); + println!("Source: SetPacking with {} variables", sp.num_variables()); + println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); + + // 4. Solve target ILP + let solver = BruteForce::new(); + let ilp_solutions = solver.find_best_float(ilp); + println!("\n=== Solution ==="); + println!("ILP solutions found: {}", ilp_solutions.len()); + + let ilp_solution = &ilp_solutions[0].0; + println!("ILP solution: {:?}", ilp_solution); + + // 5. Extract source solution + let sp_solution = reduction.extract_solution(ilp_solution); + println!("Source SetPacking solution: {:?}", sp_solution); + + // 6. Verify + let size = sp.solution_size(&sp_solution); + println!("Solution valid: {}, size: {:?}", size.is_valid, size.size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // 7. 
Collect solutions and export JSON + let mut solutions = Vec::new(); + for (target_config, _score) in &ilp_solutions { + let source_sol = reduction.extract_solution(target_config); + let s = sp.solution_size(&source_sol); + assert!(s.is_valid); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_config.clone(), + }); + } + + let overhead = lookup_overhead_or_empty("SetPacking", "ILP"); + + let data = ReductionData { + source: ProblemSide { + problem: SetPacking::::NAME.to_string(), + variant: variant_to_map(SetPacking::::variant()), + instance: serde_json::json!({ + "num_sets": sp.num_sets(), + "sets": sp.sets(), + }), + }, + target: ProblemSide { + problem: ILP::NAME.to_string(), + variant: variant_to_map(ILP::variant()), + instance: serde_json::json!({ + "num_vars": ilp.num_vars, + "num_constraints": ilp.constraints.len(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("setpacking_to_ilp", &data, &results); +} diff --git a/examples/reduction_setpacking_to_qubo.rs b/examples/reduction_setpacking_to_qubo.rs new file mode 100644 index 00000000..26279acd --- /dev/null +++ b/examples/reduction_setpacking_to_qubo.rs @@ -0,0 +1,115 @@ +//! # Set Packing to QUBO Reduction (Penalty Method) +//! +//! ## Mathematical Relationship +//! The Maximum Set Packing problem selects the largest collection of +//! non-overlapping sets from a family of sets. It is mapped to QUBO as: +//! +//! H(x) = -sum_i x_i + P * sum_{i 1 penalizes selecting +//! overlapping sets. The QUBO minimization maximizes the number of selected +//! non-overlapping sets. +//! +//! ## This Example +//! - Instance: 3 sets over universe {0,1,2,3,4} +//! - Set A = {0, 1} +//! - Set B = {1, 2} +//! - Set C = {2, 3, 4} +//! - Overlaps: A-B share element 1, B-C share element 2 +//! - QUBO variables: 3 (one per set) +//! - Expected: Optimal packing selects {A, C} (size 2), since A and C +//! do not overlap +//! +//! 
## Outputs +//! - `docs/paper/examples/setpacking_to_qubo.json` — reduction structure +//! - `docs/paper/examples/setpacking_to_qubo.result.json` — solutions +//! +//! ## Usage +//! ```bash +//! cargo run --example reduction_setpacking_to_qubo +//! ``` + +use problemreductions::export::*; +use problemreductions::prelude::*; + +fn main() { + println!("=== Set Packing -> QUBO Reduction ===\n"); + + // 3 sets over universe {0,1,2,3,4} + let sets = vec![ + vec![0, 1], // Set A + vec![1, 2], // Set B + vec![2, 3, 4], // Set C + ]; + let set_names = ["Set-A".to_string(), "Set-B".to_string(), "Set-C".to_string()]; + let sp = SetPacking::::new(sets.clone()); + + // Reduce to QUBO + let reduction = ReduceTo::::reduce_to(&sp); + let qubo = reduction.target_problem(); + + println!("Source: SetPacking with 3 sets over universe {{0,1,2,3,4}}"); + println!(" Set A = {{0, 1}}, Set B = {{1, 2}}, Set C = {{2, 3, 4}}"); + println!("Target: QUBO with {} variables", qubo.num_variables()); + println!("Q matrix:"); + for row in qubo.matrix() { + println!(" {:?}", row); + } + + // Solve QUBO with brute force + let solver = BruteForce::new(); + let qubo_solutions = solver.find_best(qubo); + + // Extract and verify solutions + println!("\nOptimal solutions:"); + let mut solutions = Vec::new(); + for sol in &qubo_solutions { + let extracted = reduction.extract_solution(sol); + let selected: Vec = extracted + .iter() + .enumerate() + .filter(|(_, &x)| x == 1) + .map(|(i, _)| set_names[i].clone()) + .collect(); + let packing_size = selected.len(); + println!(" Selected: {:?} (packing size {})", selected, packing_size); + + // Closed-loop verification: check solution is valid in original problem + let sol_size = sp.solution_size(&extracted); + assert!(sol_size.is_valid, "Solution must be valid in source problem"); + + solutions.push(SolutionPair { + source_config: extracted, + target_config: sol.clone(), + }); + } + + println!("\nVerification passed: all solutions are valid set packings"); + + 
// Export JSON + let overhead = lookup_overhead("SetPacking", "QUBO") + .expect("SetPacking -> QUBO overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: SetPacking::::NAME.to_string(), + variant: variant_to_map(SetPacking::::variant()), + instance: serde_json::json!({ + "num_sets": sp.num_sets(), + "sets": sp.sets(), + }), + }, + target: ProblemSide { + problem: QUBO::::NAME.to_string(), + variant: variant_to_map(QUBO::::variant()), + instance: serde_json::json!({ + "num_vars": qubo.num_vars(), + "matrix": qubo.matrix(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("setpacking_to_qubo", &data, &results); +} diff --git a/examples/reduction_spinglass_to_maxcut.rs b/examples/reduction_spinglass_to_maxcut.rs new file mode 100644 index 00000000..d4b4d7fc --- /dev/null +++ b/examples/reduction_spinglass_to_maxcut.rs @@ -0,0 +1,86 @@ +//! # Spin Glass to Max-Cut Reduction +//! +//! ## Mathematical Equivalence +//! When external fields h_i = 0, the Ising Hamiltonian H = -sum J_{ij} s_i s_j maps +//! directly to a Max-Cut problem: maximizing the cut value is equivalent to minimizing +//! the Ising energy. When h_i != 0, an ancilla spin is added with w_{i,a} = h_i. +//! +//! ## This Example +//! - Instance: 3-spin frustrated triangle (J_{01} = 1, J_{12} = 1, J_{02} = 1, h = 0) +//! - Source SpinGlass: 3 spins, no external fields +//! - Target MaxCut: 3 vertices (direct mapping, no ancilla) +//! +//! ## Output +//! Exports `docs/paper/examples/spinglass_to_maxcut.json` and `spinglass_to_maxcut.result.json`. +//! +//! See docs/paper/reductions.typ for the full reduction specification. 
+ +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + let sg = SpinGlass::::new( + 3, + vec![((0, 1), 1), ((1, 2), 1), ((0, 2), 1)], + vec![0, 0, 0], + ); + + let reduction = ReduceTo::>::reduce_to(&sg); + let maxcut = reduction.target_problem(); + + println!("\n=== Problem Transformation ==="); + println!("Source: SpinGlass with {} variables", sg.num_variables()); + println!("Target: MaxCut with {} variables", maxcut.num_variables()); + + let solver = BruteForce::new(); + let maxcut_solutions = solver.find_best(maxcut); + println!("\n=== Solution ==="); + println!("Target solutions found: {}", maxcut_solutions.len()); + + // Extract and verify solutions + let mut solutions = Vec::new(); + for target_sol in &maxcut_solutions { + let source_sol = reduction.extract_solution(target_sol); + let size = sg.solution_size(&source_sol); + assert!(size.is_valid); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_sol.clone(), + }); + } + + let sg_solution = reduction.extract_solution(&maxcut_solutions[0]); + println!("Source SpinGlass solution: {:?}", sg_solution); + + let size = sg.solution_size(&sg_solution); + println!("Solution size: {:?}", size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // Export JSON + let overhead = lookup_overhead("SpinGlass", "MaxCut") + .expect("SpinGlass -> MaxCut overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: SpinGlass::::NAME.to_string(), + variant: variant_to_map(SpinGlass::::variant()), + instance: serde_json::json!({ + "num_spins": sg.num_variables(), + }), + }, + target: ProblemSide { + problem: MaxCut::::NAME.to_string(), + variant: variant_to_map(MaxCut::::variant()), + instance: serde_json::json!({ + "num_vertices": maxcut.num_vertices(), + "num_edges": maxcut.num_edges(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = 
ResultData { solutions }; + write_example("spinglass_to_maxcut", &data, &results); +} diff --git a/examples/reduction_spinglass_to_qubo.rs b/examples/reduction_spinglass_to_qubo.rs new file mode 100644 index 00000000..ff091ff9 --- /dev/null +++ b/examples/reduction_spinglass_to_qubo.rs @@ -0,0 +1,87 @@ +//! # Spin Glass to QUBO Reduction +//! +//! ## Mathematical Equivalence +//! The substitution s_i = 2x_i - 1 transforms Ising spins s in {-1,+1} to binary +//! variables x in {0,1}. Expanding the Ising Hamiltonian H(s) under this substitution +//! yields a QUBO objective Q(x) plus a constant offset. +//! +//! ## This Example +//! - Instance: 3-spin antiferromagnetic chain with fields +//! - Couplings: J_{01} = -1.0, J_{12} = -1.0 +//! - Fields: h = [0.5, -0.5, 0.5] +//! - Source SpinGlass: 3 spins +//! - Target QUBO: 3 binary variables +//! +//! ## Output +//! Exports `docs/paper/examples/spinglass_to_qubo.json` and +//! `docs/paper/examples/spinglass_to_qubo.result.json` for use in paper code blocks. +//! +//! See docs/paper/reductions.typ for the full reduction specification. 
+ +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + let sg = SpinGlass::::new( + 3, + vec![((0, 1), -1.0), ((1, 2), -1.0)], + vec![0.5, -0.5, 0.5], + ); + + let reduction = ReduceTo::>::reduce_to(&sg); + let qubo = reduction.target_problem(); + + println!("\n=== Problem Transformation ==="); + println!("Source: SpinGlass with {} variables", sg.num_variables()); + println!("Target: QUBO with {} variables", qubo.num_variables()); + + let solver = BruteForce::new(); + let qubo_solutions = solver.find_best(qubo); + println!("\n=== Solution ==="); + println!("Target solutions found: {}", qubo_solutions.len()); + + let sg_solution = reduction.extract_solution(&qubo_solutions[0]); + println!("Source SpinGlass solution: {:?}", sg_solution); + + let size = sg.solution_size(&sg_solution); + println!("Solution size: {:?}", size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // Collect all solutions + let mut solutions = Vec::new(); + for target_sol in &qubo_solutions { + let source_sol = reduction.extract_solution(target_sol); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_sol.clone(), + }); + } + + // Export JSON + let overhead = lookup_overhead("SpinGlass", "QUBO") + .expect("SpinGlass -> QUBO overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: SpinGlass::::NAME.to_string(), + variant: variant_to_map(SpinGlass::::variant()), + instance: serde_json::json!({ + "num_spins": sg.num_variables(), + }), + }, + target: ProblemSide { + problem: QUBO::::NAME.to_string(), + variant: variant_to_map(QUBO::::variant()), + instance: serde_json::json!({ + "num_vars": qubo.num_vars(), + "matrix": qubo.matrix(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("spinglass_to_qubo", &data, &results); +} diff --git 
a/examples/reduction_vc_to_ilp.rs b/examples/reduction_vc_to_ilp.rs new file mode 100644 index 00000000..cb316499 --- /dev/null +++ b/examples/reduction_vc_to_ilp.rs @@ -0,0 +1,90 @@ +//! # Vertex Covering to ILP Reduction +//! +//! ## Mathematical Formulation +//! Variables: x_v in {0,1} for each vertex v. +//! Constraints: x_u + x_v >= 1 for each edge (u,v). +//! Objective: minimize sum of w_v * x_v. +//! +//! ## This Example +//! - Instance: Cycle C4 (4 vertices, 4 edges: 0-1-2-3-0) +//! - Source VC: min cover size 2 +//! - Target ILP: 4 binary variables, 4 constraints +//! +//! ## Output +//! Exports `docs/paper/examples/vc_to_ilp.json` for use in paper code blocks. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::solvers::BruteForceFloat; +use problemreductions::topology::SimpleGraph; + +fn main() { + // 1. Create VC instance: cycle C4 + let vc = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3), (3, 0)]); + + // 2. Reduce to ILP + let reduction = ReduceTo::::reduce_to(&vc); + let ilp = reduction.target_problem(); + + // 3. Print transformation + println!("\n=== Problem Transformation ==="); + println!("Source: VertexCovering with {} variables", vc.num_variables()); + println!("Target: ILP with {} variables, {} constraints", ilp.num_vars, ilp.constraints.len()); + + // 4. Solve target ILP + let solver = BruteForce::new(); + let ilp_solutions = solver.find_best_float(ilp); + println!("\n=== Solution ==="); + println!("ILP solutions found: {}", ilp_solutions.len()); + + let ilp_solution = &ilp_solutions[0].0; + println!("ILP solution: {:?}", ilp_solution); + + // 5. Extract source solution + let vc_solution = reduction.extract_solution(ilp_solution); + println!("Source VC solution: {:?}", vc_solution); + + // 6. 
Verify + let size = vc.solution_size(&vc_solution); + println!("Solution valid: {}, size: {:?}", size.is_valid, size.size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // 7. Collect solutions and export JSON + let mut solutions = Vec::new(); + for (target_config, _score) in &ilp_solutions { + let source_sol = reduction.extract_solution(target_config); + let s = vc.solution_size(&source_sol); + assert!(s.is_valid); + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_config.clone(), + }); + } + + let overhead = lookup_overhead_or_empty("VertexCovering", "ILP"); + + let data = ReductionData { + source: ProblemSide { + problem: VertexCovering::::NAME.to_string(), + variant: variant_to_map(VertexCovering::::variant()), + instance: serde_json::json!({ + "num_vertices": vc.num_vertices(), + "num_edges": vc.num_edges(), + "edges": vc.edges(), + }), + }, + target: ProblemSide { + problem: ILP::NAME.to_string(), + variant: variant_to_map(ILP::variant()), + instance: serde_json::json!({ + "num_vars": ilp.num_vars, + "num_constraints": ilp.constraints.len(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("vc_to_ilp", &data, &results); +} diff --git a/examples/reduction_vc_to_is.rs b/examples/reduction_vc_to_is.rs new file mode 100644 index 00000000..9624a0cc --- /dev/null +++ b/examples/reduction_vc_to_is.rs @@ -0,0 +1,87 @@ +//! # Vertex Cover to Independent Set Reduction +//! +//! ## Mathematical Equivalence +//! C ⊆ V is a vertex cover iff V \ C is an independent set. The reduction +//! creates an identical graph with identical weights. Solution extraction +//! computes the complement: IS = V \ VC. +//! +//! ## This Example +//! - Instance: Cycle C4 (4 vertices, 4 edges) +//! - Source VC: min size 2 +//! - Target IS: max size 2 +//! +//! ## Output +//! Exports `docs/paper/examples/vc_to_is.json` and `vc_to_is.result.json`. +//! +//! 
See docs/paper/reductions.typ for the full reduction specification. + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + let vc = VertexCovering::::new(4, vec![(0, 1), (1, 2), (2, 3), (0, 3)]); + + let reduction = ReduceTo::>::reduce_to(&vc); + let is = reduction.target_problem(); + + println!("\n=== Problem Transformation ==="); + println!("Source: VertexCovering with {} variables", vc.num_variables()); + println!("Target: IndependentSet with {} variables", is.num_variables()); + + let solver = BruteForce::new(); + let is_solutions = solver.find_best(is); + println!("\n=== Solution ==="); + println!("Target solutions found: {}", is_solutions.len()); + + // Extract and verify solutions + let mut solutions = Vec::new(); + for target_sol in &is_solutions { + let source_sol = reduction.extract_solution(target_sol); + let size = vc.solution_size(&source_sol); + assert!(size.is_valid); + solutions.push(SolutionPair { + source_config: source_sol.clone(), + target_config: target_sol.clone(), + }); + } + + let vc_solution = reduction.extract_solution(&is_solutions[0]); + println!("Source VC solution: {:?}", vc_solution); + + let size = vc.solution_size(&vc_solution); + println!("Solution size: {:?}", size); + assert!(size.is_valid); + println!("\nReduction verified successfully"); + + // Export JSON + let vc_edges = vc.edges(); + let is_edges = is.edges(); + let overhead = lookup_overhead("VertexCovering", "IndependentSet") + .expect("VertexCovering -> IndependentSet overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: VertexCovering::::NAME.to_string(), + variant: variant_to_map(VertexCovering::::variant()), + instance: serde_json::json!({ + "num_vertices": vc.num_vertices(), + "num_edges": vc.num_edges(), + "edges": vc_edges, + }), + }, + target: ProblemSide { + problem: IndependentSet::::NAME.to_string(), + variant: 
variant_to_map(IndependentSet::::variant()), + instance: serde_json::json!({ + "num_vertices": is.num_vertices(), + "num_edges": is.num_edges(), + "edges": is_edges, + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("vc_to_is", &data, &results); +} diff --git a/examples/reduction_vc_to_qubo.rs b/examples/reduction_vc_to_qubo.rs new file mode 100644 index 00000000..38920c95 --- /dev/null +++ b/examples/reduction_vc_to_qubo.rs @@ -0,0 +1,115 @@ +//! # Vertex Covering to QUBO Reduction (Penalty Method) +//! +//! ## Mathematical Relationship +//! The Minimum Vertex Cover (MVC) problem on a graph G = (V, E) is mapped to +//! QUBO by constructing a penalty Hamiltonian: +//! +//! H(x) = sum_{i in V} x_i + P * sum_{(i,j) in E} (1 - x_i)(1 - x_j) +//! +//! where P is a penalty weight ensuring every edge has at least one endpoint +//! selected. The QUBO minimization finds configurations that minimize the +//! number of selected vertices while covering all edges. +//! +//! ## This Example +//! - Instance: Cycle graph C4 with 4 vertices and 4 edges (0-1-2-3-0) +//! - Source: VertexCovering with minimum size 2 +//! - QUBO variables: 4 (one per vertex) +//! - Expected: Optimal vertex covers of size 2 (e.g., {0,2} or {1,3}) +//! +//! ## Outputs +//! - `docs/paper/examples/vc_to_qubo.json` — reduction structure +//! - `docs/paper/examples/vc_to_qubo.result.json` — solutions +//! +//! ## Usage +//! ```bash +//! cargo run --example reduction_vc_to_qubo +//! 
``` + +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + println!("=== Vertex Covering -> QUBO Reduction ===\n"); + + // Cycle C4: 0-1-2-3-0 + let edges = vec![(0, 1), (1, 2), (2, 3), (0, 3)]; + let vc = VertexCovering::::new(4, edges.clone()); + + // Reduce to QUBO + let reduction = ReduceTo::::reduce_to(&vc); + let qubo = reduction.target_problem(); + + println!("Source: VertexCovering on cycle C4 (4 vertices, 4 edges)"); + println!("Target: QUBO with {} variables", qubo.num_variables()); + println!("Q matrix:"); + for row in qubo.matrix() { + println!(" {:?}", row); + } + + // Solve QUBO with brute force + let solver = BruteForce::new(); + let qubo_solutions = solver.find_best(qubo); + + // Extract and verify solutions + println!("\nOptimal solutions:"); + let mut solutions = Vec::new(); + for sol in &qubo_solutions { + let extracted = reduction.extract_solution(sol); + let selected: Vec = extracted + .iter() + .enumerate() + .filter(|(_, &x)| x == 1) + .map(|(i, _)| i) + .collect(); + let size = selected.len(); + println!( + " Cover vertices: {:?} ({} vertices)", + selected, size + ); + + // Closed-loop verification: check solution is valid in original problem + let sol_size = vc.solution_size(&extracted); + assert!(sol_size.is_valid, "Solution must be valid in source problem"); + + solutions.push(SolutionPair { + source_config: extracted, + target_config: sol.clone(), + }); + } + + // All optimal solutions should have size 2 + assert!( + solutions.iter().all(|s| s.source_config.iter().filter(|&&x| x == 1).count() == 2), + "All optimal VC solutions on C4 should have size 2" + ); + println!("\nVerification passed: all solutions are valid with size 2"); + + // Export JSON + let overhead = lookup_overhead("VertexCovering", "QUBO") + .expect("VertexCovering -> QUBO overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: 
VertexCovering::::NAME.to_string(), + variant: variant_to_map(VertexCovering::::variant()), + instance: serde_json::json!({ + "num_vertices": vc.num_vertices(), + "num_edges": vc.num_edges(), + "edges": edges, + }), + }, + target: ProblemSide { + problem: QUBO::::NAME.to_string(), + variant: variant_to_map(QUBO::::variant()), + instance: serde_json::json!({ + "num_vars": qubo.num_vars(), + "matrix": qubo.matrix(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("vc_to_qubo", &data, &results); +} diff --git a/examples/reduction_vc_to_setcovering.rs b/examples/reduction_vc_to_setcovering.rs new file mode 100644 index 00000000..09812fe1 --- /dev/null +++ b/examples/reduction_vc_to_setcovering.rs @@ -0,0 +1,112 @@ +//! # Vertex Cover to Set Covering Reduction +//! +//! ## Mathematical Equivalence +//! Universe U = {0, ..., |E|-1} (edge indices). For each vertex v, set +//! S_v = edges incident to v. A vertex cover (every edge has an endpoint +//! in the cover) maps to a set cover (every universe element in some set). +//! +//! ## This Example +//! - Instance: Triangle K3 (3 vertices, 3 edges) +//! - Source VC: min size 2 +//! - Target SetCovering: min cover 2 +//! +//! ## Output +//! Exports `docs/paper/examples/vc_to_setcovering.json` for use in paper code blocks. +//! +//! See docs/paper/reductions.typ for the full reduction specification. 
+ +use problemreductions::export::*; +use problemreductions::prelude::*; +use problemreductions::topology::SimpleGraph; + +fn main() { + println!("\n=== Vertex Cover -> Set Covering Reduction ===\n"); + + // Triangle K3: 3 vertices, 3 edges + let edges = vec![(0, 1), (1, 2), (0, 2)]; + let source = VertexCovering::::new(3, edges.clone()); + + println!("Source: VertexCovering on K3"); + println!(" Vertices: 3"); + println!(" Edges: {:?}", edges); + + // Reduce to SetCovering + let reduction = ReduceTo::>::reduce_to(&source); + let target = reduction.target_problem(); + + println!("\nTarget: SetCovering"); + println!(" Universe size: {}", target.universe_size()); + println!(" Sets: {} sets", target.num_sets()); + for (i, set) in target.sets().iter().enumerate() { + println!(" S_{} = {:?}", i, set); + } + + // Solve the target problem + let solver = BruteForce::new(); + let target_solutions = solver.find_best(target); + + println!("\nBest target solutions: {}", target_solutions.len()); + + // Extract and verify each solution + let mut solutions = Vec::new(); + for (i, target_sol) in target_solutions.iter().enumerate() { + let source_sol = reduction.extract_solution(target_sol); + let source_size = source.solution_size(&source_sol); + let target_size = target.solution_size(target_sol); + + println!( + " Solution {}: target={:?} (size={}), source={:?} (size={}, valid={})", + i, target_sol, target_size.size, source_sol, source_size.size, source_size.is_valid + ); + + assert!( + source_size.is_valid, + "Extracted source solution must be valid" + ); + + solutions.push(SolutionPair { + source_config: source_sol, + target_config: target_sol.clone(), + }); + } + + // Use the first solution for verification + let target_sol = &target_solutions[0]; + let source_sol = reduction.extract_solution(target_sol); + let source_size = source.solution_size(&source_sol); + let target_size = target.solution_size(target_sol); + + assert_eq!(source_size.size, 2, "VC on K3 has optimal size 
2"); + assert_eq!(target_size.size, 2, "SetCovering should also have size 2"); + + // Export JSON + let overhead = lookup_overhead("VertexCovering", "SetCovering") + .expect("VertexCovering -> SetCovering overhead not found"); + + let data = ReductionData { + source: ProblemSide { + problem: VertexCovering::::NAME.to_string(), + variant: variant_to_map(VertexCovering::::variant()), + instance: serde_json::json!({ + "num_vertices": source.num_vertices(), + "num_edges": source.num_edges(), + "edges": edges, + }), + }, + target: ProblemSide { + problem: SetCovering::::NAME.to_string(), + variant: variant_to_map(SetCovering::::variant()), + instance: serde_json::json!({ + "num_sets": target.num_sets(), + "sets": target.sets(), + "universe_size": target.universe_size(), + }), + }, + overhead: overhead_to_json(&overhead), + }; + + let results = ResultData { solutions }; + write_example("vc_to_setcovering", &data, &results); + + println!("\nDone: VC(K3) optimal=2 maps to SetCovering optimal=2"); +} diff --git a/src/export.rs b/src/export.rs new file mode 100644 index 00000000..e5af4e91 --- /dev/null +++ b/src/export.rs @@ -0,0 +1,131 @@ +//! JSON export schema for reduction examples. +//! +//! Provides a unified serialization format for all reduction example programs. +//! Each example produces two files: +//! - `.json` — reduction structure (source, target, overhead) +//! - `.result.json` — runtime solutions +//! +//! The schema mirrors the internal types: `ReductionOverhead` for polynomials, +//! `Problem::variant()` for problem variants, and `Problem::NAME` for problem names. + +use crate::rules::registry::{ReductionEntry, ReductionOverhead}; +use serde::Serialize; +use std::collections::HashMap; +use std::fs; +use std::path::Path; + +/// One side (source or target) of a reduction. +#[derive(Serialize, Clone, Debug)] +pub struct ProblemSide { + /// Problem name matching `Problem::NAME` (e.g., `"IndependentSet"`). 
+    pub problem: String,
+    /// Variant attributes (e.g., `{"graph": "SimpleGraph", "weight": "Unweighted"}`).
+    pub variant: HashMap<String, String>,
+    /// Problem-specific instance data (edges, matrix, clauses, etc.).
+    pub instance: serde_json::Value,
+}
+
+/// A monomial in JSON: coefficient × Π(variable^exponent).
+#[derive(Serialize, Clone, Debug)]
+pub struct MonomialJson {
+    pub coefficient: f64,
+    pub variables: Vec<(String, u8)>,
+}
+
+/// One output field mapped to a polynomial.
+#[derive(Serialize, Clone, Debug)]
+pub struct OverheadEntry {
+    pub field: String,
+    pub polynomial: Vec<MonomialJson>,
+}
+
+/// Top-level reduction structure (written to `.json`).
+#[derive(Serialize, Clone, Debug)]
+pub struct ReductionData {
+    pub source: ProblemSide,
+    pub target: ProblemSide,
+    pub overhead: Vec<OverheadEntry>,
+}
+
+/// One source↔target solution pair.
+#[derive(Serialize, Clone, Debug)]
+pub struct SolutionPair {
+    pub source_config: Vec<usize>,
+    pub target_config: Vec<usize>,
+}
+
+/// Runtime results (written to `.result.json`).
+#[derive(Serialize, Clone, Debug)]
+pub struct ResultData {
+    pub solutions: Vec<SolutionPair>,
+}
+
+/// Convert a `ReductionOverhead` to JSON-serializable entries.
+pub fn overhead_to_json(overhead: &ReductionOverhead) -> Vec<OverheadEntry> {
+    overhead
+        .output_size
+        .iter()
+        .map(|(field, poly)| OverheadEntry {
+            field: field.to_string(),
+            polynomial: poly
+                .terms
+                .iter()
+                .map(|m| MonomialJson {
+                    coefficient: m.coefficient,
+                    variables: m
+                        .variables
+                        .iter()
+                        .map(|(name, exp)| (name.to_string(), *exp))
+                        .collect(),
+                })
+                .collect(),
+        })
+        .collect()
+}
+
+/// Look up `ReductionOverhead` from inventory by source and target problem names.
+///
+/// Searches all registered `ReductionEntry` items for a matching source/target pair.
+/// Returns `None` if no matching reduction is registered (e.g., ILP reductions
+/// that don't use the `#[reduction]` macro).
+pub fn lookup_overhead(source_name: &str, target_name: &str) -> Option<ReductionOverhead> {
+    for entry in inventory::iter::<ReductionEntry> {
+        if entry.source_name == source_name && entry.target_name == target_name {
+            return Some(entry.overhead());
+        }
+    }
+    None
+}
+
+/// Look up overhead, returning an empty overhead if not registered.
+pub fn lookup_overhead_or_empty(source_name: &str, target_name: &str) -> ReductionOverhead {
+    lookup_overhead(source_name, target_name).unwrap_or_default()
+}
+
+/// Convert `Problem::variant()` output to a `HashMap`.
+pub fn variant_to_map(variant: Vec<(&str, &str)>) -> HashMap<String, String> {
+    variant
+        .into_iter()
+        .map(|(k, v)| (k.to_string(), v.to_string()))
+        .collect()
+}
+
+/// Write both `.json` and `.result.json` to `docs/paper/examples/`.
+pub fn write_example(name: &str, reduction: &ReductionData, results: &ResultData) {
+    let dir = Path::new("docs/paper/examples");
+    fs::create_dir_all(dir).expect("Failed to create examples directory");
+
+    let reduction_path = dir.join(format!("{}.json", name));
+    let json = serde_json::to_string_pretty(reduction).expect("Failed to serialize reduction");
+    fs::write(&reduction_path, json).expect("Failed to write reduction JSON");
+    println!("Exported: {}", reduction_path.display());
+
+    let results_path = dir.join(format!("{}.result.json", name));
+    let json = serde_json::to_string_pretty(results).expect("Failed to serialize results");
+    fs::write(&results_path, json).expect("Failed to write results JSON");
+    println!("Exported: {}", results_path.display());
+}
+
+#[cfg(test)]
+#[path = "unit_tests/export.rs"]
+mod tests;
diff --git a/src/lib.rs b/src/lib.rs
index 8b80dffb..3a264f7d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -63,6 +63,7 @@ pub mod config;
 pub mod error;
+pub mod export;
 pub mod graph_types;
 pub mod io;
 pub mod models;
diff --git a/src/unit_tests/export.rs b/src/unit_tests/export.rs
new file mode 100644
index 00000000..e74814e3
--- /dev/null
+++ b/src/unit_tests/export.rs
@@ -0,0 +1,218 @@
+use super::*;
+use crate::polynomial::Polynomial; +use crate::rules::registry::ReductionOverhead; + +#[test] +fn test_overhead_to_json_empty() { + let overhead = ReductionOverhead::default(); + let entries = overhead_to_json(&overhead); + assert!(entries.is_empty()); +} + +#[test] +fn test_overhead_to_json_single_field() { + let overhead = ReductionOverhead::new(vec![( + "num_vertices", + Polynomial::var("n") + Polynomial::var("m"), + )]); + let entries = overhead_to_json(&overhead); + assert_eq!(entries.len(), 1); + assert_eq!(entries[0].field, "num_vertices"); + assert_eq!(entries[0].polynomial.len(), 2); + + // Check first monomial: 1*n + assert_eq!(entries[0].polynomial[0].coefficient, 1.0); + assert_eq!(entries[0].polynomial[0].variables, vec![("n".to_string(), 1)]); + + // Check second monomial: 1*m + assert_eq!(entries[0].polynomial[1].coefficient, 1.0); + assert_eq!(entries[0].polynomial[1].variables, vec![("m".to_string(), 1)]); +} + +#[test] +fn test_overhead_to_json_constant_monomial() { + let overhead = ReductionOverhead::new(vec![( + "num_vars", + Polynomial::constant(42.0), + )]); + let entries = overhead_to_json(&overhead); + assert_eq!(entries.len(), 1); + assert_eq!(entries[0].field, "num_vars"); + assert_eq!(entries[0].polynomial.len(), 1); + assert_eq!(entries[0].polynomial[0].coefficient, 42.0); + assert!(entries[0].polynomial[0].variables.is_empty()); +} + +#[test] +fn test_overhead_to_json_scaled_power() { + let overhead = ReductionOverhead::new(vec![( + "num_edges", + Polynomial::var_pow("n", 2).scale(3.0), + )]); + let entries = overhead_to_json(&overhead); + assert_eq!(entries.len(), 1); + assert_eq!(entries[0].polynomial.len(), 1); + assert_eq!(entries[0].polynomial[0].coefficient, 3.0); + assert_eq!(entries[0].polynomial[0].variables, vec![("n".to_string(), 2)]); +} + +#[test] +fn test_overhead_to_json_multiple_fields() { + let overhead = ReductionOverhead::new(vec![ + ("num_vertices", Polynomial::var("n")), + ("num_edges", Polynomial::var_pow("n", 
2)), + ]); + let entries = overhead_to_json(&overhead); + assert_eq!(entries.len(), 2); + assert_eq!(entries[0].field, "num_vertices"); + assert_eq!(entries[1].field, "num_edges"); +} + +#[test] +fn test_variant_to_map_empty() { + let map = variant_to_map(vec![]); + assert!(map.is_empty()); +} + +#[test] +fn test_variant_to_map_single() { + let map = variant_to_map(vec![("graph", "SimpleGraph")]); + assert_eq!(map.len(), 1); + assert_eq!(map["graph"], "SimpleGraph"); +} + +#[test] +fn test_variant_to_map_multiple() { + let map = variant_to_map(vec![("graph", "SimpleGraph"), ("weight", "i32")]); + assert_eq!(map.len(), 2); + assert_eq!(map["graph"], "SimpleGraph"); + assert_eq!(map["weight"], "i32"); +} + +#[test] +fn test_lookup_overhead_known_reduction() { + // IS -> VC is a known registered reduction + let result = lookup_overhead("IndependentSet", "VertexCovering"); + assert!(result.is_some()); +} + +#[test] +fn test_lookup_overhead_unknown_reduction() { + let result = lookup_overhead("NonExistent", "AlsoNonExistent"); + assert!(result.is_none()); +} + +#[test] +fn test_lookup_overhead_or_empty_known() { + let overhead = lookup_overhead_or_empty("IndependentSet", "VertexCovering"); + assert!(!overhead.output_size.is_empty()); +} + +#[test] +fn test_lookup_overhead_or_empty_unknown() { + let overhead = lookup_overhead_or_empty("NonExistent", "AlsoNonExistent"); + assert!(overhead.output_size.is_empty()); +} + +#[test] +fn test_write_example_creates_files() { + use std::fs; + + let data = ReductionData { + source: ProblemSide { + problem: "TestProblem".to_string(), + variant: variant_to_map(vec![("graph", "SimpleGraph")]), + instance: serde_json::json!({"num_vertices": 3}), + }, + target: ProblemSide { + problem: "TargetProblem".to_string(), + variant: variant_to_map(vec![]), + instance: serde_json::json!({"num_vars": 5}), + }, + overhead: vec![], + }; + + let results = ResultData { + solutions: vec![SolutionPair { + source_config: vec![1, 0, 1], + target_config: 
vec![1, 0, 1, 0, 0], + }], + }; + + write_example("_test_export", &data, &results); + + // Verify files exist and contain valid JSON + let reduction_path = "docs/paper/examples/_test_export.json"; + let results_path = "docs/paper/examples/_test_export.result.json"; + + let reduction_json: serde_json::Value = + serde_json::from_str(&fs::read_to_string(reduction_path).unwrap()).unwrap(); + assert_eq!(reduction_json["source"]["problem"], "TestProblem"); + assert_eq!(reduction_json["target"]["problem"], "TargetProblem"); + + let results_json: serde_json::Value = + serde_json::from_str(&fs::read_to_string(results_path).unwrap()).unwrap(); + assert_eq!(results_json["solutions"][0]["source_config"], serde_json::json!([1, 0, 1])); + + // Clean up test files + let _ = fs::remove_file(reduction_path); + let _ = fs::remove_file(results_path); +} + +#[test] +fn test_problem_side_serialization() { + let side = ProblemSide { + problem: "IndependentSet".to_string(), + variant: variant_to_map(vec![("graph", "SimpleGraph"), ("weight", "i32")]), + instance: serde_json::json!({"num_vertices": 4, "edges": [[0, 1], [1, 2]]}), + }; + let json = serde_json::to_value(&side).unwrap(); + assert_eq!(json["problem"], "IndependentSet"); + assert!(json["variant"]["graph"] == "SimpleGraph"); + assert!(json["instance"]["num_vertices"] == 4); +} + +#[test] +fn test_reduction_data_serialization() { + let data = ReductionData { + source: ProblemSide { + problem: "IS".to_string(), + variant: variant_to_map(vec![]), + instance: serde_json::json!({"n": 3}), + }, + target: ProblemSide { + problem: "VC".to_string(), + variant: variant_to_map(vec![]), + instance: serde_json::json!({"n": 3}), + }, + overhead: vec![OverheadEntry { + field: "num_vertices".to_string(), + polynomial: vec![MonomialJson { + coefficient: 1.0, + variables: vec![("n".to_string(), 1)], + }], + }], + }; + let json = serde_json::to_value(&data).unwrap(); + assert_eq!(json["overhead"][0]["field"], "num_vertices"); + 
assert_eq!(json["overhead"][0]["polynomial"][0]["coefficient"], 1.0); +} + +#[test] +fn test_result_data_serialization() { + let results = ResultData { + solutions: vec![ + SolutionPair { + source_config: vec![1, 0], + target_config: vec![0, 1], + }, + SolutionPair { + source_config: vec![0, 1], + target_config: vec![1, 0], + }, + ], + }; + let json = serde_json::to_value(&results).unwrap(); + assert_eq!(json["solutions"].as_array().unwrap().len(), 2); + assert_eq!(json["solutions"][0]["source_config"], serde_json::json!([1, 0])); +} diff --git a/tests/main.rs b/tests/main.rs index aa16c637..41db87ce 100644 --- a/tests/main.rs +++ b/tests/main.rs @@ -2,3 +2,5 @@ mod integration; #[path = "suites/reductions.rs"] mod reductions; +#[path = "suites/examples.rs"] +mod examples; diff --git a/tests/suites/examples.rs b/tests/suites/examples.rs new file mode 100644 index 00000000..2bb6236d --- /dev/null +++ b/tests/suites/examples.rs @@ -0,0 +1,58 @@ +use std::process::Command; + +fn run_example(name: &str) { + let output = Command::new("cargo") + .args(["run", "--all-features", "--example", name]) + .output() + .unwrap_or_else(|e| panic!("Failed to execute example {}: {}", name, e)); + + assert!( + output.status.success(), + "Example {} failed with status {:?}\nstdout: {}\nstderr: {}", + name, + output.status, + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr), + ); +} + +#[test] +#[ignore] +fn test_all_reduction_examples() { + let examples = [ + "reduction_circuit_to_spinglass", + "reduction_clique_to_ilp", + "reduction_coloring_to_ilp", + "reduction_coloring_to_qubo", + "reduction_dominatingset_to_ilp", + "reduction_factoring_to_circuit", + "reduction_factoring_to_ilp", + "reduction_ilp_to_qubo", + "reduction_is_to_ilp", + "reduction_is_to_qubo", + "reduction_is_to_setpacking", + "reduction_is_to_vc", + "reduction_ksatisfiability_to_qubo", + "reduction_matching_to_ilp", + "reduction_matching_to_setpacking", + 
"reduction_maxcut_to_spinglass", + "reduction_qubo_to_spinglass", + "reduction_sat_to_coloring", + "reduction_sat_to_dominatingset", + "reduction_sat_to_is", + "reduction_sat_to_ksat", + "reduction_setcovering_to_ilp", + "reduction_setpacking_to_ilp", + "reduction_setpacking_to_qubo", + "reduction_spinglass_to_maxcut", + "reduction_spinglass_to_qubo", + "reduction_vc_to_ilp", + "reduction_vc_to_is", + "reduction_vc_to_qubo", + "reduction_vc_to_setcovering", + ]; + + for name in &examples { + run_example(name); + } +}