diff --git a/data/AP_p-3_n-5_ins-1.dat b/data/AP_p-3_n-5_ins-1.dat new file mode 100644 index 00000000..078b97d6 --- /dev/null +++ b/data/AP_p-3_n-5_ins-1.dat @@ -0,0 +1,20 @@ +3 +5 +[[[6, 1, 20, 2, 3], +[2, 6, 9, 10, 18], +[1, 6, 20, 5, 9], +[6, 8, 6, 9, 6], +[7, 10, 10, 6, 2]] +, +[[17, 20, 8, 8, 20], +[10, 13, 1, 10, 15], +[4, 11, 1, 13, 1], +[19, 13, 7, 18, 17], +[15, 3, 5, 1, 11]] +, +[[10, 7, 1, 19, 12], +[2, 15, 12, 10, 3], +[11, 20, 16, 12, 9], +[10, 15, 20, 11, 7], +[1, 9, 20, 7, 6]] +] diff --git a/data/F5050W01.dat b/data/F5050W01.dat new file mode 100644 index 00000000..a948b996 --- /dev/null +++ b/data/F5050W01.dat @@ -0,0 +1,168 @@ +# Problem F5050W01 : N=50/P=2/K=1 + +# N +50 + +# P +2 + +# K +1 + +# Objectif 1 +112 +14 +156 +175 +82 +221 +44 +94 +292 +223 +274 +61 +246 +279 +104 +179 +234 +289 +86 +41 +78 +159 +112 +223 +112 +149 +158 +207 +68 +197 +41 +289 +29 +170 +263 +164 +197 +162 +149 +100 +191 +100 +257 +257 +102 +75 +260 +23 +220 +89 + +# Objectif 2 +86 +129 +32 +230 +165 +260 +83 +138 +236 +95 +178 +105 +158 +245 +244 +99 +288 +149 +85 +14 +39 +64 +129 +185 +18 +239 +287 +15 +44 +241 +41 +293 +73 +17 +17 +81 +158 +13 +228 +203 +8 +107 +138 +185 +140 +282 +96 +43 +292 +115 + +# Contrainte 1 +280 +235 +278 +37 +214 +155 +216 +69 +261 +202 +245 +132 +82 +1 +102 +253 +200 +242 +289 +178 +91 +200 +115 +168 +65 +157 +74 +225 +138 +211 +88 +115 +26 +44 +227 +4 +69 +206 +289 +254 +247 +20 +121 +125 +77 +62 +103 +123 +199 +268 + +3904 diff --git a/data/KP_p-3_n-10_ins-1.dat b/data/KP_p-3_n-10_ins-1.dat new file mode 100644 index 00000000..92eb248c --- /dev/null +++ b/data/KP_p-3_n-10_ins-1.dat @@ -0,0 +1,7 @@ +3 +10 +2137 +[[566, 611, 506, 180, 817, 184, 585, 423, 26, 317], +[62, 84, 977, 979, 874, 54, 269, 93, 881, 563], +[664, 982, 962, 140, 224, 215, 12, 869, 332, 537]] +[557, 898, 148, 63, 78, 964, 246, 662, 386, 272] diff --git a/data/didactic1.txt b/data/didactic1.txt new file mode 100755 index 00000000..816538ef --- /dev/null +++ b/data/didactic1.txt @@ -0,0 +1,24 @@ +8 +5 + +7 20 21 53 34 +74 71 88 51 35 +69 88 87 40 57 +86 64 79 81 59 +76 67 47 25 76 +8 14 76 82 77 +69 58 16 75 40 +96 24 38 96 13 + +33 99 45 81 71 +66 11 10 90 97 +70 15 87 12 10 +73 66 89 91 12 + 2 93 48 90 68 +44 22 50 98 45 +35 51 6 47 5 +55 37 73 37 74 + +99 27 54 11 29 + +52 6 21 98 6 \ No newline at end of file diff --git a/data/minisat-segfault.cnf b/data/minisat-segfault.cnf old mode 100755 new mode 100644 diff --git a/src/instances/fio/opb.rs b/src/instances/fio/opb.rs index 49204548..f7798e93 100644 --- a/src/instances/fio/opb.rs +++ b/src/instances/fio/opb.rs @@ -12,7 +12,7 @@ use crate::{ instances::{ManageVars, SatInstance}, types::{ constraints::{CardConstraint, PbConstraint}, - Cl, Lit, Var, + Cl, Clause, Lit, Var, }, }; use anyhow::Context; @@ -35,8 +35,6 @@ use std::{ use crate::instances::MultiOptInstance; #[cfg(feature = "optimization")] use crate::instances::{Objective, OptInstance}; -#[cfg(feature = "optimization")] -use crate::types::WLitIter; /// Options for reading and writing OPB files /// Possible relational operators @@ -219,8 +217,8 @@ fn variable(input: &str, opts: Options) -> IResult<&str, Var> { /// Parses a literal. The spec for linear OPB instances only allows for /// variables but we allow negated literals with '~' as in non-linear OPB /// instances. 
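As an illustrative sketch of what the hunk below enables (a crate-internal call, assuming the default `Options` and that `Lit` implements `PartialEq`; this snippet is not itself part of the diff), both OPB negation styles should now parse to the same negated literal:

    let (_, tilde) = literal("~x3", Options::default()).unwrap();
    let (_, minus) = literal("-x3", Options::default()).unwrap();
    // '~x3' (non-linear OPB style) and '-x3' (PB-competition style) agree
    assert_eq!(tilde, minus);
    assert!(tilde.is_neg());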
-fn literal(input: &str, opts: Options) -> IResult<&str, Lit> { - match tag::<_, _, NomError<_>>("~")(input) { +pub(crate) fn literal(input: &str, opts: Options) -> IResult<&str, Lit> { + match alt::<_, _, NomError<_>, _>((tag("~"), tag("-")))(input) { Ok((input, _)) => map_res(|i| variable(i, opts), |v| Ok::<_, ()>(v.neg_lit()))(input), Err(_) => map_res(|i| variable(i, opts), |v| Ok::<_, ()>(v.pos_lit()))(input), } @@ -358,6 +356,80 @@ fn opb_data(input: &str, opts: Options) -> IResult<&str, OpbData> { ))(input) } +/// Possible lines that can be written to OPB +#[cfg(not(feature = "optimization"))] +pub enum OpbLine { + /// A comment line + Comment(String), + /// A clausal constraint line + Clause(Clause), + /// A cardinality constraint line + Card(CardConstraint), + /// A PB constraint line + Pb(PbConstraint), +} + +/// Possible lines that can be written to OPB +#[cfg(feature = "optimization")] +pub enum OpbLine { + /// A comment line + Comment(String), + /// A clausal constraint line + Clause(Clause), + /// A cardinality constraint line + Card(CardConstraint), + /// A PB constraint line + Pb(PbConstraint), + /// An objective line + Objective(LI), +} + +/// Writes an OPB file from an interator over [`OpbLine`]s +#[cfg(not(feature = "optimization"))] +pub fn write_opb_lines( + writer: &mut W, + data: Iter, + opts: Options, +) -> Result<(), io::Error> +where + W: Write, + Iter: Iterator, +{ + for dat in data { + match dat { + OpbLine::Comment(c) => writeln!(writer, "* {c}")?, + OpbLine::Clause(cl) => write_clause(writer, &cl, opts)?, + OpbLine::Card(card) => write_card(writer, &card, opts)?, + OpbLine::Pb(pb) => write_pb(writer, &pb, opts)?, + } + } + Ok(()) +} + +/// Writes an OPB file from an interator over [`OpbLine`]s +#[cfg(feature = "optimization")] +pub fn write_opb_lines( + writer: &mut W, + data: Iter, + opts: Options, +) -> Result<(), io::Error> +where + W: Write, + LI: crate::types::WLitIter, + Iter: Iterator>, +{ + for dat in data { + match dat { + OpbLine::Comment(c) => writeln!(writer, "* {c}")?, + OpbLine::Clause(cl) => write_clause(writer, &cl, opts)?, + OpbLine::Card(card) => write_card(writer, &card, opts)?, + OpbLine::Pb(pb) => write_pb(writer, &pb, opts)?, + OpbLine::Objective(obj) => write_objective(writer, (obj, 0), opts)?, + } + } + Ok(()) +} + /// Writes a [`SatInstance`] to an OPB file /// /// # Errors @@ -423,7 +495,7 @@ pub fn write_opt( ) -> Result<(), io::Error> where W: Write, - LI: WLitIter, + LI: crate::types::WLitIter, VM: ManageVars, { let cnf = &constrs.cnf; @@ -474,7 +546,7 @@ where W: Write, VM: ManageVars, Iter: Iterator, - LI: WLitIter, + LI: crate::types::WLitIter, { let cnf = &constrs.cnf; let cards = &constrs.cards; @@ -733,7 +805,7 @@ fn write_pb(writer: &mut W, pb: &PbConstraint, opts: Options) -> Resul /// # Errors /// /// If writing fails, returns [`io::Error`]. -fn write_objective( +fn write_objective( writer: &mut W, softs: (LI, isize), opts: Options, diff --git a/src/instances/multiopt.rs b/src/instances/multiopt.rs index 37e2155d..d1193f7c 100644 --- a/src/instances/multiopt.rs +++ b/src/instances/multiopt.rs @@ -1,6 +1,6 @@ //! 
# Multi-Objective Optimization Instance Representations -use std::{io, path::Path}; +use std::{collections::BTreeSet, io, path::Path}; use crate::{ encodings::{card, pb}, @@ -243,6 +243,35 @@ impl MultiOptInstance { MultiOptInstance { constrs, objs } } + fn extend_var_set(&self, varset: &mut BTreeSet) { + self.constrs.extend_var_set(varset); + for o in &self.objs { + o.var_set(varset); + } + } + + /// Gets the set of variables in the instance + pub fn var_set(&self) -> BTreeSet { + let mut varset = BTreeSet::new(); + self.extend_var_set(&mut varset); + varset + } + + /// Reindex all variables in the instance in order + /// + /// If the reindexing variable manager produces new free variables in order, this results in + /// the variable _order_ being preserved with gaps in the variable space being closed + #[must_use] + pub fn reindex_ordered(self, mut reindexer: R) -> MultiOptInstance { + let mut varset = BTreeSet::new(); + self.extend_var_set(&mut varset); + // reindex variables in order to ensure ordered reindexing + for var in varset { + reindexer.reindex(var); + } + self.reindex(reindexer) + } + #[cfg(feature = "rand")] /// Randomly shuffles the order of constraints and the objective #[must_use] diff --git a/src/instances/opt.rs b/src/instances/opt.rs index 08b7b7ba..feba122a 100644 --- a/src/instances/opt.rs +++ b/src/instances/opt.rs @@ -1,6 +1,12 @@ //! # Optimization Instance Representations -use std::{cmp, collections::hash_map, io, path::Path, slice}; +use std::{ + cmp, + collections::{hash_map, BTreeSet}, + io, + path::Path, + slice, +}; use crate::{ clause, @@ -895,6 +901,35 @@ impl Objective { } } + pub(crate) fn var_set(&self, varset: &mut BTreeSet) { + match &self.0 { + IntObj::Weighted { + soft_lits, + soft_clauses, + .. + } => { + varset.extend(soft_lits.iter().map(|(l, _)| l.var())); + varset.extend( + soft_clauses + .iter() + .flat_map(|(cl, _)| cl.iter().map(|l| l.var())), + ); + } + IntObj::Unweighted { + soft_lits, + soft_clauses, + .. + } => { + varset.extend(soft_lits.iter().map(|l| l.var())); + varset.extend( + soft_clauses + .iter() + .flat_map(|cl| cl.iter().map(|l| l.var())), + ); + } + } + } + /// Normalizes the objective to a unified representation. This sorts internal data structures. 
#[must_use] pub fn normalize(mut self) -> Self { @@ -1191,12 +1226,40 @@ impl Instance { } /// Reindexes all variables in the instance with a reindexing variable manager + #[must_use] pub fn reindex(self, mut reindexer: R) -> Instance { let obj = self.obj.reindex(&mut reindexer); let constrs = self.constrs.reindex(reindexer); Instance { constrs, obj } } + fn extend_var_set(&self, varset: &mut BTreeSet) { + self.constrs.extend_var_set(varset); + self.obj.var_set(varset); + } + + /// Gets the set of variables in the instance + pub fn var_set(&self) -> BTreeSet { + let mut varset = BTreeSet::new(); + self.extend_var_set(&mut varset); + varset + } + + /// Reindex all variables in the instance in order + /// + /// If the reindexing variable manager produces new free variables in order, this results in + /// the variable _order_ being preserved with gaps in the variable space being closed + #[must_use] + pub fn reindex_ordered(self, mut reindexer: R) -> Instance { + let mut varset = BTreeSet::new(); + self.extend_var_set(&mut varset); + // reindex variables in order to ensure ordered reindexing + for var in varset { + reindexer.reindex(var); + } + self.reindex(reindexer) + } + #[cfg(feature = "rand")] /// Randomly shuffles the order of constraints and the objective #[must_use] diff --git a/src/instances/sat.rs b/src/instances/sat.rs index d1704d3d..1b6637c4 100644 --- a/src/instances/sat.rs +++ b/src/instances/sat.rs @@ -1,13 +1,19 @@ //! # Satsifiability Instance Representations -use std::{cmp, collections::TryReserveError, io, ops::Index, path::Path}; +use std::{ + cmp, + collections::{BTreeSet, TryReserveError}, + io, + ops::Index, + path::Path, +}; use crate::{ clause, encodings::{atomics, card, pb, CollectClauses}, lit, types::{ - constraints::{CardConstraint, PbConstraint}, + constraints::{CardConstraint, ConstraintRef, PbConstraint}, Assignment, Clause, Lit, TernaryVal, Var, }, utils::{unreachable_err, LimitedIter}, @@ -386,6 +392,18 @@ impl Instance { self.pbs.len() } + /// Returns the total number of constraints + /// + /// This will always be equal to the sum of clauses, cardinality constraints, and PB + /// constraints. 
+ /// ``` + /// let inst: rustsat::instances::SatInstance = Default::default(); + /// assert_eq!(inst.n_constraints(), inst.n_clauses() + inst.n_cards() + inst.n_pbs()); + /// ``` + pub fn n_constraints(&self) -> usize { + self.n_clauses() + self.n_cards() + self.n_pbs() + } + /// Adds a clause to the instance pub fn add_clause(&mut self, cl: Clause) { cl.iter().for_each(|l| { @@ -698,6 +716,15 @@ impl Instance { .for_each(|constr| pb_encoder(constr, &mut self.cnf, &mut self.var_manager)); } + /// Converts the instance to a set of [`PbConstraint`]s + pub fn into_pbs(mut self) -> (Vec, VM) { + self.pbs + .extend(self.cards.into_iter().map(PbConstraint::from)); + self.pbs + .extend(self.cnf.into_iter().map(PbConstraint::from)); + (self.pbs, self.var_manager) + } + /// Extends the instance by another instance pub fn extend(&mut self, other: Instance) { self.cnf.extend(other.cnf); @@ -705,6 +732,7 @@ impl Instance { } /// Reindexes all variables in the instance with a reindexing variable manager + #[must_use] pub fn reindex(mut self, mut reindexer: R) -> Instance { self.cnf .iter_mut() @@ -724,6 +752,42 @@ impl Instance { } } + pub(crate) fn extend_var_set(&self, varset: &mut BTreeSet) { + varset.extend(self.cnf.iter().flat_map(|cl| cl.iter().map(|l| l.var()))); + varset.extend( + self.cards + .iter() + .flat_map(|card| card.iter().map(|l| l.var())), + ); + varset.extend( + self.pbs + .iter() + .flat_map(|pbs| pbs.iter().map(|(l, _)| l.var())), + ); + } + + /// Gets the set of variables in the instance + pub fn var_set(&self) -> BTreeSet { + let mut varset = BTreeSet::new(); + self.extend_var_set(&mut varset); + varset + } + + /// Reindex all variables in the instance in order + /// + /// If the reindexing variable manager produces new free variables in order, this results in + /// the variable _order_ being preserved with gaps in the variable space being closed + #[must_use] + pub fn reindex_ordered(self, mut reindexer: R) -> Instance { + let mut varset = BTreeSet::new(); + self.extend_var_set(&mut varset); + // reindex variables in order to ensure ordered reindexing + for var in varset { + reindexer.reindex(var); + } + self.reindex(reindexer) + } + #[cfg(feature = "rand")] /// Randomly shuffles the order of constraints. 
#[must_use] @@ -1013,6 +1077,26 @@ impl Instance { pub fn is_sat(&self, assign: &Assignment) -> bool { self.evaluate(assign) == TernaryVal::True } + + /// Returns an unsatisfied constraint, if one exists + pub fn unsat_constraint(&self, assign: &Assignment) -> Option { + for clause in self.cnf.iter() { + if clause.evaluate(assign) != TernaryVal::True { + return Some(ConstraintRef::Clause(clause)); + } + } + for card in &self.cards { + if card.evaluate(assign) != TernaryVal::True { + return Some(ConstraintRef::Card(card)); + } + } + for pb in &self.pbs { + if pb.evaluate(assign) != TernaryVal::True { + return Some(ConstraintRef::Pb(pb)); + } + } + None + } } impl Instance { @@ -1137,3 +1221,21 @@ impl CollectClauses for Instance { self.cnf.extend_clauses(cl_iter) } } + +#[cfg(test)] +mod tests { + use crate::{clause, instances::Cnf, lit}; + + #[test] + fn reindex_ordered() { + let mut inst: super::Instance = super::Instance::default(); + inst.add_nary(&[lit![4], lit![1]]); + inst.add_nary(&[lit![0], lit![2]]); + let inst = inst.reindex_ordered(super::super::ReindexingVarManager::default()); + let (cnf, _) = inst.into_cnf(); + assert_eq!( + cnf, + Cnf::from_iter([clause![lit![3], lit![1]], clause![lit![0], lit![2]]]) + ); + } +} diff --git a/src/types.rs b/src/types.rs index 905e9f0e..8dd69905 100644 --- a/src/types.rs +++ b/src/types.rs @@ -719,6 +719,19 @@ pub enum InvalidVLine { EmptyLine, } +/// Different possible v-line fomats +enum VLineFormat { + /// Assignment specified as a space-spearated sequence of IPASIR literals. This is the format + /// used in the SAT competition. + SatComp, + /// Assignment specified as a sequence of zeroes and ones. This is the format used in the + /// MaxSAT Evaluation. + MaxSatEval, + /// Assignment specified as a sequence of space-separated OPB literals. This is the format used + /// in the PB competition. + PbComp, +} + /// Type representing an assignment of variables. #[derive(Clone, PartialEq, Eq, Default)] #[repr(transparent)] @@ -840,12 +853,36 @@ impl Assignment { Ok(assignment) } - /// Parses and saves literals from a value line. + /// Parses an assignment from a value line returned by a solver /// - /// # Errors - /// - /// [`InvalidVLine`] or parsing error, or [`nom::error::Error`] + /// The following value line formats are supported: + /// - [SAT Competition](https://satcompetition.github.io/2024/output.html): `v 1 -2 -3 4 0` + /// - [MaxSAT Evaluation](https://maxsat-evaluations.github.io/2024/rules.html): `v 1001` + /// - [PB Competition](https://www.cril.univ-artois.fr/PB24/competitionRequirements.pdf): `v x1 -x2 -x3 x4` pub fn extend_from_vline(&mut self, lines: &str) -> anyhow::Result<()> { + anyhow::ensure!(!lines.is_empty(), InvalidVLine::EmptyLine); + // determine line format + let format = if lines.contains('x') { + VLineFormat::PbComp + } else if !lines[1..].trim().contains(' ') + && lines[1..].trim() != "0" + && lines[1..] 
+ .trim() + .chars() + .try_for_each(|c| { + if c == '1' || c == '0' { + Ok(()) + } else { + Err(()) + } + }) + .is_ok() + { + VLineFormat::MaxSatEval + } else { + VLineFormat::SatComp + }; + for line in lines.lines() { if let Some(tag) = line.chars().next() { anyhow::ensure!(tag == 'v', InvalidVLine::InvalidTag(tag)); @@ -853,28 +890,67 @@ impl Assignment { anyhow::bail!(InvalidVLine::EmptyLine); } - let line = &line[1..]; - for number in line.split_whitespace() { - let number = number.parse::()?; + match format { + VLineFormat::SatComp => self.extend_sat_comp_vline(line), + VLineFormat::MaxSatEval => self.extend_maxsat_eval_vline(line), + VLineFormat::PbComp => self.extend_pb_comp_vline(line), + }? + } + Ok(()) + } - // End of the value lines - if number == 0 { - continue; - } + /// Parses a single SAT Competition v-line + fn extend_sat_comp_vline(&mut self, line: &str) -> anyhow::Result<()> { + let line = &line[1..]; + for number in line.split_whitespace() { + let number = number.parse::()?; - let literal = Lit::from_ipasir(number)?; - let val = self.lit_value(literal); - if val == TernaryVal::True && literal.is_neg() - || val == TernaryVal::False && literal.is_pos() - { - // Catch conflicting assignments - anyhow::bail!(InvalidVLine::ConflictingAssignment(literal.var())); - } - self.assign_lit(literal); + // End of the value lines + if number == 0 { + continue; + } + + let literal = Lit::from_ipasir(number)?; + let val = self.lit_value(literal); + if val == TernaryVal::True && literal.is_neg() + || val == TernaryVal::False && literal.is_pos() + { + // Catch conflicting assignments + anyhow::bail!(InvalidVLine::ConflictingAssignment(literal.var())); } + self.assign_lit(literal); } + Ok(()) + } - anyhow::ensure!(!lines.is_empty(), InvalidVLine::EmptyLine); + /// Parses a single MaxSAT Evaluation v-line + fn extend_maxsat_eval_vline(&mut self, line: &str) -> anyhow::Result<()> { + let line = line[1..].trim(); + self.assignment.reserve(line.len()); + for char in line.chars() { + match char { + '0' => self.assignment.push(TernaryVal::False), + '1' => self.assignment.push(TernaryVal::True), + _ => anyhow::bail!("unexpected character in MaxSAT Evaluation v-line"), + } + } + Ok(()) + } + + /// Parses a single PB Competition v-line + fn extend_pb_comp_vline(&mut self, line: &str) -> anyhow::Result<()> { + let line = &line[1..]; + for number in line.split_whitespace() { + let (_, lit) = fio::opb::literal(number, fio::opb::Options::default()) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse pb literal '{}'", number))?; + let val = self.lit_value(lit); + if val == TernaryVal::True && lit.is_neg() || val == TernaryVal::False && lit.is_pos() { + // Catch conflicting assignments + anyhow::bail!(InvalidVLine::ConflictingAssignment(lit.var())); + } + self.assign_lit(lit); + } Ok(()) } @@ -1192,7 +1268,7 @@ mod tests { } #[test] - fn parse_vline() { + fn parse_sat_comp_vline() { let vline = "v 1 -2 4 -5 6 0"; let ground_truth = Assignment::from(vec![ TernaryVal::True, @@ -1213,6 +1289,50 @@ mod tests { ); } + #[test] + fn parse_maxsat_eval_vline() { + let vline = "v 100101"; + let ground_truth = Assignment::from(vec![ + TernaryVal::True, + TernaryVal::False, + TernaryVal::False, + TernaryVal::True, + TernaryVal::False, + TernaryVal::True, + ]); + assert_eq!(Assignment::from_vline(vline).unwrap(), ground_truth); + assert_eq!( + { + let mut asign = Assignment::default(); + asign.extend_from_vline(vline).unwrap(); + asign + }, + ground_truth + ); + } + + #[test] + fn 
parse_pb_comp_vline() { + let vline = "v x1 -x2 x4 -x5 x6"; + let ground_truth = Assignment::from(vec![ + TernaryVal::True, + TernaryVal::False, + TernaryVal::DontCare, + TernaryVal::True, + TernaryVal::False, + TernaryVal::True, + ]); + assert_eq!(Assignment::from_vline(vline).unwrap(), ground_truth); + assert_eq!( + { + let mut asign = Assignment::default(); + asign.extend_from_vline(vline).unwrap(); + asign + }, + ground_truth + ); + } + #[test] fn vline_invalid_lit_from() { let vline = "v 1 -2 4 foo -5 bar 6 0"; diff --git a/src/types/constraints.rs b/src/types/constraints.rs index 93a2e22e..aff6cc1d 100644 --- a/src/types/constraints.rs +++ b/src/types/constraints.rs @@ -1,7 +1,7 @@ //! # Constraint Types //! //! Different types of constraints. The most important one is [`Clause`] but -//! Rust SAT supports more complex constraints like [`PBConstraint`] or +//! Rust SAT supports more complex constraints like [`PbConstraint`] or //! [`CardConstraint`]. use core::slice; @@ -18,6 +18,27 @@ use super::{Assignment, IWLitIter, Lit, LitIter, RsHashSet, TernaryVal, WLitIter use crate::RequiresClausal; +/// A reference to any type of constraint throughout RustSAT +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum ConstraintRef<'constr> { + /// A clausal constraint + Clause(&'constr Clause), + /// A cardinality constraint + Card(&'constr CardConstraint), + /// A pseudo-Boolean constraint + Pb(&'constr PbConstraint), +} + +impl fmt::Display for ConstraintRef<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ConstraintRef::Clause(cl) => write!(f, "{cl}"), + ConstraintRef::Card(card) => write!(f, "{card}"), + ConstraintRef::Pb(pb) => write!(f, "{pb}"), + } + } +} + /// Type representing a clause. /// Wrapper around a std collection to allow for changing the data structure. /// Optional clauses as sets will be included in the future. @@ -511,6 +532,28 @@ pub enum CardConstraint { Eq(CardEqConstr), } +impl fmt::Display for CardConstraint { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + CardConstraint::Ub(c) => write!(f, "{c}"), + CardConstraint::Lb(c) => write!(f, "{c}"), + CardConstraint::Eq(c) => write!(f, "{c}"), + } + } +} + +macro_rules! 
write_lit_sum { + ($f:expr, $vec:expr) => {{ + for i in 0..$vec.len() { + if i < $vec.len() - 1 { + write!($f, "{} + ", $vec[i])?; + } else { + write!($f, "{}", $vec[i])?; + } + } + }}; +} + impl CardConstraint { /// Constructs a new upper bound cardinality constraint (`sum of lits <= b`) pub fn new_ub(lits: LI, b: usize) -> Self { @@ -555,6 +598,25 @@ impl CardConstraint { } } + /// Gets the number of literals in the constraint + pub fn len(&self) -> usize { + match self { + CardConstraint::Ub(constr) => constr.lits.len(), + CardConstraint::Lb(constr) => constr.lits.len(), + CardConstraint::Eq(constr) => constr.lits.len(), + } + } + + /// Checks whether the constraint is empty + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Gets the number of literals in the constraint + pub fn n_lits(&self) -> usize { + self.len() + } + /// Changes the bound on the constraint pub fn change_bound(&mut self, b: usize) { match self { @@ -741,6 +803,12 @@ impl<'slf> IntoIterator for &'slf mut CardConstraint { } } +impl From for CardConstraint { + fn from(value: Clause) -> Self { + CardConstraint::new_lb(value, 1) + } +} + /// An upper bound cardinality constraint (`sum of lits <= b`) #[derive(Hash, Eq, PartialEq, Clone, Debug)] pub struct CardUbConstr { @@ -754,6 +822,13 @@ pub struct CardUbConstr { )] pub use CardUbConstr as CardUBConstr; +impl fmt::Display for CardUbConstr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write_lit_sum!(f, self.lits); + write!(f, " <= {}", self.b) + } +} + impl CardUbConstr { /// Decomposes the constraint to a set of input literals and an upper bound #[must_use] @@ -798,6 +873,13 @@ pub struct CardLbConstr { )] pub use CardLbConstr as CardLBConstr; +impl fmt::Display for CardLbConstr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write_lit_sum!(f, self.lits); + write!(f, " >= {}", self.b) + } +} + impl CardLbConstr { /// Decomposes the constraint to a set of input literals and a lower bound #[must_use] @@ -848,6 +930,13 @@ pub struct CardEqConstr { )] pub use CardEqConstr as CardEQConstr; +impl fmt::Display for CardEqConstr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write_lit_sum!(f, self.lits); + write!(f, " = {}", self.b) + } +} + impl CardEqConstr { /// Decomposes the constraint to a set of input literals and an equality bound #[must_use] @@ -918,6 +1007,28 @@ pub enum PbConstraint { )] pub use PbConstraint as PBConstraint; +impl fmt::Display for PbConstraint { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + PbConstraint::Ub(c) => write!(f, "{c}"), + PbConstraint::Lb(c) => write!(f, "{c}"), + PbConstraint::Eq(c) => write!(f, "{c}"), + } + } +} + +macro_rules! 
write_wlit_sum { + ($f:expr, $vec:expr) => {{ + for i in 0..$vec.len() { + if i < $vec.len() - 1 { + write!($f, "{} {} + ", $vec[i].1, $vec[i].0)?; + } else { + write!($f, "{} {}", $vec[i].1, $vec[i].0)?; + } + } + }}; +} + impl PbConstraint { /// Converts input literals to non-negative weights, also returns the weight sum and the sum to add to the bound fn convert_input_lits(lits: LI) -> (impl WLitIter, usize, isize) { @@ -997,6 +1108,25 @@ impl PbConstraint { } } + /// Gets the number of literals in the constraint + pub fn len(&self) -> usize { + match self { + PbConstraint::Ub(constr) => constr.lits.len(), + PbConstraint::Lb(constr) => constr.lits.len(), + PbConstraint::Eq(constr) => constr.lits.len(), + } + } + + /// Checks whether the constraint is empty + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Gets the number of literals in the constraint + pub fn n_lits(&self) -> usize { + self.len() + } + /// Checks if the constraint is always satisfied #[must_use] pub fn is_tautology(&self) -> bool { @@ -1113,7 +1243,7 @@ impl PbConstraint { /// # Errors /// /// If the constraint is not a cardinality constraint, including when it is a tautology or - /// unsat, returns [`PBToCardError`] + /// unsat, returns [`PbToCardError`] pub fn into_card_constr(self) -> Result { if self.is_tautology() { return Err(PbToCardError::Tautology); @@ -1241,9 +1371,9 @@ impl PbConstraint { return TernaryVal::DontCare; } match self { - PBConstraint::Ub(PbUbConstr { b, .. }) => (start <= *b).into(), - PBConstraint::Lb(PbLbConstr { b, .. }) => (start >= *b).into(), - PBConstraint::Eq(PbEqConstr { b, .. }) => (start == *b).into(), + PbConstraint::Ub(PbUbConstr { b, .. }) => (start <= *b).into(), + PbConstraint::Lb(PbLbConstr { b, .. }) => (start >= *b).into(), + PbConstraint::Eq(PbEqConstr { b, .. 
}) => (start == *b).into(), } } (Ok(start), Err(_)) => match self { @@ -1296,6 +1426,31 @@ impl<'slf> IntoIterator for &'slf mut PbConstraint { } } +impl From for PbConstraint { + fn from(value: Clause) -> Self { + PbConstraint::new_lb(value.into_iter().map(|l| (l, 1)), 1) + } +} + +impl From for PbConstraint { + fn from(value: CardConstraint) -> Self { + match value { + CardConstraint::Ub(constr) => PbConstraint::new_ub( + constr.lits.into_iter().map(|l| (l, 1)), + isize::try_from(constr.b).expect("cannot handle bounds higher than `isize::MAX`"), + ), + CardConstraint::Lb(constr) => PbConstraint::new_lb( + constr.lits.into_iter().map(|l| (l, 1)), + isize::try_from(constr.b).expect("cannot handle bounds higher than `isize::MAX`"), + ), + CardConstraint::Eq(constr) => PbConstraint::new_eq( + constr.lits.into_iter().map(|l| (l, 1)), + isize::try_from(constr.b).expect("cannot handle bounds higher than `isize::MAX`"), + ), + } + } +} + /// An upper bound pseudo-boolean constraint (`weighted sum of lits <= b`) #[derive(Eq, PartialEq, Clone, Debug)] pub struct PbUbConstr { @@ -1310,6 +1465,13 @@ pub struct PbUbConstr { )] pub use PbUbConstr as PBUBConstr; +impl fmt::Display for PbUbConstr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write_wlit_sum!(f, self.lits); + write!(f, " <= {}", self.b) + } +} + impl PbUbConstr { /// Decomposes the constraint to a set of input literals and an upper bound #[must_use] @@ -1400,6 +1562,13 @@ pub struct PbLbConstr { )] pub use PbLbConstr as PBLBConstr; +impl fmt::Display for PbLbConstr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write_wlit_sum!(f, self.lits); + write!(f, " >= {}", self.b) + } +} + impl PbLbConstr { /// Decomposes the constraint to a set of input literals and a lower bound #[must_use] @@ -1489,6 +1658,13 @@ pub struct PbEqConstr { )] pub use PbEqConstr as PBEQConstr; +impl fmt::Display for PbEqConstr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write_wlit_sum!(f, self.lits); + write!(f, " = {}", self.b) + } +} + impl PbEqConstr { /// Decomposes the constraint to a set of input literals and an equality bound #[must_use] diff --git a/tools/Cargo.toml b/tools/Cargo.toml index f115d805..45835bf4 100644 --- a/tools/Cargo.toml +++ b/tools/Cargo.toml @@ -16,6 +16,7 @@ anyhow.workspace = true atty.workspace = true clap.workspace = true concolor-clap.workspace = true +itertools.workspace = true nom.workspace = true termcolor.workspace = true rand.workspace = true diff --git a/tools/data b/tools/data new file mode 120000 index 00000000..4909e06e --- /dev/null +++ b/tools/data @@ -0,0 +1 @@ +../data \ No newline at end of file diff --git a/tools/src/bin/check-solution.rs b/tools/src/bin/check-solution.rs new file mode 100644 index 00000000..e390106f --- /dev/null +++ b/tools/src/bin/check-solution.rs @@ -0,0 +1,148 @@ +//! # check-solution +//! +//! A small tool for checking solutions to SAT and optimization instances. 
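The check performed by this tool reduces to two APIs added earlier in this diff, `Assignment::extend_from_vline` and `SatInstance::unsat_constraint`. A minimal self-contained sketch of that flow, with a toy instance built inline instead of parsed from a file (the `anyhow` return type is only there for the `?`):

    use rustsat::{clause, instances::SatInstance, lit, types::Assignment};

    fn main() -> anyhow::Result<()> {
        // toy instance: the single clause (x1 or x2) in IPASIR numbering
        let mut inst: SatInstance = Default::default();
        inst.add_clause(clause![lit![0], lit![1]]);
        // a SAT-competition style value line assigning both variables to false
        let mut sol = Assignment::default();
        sol.extend_from_vline("v -1 -2 0")?;
        // the clause is falsified, so an unsatisfied constraint is reported
        assert!(inst.unsat_constraint(&sol).is_some());
        Ok(())
    }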
+ +use std::{io, path::PathBuf}; + +use clap::{Parser, ValueEnum}; +use rustsat::{ + instances::{ + fio::{self, opb::Options as OpbOptions}, + MultiOptInstance, OptInstance, SatInstance, + }, + types::Assignment, +}; + +#[derive(Copy, Clone, PartialEq, Eq, ValueEnum)] +pub enum FileFormat { + /// Infer the file format from the file extension according to the following rules: + /// - `.cnf`: DIMACS CNF file + /// - `.wcnf`: Weighted DIMACS CNF (MaxSAT) file + /// - `.mcnf`: Multi-objective MaxSAT file + /// - `.opb`: OPB file (without an objective) + /// - `.mopb` / `.pbmo`: Multi-objective OPB file + /// All file extensions can also be appended with `.bz2`, `.xz`, or `.gz` if compression is used. + Infer, + /// A DIMACS CNF file + Cnf, + /// A DIMACS WCNF file + Wcnf, + /// A DIMACS MCNF file + Mcnf, + /// An OPB file with zero or more objectives + Opb, +} + +#[derive(Parser)] +#[command(author, version, about, long_about = None)] +struct Args { + /// The instance to check the solution against + instance: PathBuf, + /// The solution specified as one or multiple v-lines. If not specified, will be read from + /// stdin. + solution: Option, + /// The file format of the instance. With infer, the file format is + /// inferred from the file extension. + #[arg(long, value_enum, default_value_t = FileFormat::Infer)] + file_format: FileFormat, + /// The index in the OPB file to treat as the lowest variable + #[arg(long, default_value_t = 1)] + opb_first_var_idx: u32, +} + +fn main() -> anyhow::Result<()> { + let args = Args::parse(); + let opb_opts = OpbOptions { + first_var_idx: args.opb_first_var_idx, + ..OpbOptions::default() + }; + let (constrs, objs) = parse_instance(args.instance, args.file_format, opb_opts)?.decompose(); + + let mut reader = if let Some(solution) = args.solution { + fio::open_compressed_uncompressed_read(solution)? + } else { + Box::new(io::BufReader::new(io::stdin())) + }; + + let mut sol = Assignment::default(); + loop { + let mut buf = String::new(); + let read = reader.read_line(&mut buf)?; + if read == 0 { + break; + } + if buf.starts_with('v') { + sol.extend_from_vline(&buf)?; + } + } + + if let Some(constr) = constrs.unsat_constraint(&sol) { + println!("unsatisfied contraint: {constr}"); + std::process::exit(1); + } + print!("objective values: "); + for i in 0..objs.len() { + if i < objs.len() - 1 { + print!("{}, ", objs[i].evaluate(&sol)) + } else { + print!("{}", objs[i].evaluate(&sol)); + } + } + println!(); + Ok(()) +} + +macro_rules! is_one_of { + ($a:expr, $($b:expr),*) => { + $( $a == $b || )* false + } +} + +fn parse_instance( + inst_path: PathBuf, + file_format: FileFormat, + opb_opts: fio::opb::Options, +) -> anyhow::Result { + match file_format { + FileFormat::Infer => { + if let Some(ext) = inst_path.extension() { + let path_without_compr = inst_path.with_extension(""); + let ext = if is_one_of!(ext, "gz", "bz2", "xz") { + // Strip compression extension + match path_without_compr.extension() { + Some(ext) => ext, + None => anyhow::bail!("no file extension after compression extension"), + } + } else { + ext + }; + let inst = if is_one_of!(ext, "cnf", "dimacs") { + let inst = SatInstance::from_dimacs_path(inst_path)?; + MultiOptInstance::compose(inst, vec![]) + } else if is_one_of!(ext, "wcnf") { + let (inst, obj) = OptInstance::from_dimacs_path(inst_path)?.decompose(); + MultiOptInstance::compose(inst, vec![obj]) + } else if is_one_of!(ext, "mcnf") { + MultiOptInstance::from_dimacs_path(inst_path)? 
+ } else if is_one_of!(ext, "opb", "mopb", "pbmo") { + MultiOptInstance::from_opb_path(inst_path, opb_opts)? + } else { + anyhow::bail!("unknown file extension") + }; + Ok(inst) + } else { + anyhow::bail!("no file extension") + } + } + FileFormat::Cnf => { + let inst = SatInstance::from_dimacs_path(inst_path)?; + Ok(MultiOptInstance::compose(inst, vec![])) + } + FileFormat::Wcnf => { + let (inst, obj) = OptInstance::from_dimacs_path(inst_path)?.decompose(); + Ok(MultiOptInstance::compose(inst, vec![obj])) + } + FileFormat::Mcnf => MultiOptInstance::from_dimacs_path(inst_path), + FileFormat::Opb => MultiOptInstance::from_opb_path(inst_path, opb_opts), + } +} diff --git a/tools/src/bin/encodings.rs b/tools/src/bin/encodings.rs index 06398bb2..f533138b 100644 --- a/tools/src/bin/encodings.rs +++ b/tools/src/bin/encodings.rs @@ -1,9 +1,17 @@ //! # encodings //! -//! A collection of (multi-objective) MaxSAT encodings. +//! A collection of (multi-objective) MaxSAT/PBO encodings. //! -//! `clustering`: Constrained correlation clustering encodings following \[1\]. -//! `knapsack`: Multi-criteria 0-1 knapsack. +//! ## CNF/MaxSAT Encodings +//! +//! - `clustering`: Constrained correlation clustering encodings following \[1\]. +//! - `knapsack`: Multi-criteria 0-1 knapsack. +//! +//! ## PBO Encodings +//! +//! - `knapsack`: Multi-criteria 0-1 knapsack. +//! - `assignment`: Multi-criteria assignment problem. +//! - `facility-location`: Multi-criteria 0/1 uncapacitated facility location problem. //! //! ## References //! @@ -11,25 +19,76 @@ //! correlation clustering via weighted partial Maximum Satisfiability_, AIJ //! 2017. -use clap::{Args, Parser, Subcommand}; -use rustsat::{encodings::pb, instances::fio::dimacs}; +use clap::{Args, Parser, Subcommand, ValueEnum}; +use core::fmt; +use rustsat::{ + encodings::pb as pb_enc, + instances::fio::{dimacs, opb}, + types::{Lit, WLitIter}, +}; use rustsat_tools::encodings::{ - clustering::{self, saturating_map, scaling_map, Encoding, Variant}, - knapsack, + assignment, + cnf::{ + self, + clustering::{self, saturating_map, scaling_map, Encoding, Variant}, + }, + facilitylocation, knapsack, pb, }; use std::{fs::File, io, path::PathBuf}; #[derive(Parser)] #[command(author, version, about, long_about = None)] struct CliArgs { + /// The output path. Writes to `stdout` if not given. 
+ out_path: Option, + #[command(subcommand)] + fmt: Format, +} + +#[derive(Subcommand)] +enum Format { + /// Generate a CNF encoding + Cnf(CnfArgs), + /// Generate a PB encoding + Pb(PbArgs), +} + +#[derive(Args)] +struct CnfArgs { #[command(subcommand)] - cmd: Command, + enc: CnfEncoding, + #[arg(long, short = 's')] + single_objective: bool, } #[derive(Subcommand)] -enum Command { +enum CnfEncoding { + /// Generate a clustering encoding Clustering(ClusteringArgs), + /// Generate a knapsack encoding + Knapsack(KnapsackArgs), +} + +#[derive(Args)] +struct PbArgs { + /// The index in the OPB file to treat as the lowest variable + #[arg(long, default_value_t = 1)] + first_var_idx: u32, + /// Avoid negated literals in the OPB file by transforming constraints + #[arg(long)] + avoid_negated_lits: bool, + #[command(subcommand)] + enc: PbEncoding, +} + +#[derive(Subcommand)] +enum PbEncoding { + /// Generate a knapsack encoding Knapsack(KnapsackArgs), + /// Generate an assignment problem encoding + Assignment(AssignmentArgs), + /// Generate a facility location problem encoding + FacilityLocation(FacilityLocationArgs), } #[derive(Args)] @@ -38,8 +97,6 @@ struct ClusteringArgs { /// form `[nodeA] [nodeB] [similarity value]`. Reads from `stdin` if not /// given. in_path: Option, - /// The M/WCNF output path. Writes to `stdout` if not given. - out_path: Option, /// The encoding variant #[arg(long, default_value_t = Variant::default())] variant: Variant, @@ -65,8 +122,67 @@ struct ClusteringArgs { #[derive(Args)] struct KnapsackArgs { - /// The M/WCNF output path. Writes to `stdout` if not given. - out_path: Option, + #[command(subcommand)] + variant: KnapsackCommand, +} + +#[derive(Subcommand)] +enum KnapsackCommand { + /// Encode a given input instance + Input(InputKnapsackArgs), + /// Generate a random instance + Random(RandomKnapsackArgs), +} + +#[derive(Args)] +struct InputKnapsackArgs { + /// The input file + in_path: Option, + #[arg(long, default_value_t = KnapsackInputFormat::default())] + in_format: KnapsackInputFormat, +} + +#[derive(Default, ValueEnum, Clone, Copy, PartialEq, Eq)] +enum KnapsackInputFormat { + /// Input files as provided by [MOO-Library](http://home.ku.edu.tr/~moolibrary/) + #[default] + MooLibrary, + /// Input files as provided by [vOptLib](https://github.com/vOptSolver/vOptLib/tree/master/UKP) + VOptLib, +} + +#[derive(Args)] +struct AssignmentArgs { + /// The input file in the format as provided by [MOO-Library](http://home.ku.edu.tr/~moolibrary/) + in_path: Option, +} + +#[derive(Args)] +struct FacilityLocationArgs { + /// The input file in the format as provided by [vOptLib](https://github.com/vOptSolver/vOptLib/tree/master/UFLP) + in_path: Option, +} + +impl fmt::Display for KnapsackInputFormat { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + KnapsackInputFormat::MooLibrary => write!(f, "moo-library"), + KnapsackInputFormat::VOptLib => write!(f, "v-opt-lib"), + } + } +} + +impl From for knapsack::FileFormat { + fn from(val: KnapsackInputFormat) -> Self { + match val { + KnapsackInputFormat::MooLibrary => knapsack::FileFormat::MooLibrary, + KnapsackInputFormat::VOptLib => knapsack::FileFormat::VOptLib, + } + } +} + +#[derive(Args)] +struct RandomKnapsackArgs { /// The number of items to select from #[arg(long, default_value_t = 20)] n_items: usize, @@ -93,15 +209,9 @@ struct KnapsackArgs { seed: u64, } -fn clustering(args: ClusteringArgs) -> anyhow::Result<()> { - let mcnf_to_wcnf = |line: dimacs::McnfLine| match line { - 
dimacs::McnfLine::Comment(c) => dimacs::WcnfLine::Comment(c), - dimacs::McnfLine::Hard(cl) => dimacs::WcnfLine::Hard(cl), - dimacs::McnfLine::Soft(cl, w, _) => dimacs::WcnfLine::Soft(cl, w), - }; - +fn clustering(args: ClusteringArgs) -> anyhow::Result> { if let Some(in_path) = args.in_path { - let encoding = clustering::Encoding::new( + clustering::Encoding::new( io::BufReader::new(File::open(in_path)?), args.variant, |sim| { @@ -111,67 +221,174 @@ fn clustering(args: ClusteringArgs) -> anyhow::Result<()> { args.hard_threshold, ) }, - )?; - if let Some(out_path) = args.out_path { - let mut file = File::open(out_path)?; - if args.single_objective { - dimacs::write_wcnf(&mut file, encoding.map(mcnf_to_wcnf))?; - } else { - dimacs::write_mcnf(&mut file, encoding)?; - } - } else if args.single_objective { - dimacs::write_wcnf(&mut io::stdout(), encoding.map(mcnf_to_wcnf))?; - } else { - dimacs::write_mcnf(&mut io::stdout(), encoding)?; - } + ) } else { - let encoding = Encoding::new(io::BufReader::new(io::stdin()), args.variant, |sim| { + Encoding::new(io::BufReader::new(io::stdin()), args.variant, |sim| { saturating_map( scaling_map(sim, args.multiplier) - args.offset, args.dont_care, args.hard_threshold, ) - })?; - if let Some(out_path) = args.out_path { - let mut file = File::open(out_path)?; - if args.single_objective { - dimacs::write_wcnf(&mut file, encoding.map(mcnf_to_wcnf))?; - } else { - dimacs::write_mcnf(&mut file, encoding)?; - } - } else if args.single_objective { - dimacs::write_wcnf(&mut io::stdout(), encoding.map(mcnf_to_wcnf))?; - } else { - dimacs::write_mcnf(&mut io::stdout(), encoding)?; - } - }; - - Ok(()) + }) + } } -fn knapsack(args: KnapsackArgs) -> anyhow::Result<()> { - let encoding = knapsack::Encoding::new::(knapsack::Knapsack::random( +fn random_knapsack(args: &RandomKnapsackArgs) -> knapsack::Knapsack { + knapsack::Knapsack::random( args.n_items, args.n_objectives, args.min_value..args.max_value, args.min_weight..args.max_weight, knapsack::Capacity::FractionTotalWeight(args.cap_fraction), args.seed, - )); - if let Some(out_path) = args.out_path { - let mut file = File::open(out_path)?; - dimacs::write_mcnf(&mut file, encoding)?; + ) +} + +fn input_knapsack(args: &InputKnapsackArgs) -> anyhow::Result { + if let Some(path) = &args.in_path { + let reader = io::BufReader::new(File::open(path)?); + knapsack::Knapsack::from_file(reader, args.in_format.into()) + } else { + knapsack::Knapsack::from_file(io::BufReader::new(io::stdin()), args.in_format.into()) + } +} + +fn cnf_knapsack(inst: knapsack::Knapsack) -> impl Iterator { + cnf::knapsack::Encoding::new::(inst) +} + +fn pb_knapsack( + inst: knapsack::Knapsack, +) -> impl Iterator as IntoIterator>::IntoIter>> { + pb::knapsack::Encoding::new(inst) +} + +fn input_assignment(args: &AssignmentArgs) -> anyhow::Result { + if let Some(path) = &args.in_path { + let reader = io::BufReader::new(File::open(path)?); + assignment::Assignment::from_file(reader) + } else { + assignment::Assignment::from_file(io::BufReader::new(io::stdin())) + } +} + +fn pb_assignment( + inst: assignment::Assignment, +) -> impl Iterator as IntoIterator>::IntoIter>> { + pb::assignment::Encoding::new(inst) +} + +fn input_facility_location( + args: &FacilityLocationArgs, +) -> anyhow::Result { + if let Some(path) = &args.in_path { + let reader = io::BufReader::new(File::open(path)?); + facilitylocation::FacilityLocation::from_file(reader) + } else { + facilitylocation::FacilityLocation::from_file(io::BufReader::new(io::stdin())) + } +} + +fn 
pb_facility_location( + inst: facilitylocation::FacilityLocation, +) -> impl Iterator as IntoIterator>::IntoIter>> { + pb::facilitylocation::Encoding::new(inst) +} + +fn write_cnf( + encoding: impl Iterator, + path: Option, + single_objective: bool, +) -> anyhow::Result<()> { + let mcnf_to_wcnf = |line: dimacs::McnfLine| match line { + dimacs::McnfLine::Comment(c) => dimacs::WcnfLine::Comment(c), + dimacs::McnfLine::Hard(cl) => dimacs::WcnfLine::Hard(cl), + dimacs::McnfLine::Soft(cl, w, _) => dimacs::WcnfLine::Soft(cl, w), + }; + + if let Some(out_path) = path { + let mut file = io::BufWriter::new(File::create(out_path)?); + if single_objective { + dimacs::write_wcnf(&mut file, encoding.map(mcnf_to_wcnf))?; + } else { + dimacs::write_mcnf(&mut file, encoding)?; + } + } else if single_objective { + dimacs::write_wcnf(&mut io::stdout(), encoding.map(mcnf_to_wcnf))?; } else { dimacs::write_mcnf(&mut io::stdout(), encoding)?; } Ok(()) } +fn write_pb( + encoding: impl Iterator>, + path: Option, + opts: opb::Options, +) -> anyhow::Result<()> { + if let Some(out_path) = path { + let mut file = io::BufWriter::new(File::create(out_path)?); + opb::write_opb_lines(&mut file, encoding, opts)?; + } else { + opb::write_opb_lines(&mut io::stdout(), encoding, opts)?; + } + Ok(()) +} + +type BoxedDimacsIter = Box>; +type BoxedOpbIter = + Box as IntoIterator>::IntoIter>>>; + fn main() -> anyhow::Result<()> { let args = CliArgs::parse(); - match args.cmd { - Command::Clustering(args) => clustering(args), - Command::Knapsack(args) => knapsack(args), + let out_path = args.out_path; + match args.fmt { + Format::Cnf(args) => { + let enc: BoxedDimacsIter = match args.enc { + CnfEncoding::Clustering(args) => Box::new(clustering(args)?), + CnfEncoding::Knapsack(args) => { + let inst = match args.variant { + KnapsackCommand::Input(args) => input_knapsack(&args)?, + KnapsackCommand::Random(args) => random_knapsack(&args), + }; + Box::new(cnf_knapsack(inst)) + } + }; + write_cnf(enc, out_path, args.single_objective) + } + Format::Pb(args) => { + let opts = opb::Options { + first_var_idx: args.first_var_idx, + no_negated_lits: args.avoid_negated_lits, + }; + let enc: BoxedOpbIter = match args.enc { + PbEncoding::Knapsack(args) => { + let inst = match args.variant { + KnapsackCommand::Input(args) => input_knapsack(&args)?, + KnapsackCommand::Random(args) => random_knapsack(&args), + }; + Box::new(pb_knapsack(inst)) + } + PbEncoding::Assignment(args) => { + let inst = input_assignment(&args)?; + Box::new(pb_assignment(inst)) + } + PbEncoding::FacilityLocation(args) => { + let inst = input_facility_location(&args)?; + Box::new(pb_facility_location(inst)) + } + }; + write_pb(enc, out_path, opts) + } + } +} + +#[cfg(test)] +mod tests { + #[test] + fn verify_cli_args() { + use clap::CommandFactory; + super::CliArgs::command().debug_assert() } } diff --git a/tools/src/bin/gbmosplit.rs b/tools/src/bin/gbmosplit.rs index e7d2de7c..b38ea86b 100644 --- a/tools/src/bin/gbmosplit.rs +++ b/tools/src/bin/gbmosplit.rs @@ -12,18 +12,26 @@ use clap::{Parser, ValueEnum}; use rustsat::{ - instances::{ManageVars, MultiOptInstance, Objective, OptInstance}, + instances::{fio, ManageVars, MultiOptInstance, Objective, OptInstance}, types::Clause, }; -use std::{collections::BTreeSet, fmt, io::Write, path::PathBuf}; +use std::{ + collections::BTreeSet, + fmt, + io::{self, Write}, + path::PathBuf, +}; use termcolor::{Buffer, BufferWriter, Color, ColorSpec, WriteColor}; struct Cli { - in_path: PathBuf, + in_path: Option, out_path: Option, + 
input_format: InputFormat, + output_format: OutputFormat, split_alg: SplitAlg, max_combs: usize, always_dump: bool, + opb_opts: fio::opb::Options, stdout: BufferWriter, stderr: BufferWriter, } @@ -34,9 +42,15 @@ impl Cli { Self { in_path: args.in_path, out_path: args.out_path, + input_format: args.input_format, + output_format: args.output_format, split_alg: args.split_alg, max_combs: args.max_combs, always_dump: args.always_dump, + opb_opts: fio::opb::Options { + first_var_idx: args.opb_first_var_idx, + ..fio::opb::Options::default() + }, stdout: BufferWriter::stdout(match args.color.color { concolor_clap::ColorChoice::Always => termcolor::ColorChoice::Always, concolor_clap::ColorChoice::Never => termcolor::ColorChoice::Never, @@ -76,7 +90,7 @@ impl Cli { self.stdout.print(&buffer).unwrap(); } - fn error(&self, msg: &str) { + fn error(&self, err: &anyhow::Error) { let mut buffer = self.stderr.buffer(); buffer .set_color(ColorSpec::new().set_bold(true).set_fg(Some(Color::Red))) @@ -86,7 +100,7 @@ impl Cli { buffer.set_color(ColorSpec::new().set_bold(true)).unwrap(); write!(&mut buffer, ": ").unwrap(); buffer.reset().unwrap(); - writeln!(&mut buffer, "{}", msg).unwrap(); + writeln!(&mut buffer, "{}", err).unwrap(); self.stdout.print(&buffer).unwrap(); } @@ -173,24 +187,93 @@ impl Cli { #[derive(Parser)] #[command(author, version, about, long_about = None)] struct Args { - /// The DIMACS WCNF input file - in_path: PathBuf, - /// The optional DIMACS MCNF output path. If no path is given only detection - /// results will be printed. + /// The path to the input file. If no path is given, will read from `stdin`. + in_path: Option, + /// The optional output path. If no path is given, will write to `stdout`. out_path: Option, /// The splitting algorithm to use - #[arg(long, default_value_t = SplitAlg::Gbmo)] + #[arg(long, default_value_t = SplitAlg::default())] split_alg: SplitAlg, /// The maximum number of weight combinations to check when thoroughly checking GBMO #[arg(long, default_value_t = 100000)] max_combs: usize, - /// Always dump an MCNF file, even if the instance couldn't be split - #[arg(long)] + /// The file format of the input + #[arg(long, default_value_t = InputFormat::default())] + input_format: InputFormat, + /// The file format to write + #[arg(long, default_value_t = OutputFormat::default())] + output_format: OutputFormat, + /// Always dump a multi-objective file, even if the instance couldn't be split + #[arg(long, short = 'd')] always_dump: bool, + /// The index in the OPB file to treat as the lowest variable + #[arg(long, default_value_t = 1)] + opb_first_var_idx: u32, #[command(flatten)] color: concolor_clap::Color, } +#[derive(Copy, Clone, PartialEq, Eq, ValueEnum, Default)] +enum InputFormat { + /// Infer the input file format from the file extension according to the following rules: + /// - `.wcnf`: Weighted DIMACS CNF (MaxSAT) file + /// - `.opb`: OPB file (without an objective) + /// All file extensions can also be appended with `.bz2`, `.xz`, or `.gz` if compression is used. 
+ #[default] + Infer, + /// A DIMACS WCNF file + Wcnf, + /// An OPB file + Opb, +} + +impl fmt::Display for InputFormat { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + InputFormat::Infer => write!(f, "infer"), + InputFormat::Wcnf => write!(f, "wcnf"), + InputFormat::Opb => write!(f, "opb"), + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, ValueEnum, Default)] +enum OutputFormat { + /// Same as the input format + #[default] + AsInput, + /// DIMACS WCNF + Mcnf, + /// OPB + Opb, +} + +impl fmt::Display for OutputFormat { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + OutputFormat::AsInput => write!(f, "as-input"), + OutputFormat::Mcnf => write!(f, "mcnf"), + OutputFormat::Opb => write!(f, "opb"), + } + } +} + +impl OutputFormat { + fn infer(self, write_format: WriteFormat) -> WriteFormat { + match self { + OutputFormat::AsInput => write_format, + OutputFormat::Mcnf => WriteFormat::Mcnf, + OutputFormat::Opb => WriteFormat::Opb, + } + } +} + +#[derive(Copy, Clone)] +enum WriteFormat { + Mcnf, + Opb, +} + #[derive(ValueEnum, Default, Clone, Copy, PartialEq, Eq)] enum SplitAlg { /// Only detect non-generalized boolean multilevel optimization. (This @@ -236,7 +319,7 @@ fn split( let (constr, obj) = so_inst.decompose(); if !obj.weighted() { - cli.info("objective is unweighted, can't split"); + cli.warning("objective is unweighted, can't split"); let obj_stats = ObjStats { n_softs: obj.n_softs(), weight_sum: obj.weight_sum(), @@ -457,28 +540,129 @@ fn split_gbmo(sorted_clauses: Vec<(Clause, usize)>, cli: &Cli) -> (Vec { + $( $a == $b || )* false + } +} + +fn parse_instance( + path: &Option, + file_format: InputFormat, + opb_opts: fio::opb::Options, +) -> anyhow::Result<(OptInstance, WriteFormat)> { + match file_format { + InputFormat::Infer => { + if let Some(path) = path { + if let Some(ext) = path.extension() { + let path_without_compr = path.with_extension(""); + let ext = if is_one_of!(ext, "gz", "bz2", "xz") { + // Strip compression extension + match path_without_compr.extension() { + Some(ext) => ext, + None => anyhow::bail!("no file extension after compression extension"), + } + } else { + ext + }; + if is_one_of!(ext, "wcnf") { + OptInstance::from_dimacs_path(path).map(|inst| (inst, WriteFormat::Mcnf)) + } else if is_one_of!(ext, "opb") { + OptInstance::from_opb_path(path, opb_opts) + .map(|inst| (inst, WriteFormat::Opb)) + } else { + anyhow::bail!("unknown file extension") + } + } else { + anyhow::bail!("no file extension") + } + } else { + anyhow::bail!("cannot infer file format from stdin") + } + } + InputFormat::Wcnf => { + if let Some(path) = path { + OptInstance::from_dimacs_path(path).map(|inst| (inst, WriteFormat::Mcnf)) + } else { + OptInstance::from_dimacs(&mut io::BufReader::new(io::stdin())) + .map(|inst| (inst, WriteFormat::Mcnf)) + } + } + InputFormat::Opb => { + if let Some(path) = path { + OptInstance::from_opb_path(path, opb_opts).map(|inst| (inst, WriteFormat::Opb)) + } else { + OptInstance::from_opb(&mut io::BufReader::new(io::stdin()), opb_opts) + .map(|inst| (inst, WriteFormat::Opb)) + } + } + } +} + +macro_rules! 
handle_error { + ($res:expr, $cli:expr) => {{ + match $res { + Ok(val) => val, + Err(err) => { + $cli.error(&err); + anyhow::bail!(err) + } + } + }}; +} + +fn main() -> anyhow::Result<()> { let cli = Cli::init(); - cli.info(&format!("finding splits in {}", cli.in_path.display())); + if let Some(path) = &cli.in_path { + cli.info(&format!("finding splits in {}", path.display())); + } - let so_inst: OptInstance = OptInstance::from_dimacs_path(&cli.in_path).unwrap_or_else(|e| { - cli.error(&format!("could not read input file: {}", e)); - panic!() - }); + let (so_inst, write_format) = handle_error!( + parse_instance(&cli.in_path, cli.input_format, cli.opb_opts), + cli + ); - let (mo_inst, split_stats) = split(so_inst, &cli); + let (mut mo_inst, split_stats) = split(so_inst, &cli); - cli.print_split_stats(split_stats); + if cli.out_path.is_some() { + cli.print_split_stats(split_stats); + } let found_split = mo_inst.n_objectives() > 1; - if let Some(out_path) = &cli.out_path { - if found_split || cli.always_dump { - mo_inst.write_dimacs_path(out_path).unwrap_or_else(|e| { - cli.error(&format!("io error writing the output file: {}", e)); - panic!() - }); + let write_format = cli.output_format.infer(write_format); + + if found_split || cli.always_dump { + match write_format { + WriteFormat::Mcnf => { + mo_inst.constraints_mut().convert_to_cnf(); + if let Some(path) = &cli.out_path { + cli.info(&format!("writing mcnf to {}", path.display())); + handle_error!(mo_inst.write_dimacs_path(path), cli); + } else { + handle_error!( + mo_inst.write_dimacs(&mut io::BufWriter::new(io::stdout())), + cli + ); + } + } + WriteFormat::Opb => { + let (mut constrs, mut objs) = mo_inst.decompose(); + for obj in &mut objs { + obj.convert_to_soft_lits(constrs.var_manager_mut()); + } + let mo_inst = MultiOptInstance::compose(constrs, objs); + if let Some(path) = &cli.out_path { + cli.info(&format!("writing opb to {}", path.display())); + handle_error!(mo_inst.write_opb_path(path, cli.opb_opts), cli); + } else { + handle_error!( + mo_inst.write_opb(&mut io::BufWriter::new(io::stdout()), cli.opb_opts), + cli + ); + } + } } } diff --git a/tools/src/bin/mo2ilp.rs b/tools/src/bin/mo2ilp.rs new file mode 100644 index 00000000..5f65267d --- /dev/null +++ b/tools/src/bin/mo2ilp.rs @@ -0,0 +1,319 @@ +//! # mo2ilp +//! +//! A small tool for converting multi-objective MaxSAT/PBO instances to the custom MO-ILP input formats. + +use std::{fmt, io, path::PathBuf}; + +use anyhow::Context; +use clap::{Parser, ValueEnum}; +use itertools::Itertools; +use rustsat::{ + instances::{ + fio::{self, opb::Options as OpbOptions}, + BasicVarManager, ManageVars, MultiOptInstance, Objective, ReindexingVarManager, + }, + types::{constraints::PbConstraint, Var}, +}; + +#[derive(Parser)] +#[command(author, version, about, long_about = None)] +struct Args { + /// The MCNF/MOPB input file. Reads from `stdin` if not given. + in_path: Option, + /// The output path. Writes to `stdout` if not given. 
+ out_path: Option, + /// The input file format + #[arg(long, value_enum, default_value_t = InputFormat::Infer)] + input_format: InputFormat, + /// The output file format + #[arg(long, value_enum, default_value_t = OutputFormat::Fgt)] + output_format: OutputFormat, + /// The index in the OPB file to treat as the lowest variable + #[arg(long, default_value_t = 1)] + first_var_idx: u32, + /// Specify what to do with variables that do not appear in the instance + #[arg(long, value_enum, default_value_t = UnusedVars::AssignZero)] + unused_vars: UnusedVars, +} + +#[derive(Copy, Clone, PartialEq, Eq, ValueEnum)] +enum InputFormat { + /// Infer the file format from the file extension according to the following rules: + /// - `.mcnf`: Multi-objective MaxSAT file + /// - `.opb`, `.pbmo`, `.mopb`: OPB file + /// All file extensions can also be appended with `.bz2`, `.xz`, or `.gz` if compression is used. + Infer, + /// A DIMACS MCNF file + Mcnf, + /// An OPB file with multiple objectives + Opb, +} + +#[derive(Copy, Clone, PartialEq, Eq, ValueEnum)] +enum OutputFormat { + /// Custom input format for [Bauss et al. 2023](https://git.uni-wuppertal.de/bauss/adaptive-improvements-of-multi-objective-branch-and-bound) + Bauss, + /// FGT format read by [Forget et al. 2022](https://github.com/NicolasJForget/LinearRelaxationBasedMultiObjectiveBranchAndBound/tree/v2.0) + Fgt, +} + +/// What to do with variables that do not appear in the instance +#[derive(Copy, Clone, PartialEq, Eq, ValueEnum)] +enum UnusedVars { + /// Don't do anything + Nothing, + /// Remove them, decreasing the index of the remaining variables + Remove, + /// Assign them to zero + AssignZero, +} + +fn main() -> anyhow::Result<()> { + let args = Args::parse(); + let opb_opts = OpbOptions { + first_var_idx: args.first_var_idx, + ..OpbOptions::default() + }; + + let inst = parse_instance(args.in_path, args.input_format, opb_opts) + .context("failed to parse input instance")?; + + let inst = match args.unused_vars { + UnusedVars::Nothing => inst, + UnusedVars::Remove => { + let (mut constr, objs) = inst + .reindex_ordered(ReindexingVarManager::default()) + .decompose(); + let next_free = constr.new_var(); + let (constr, _) = + constr.change_var_manager(|_| BasicVarManager::from_next_free(next_free)); + MultiOptInstance::compose(constr, objs) + } + UnusedVars::AssignZero => { + let varset = inst.var_set(); + let n_vars = inst.constraints_ref().n_vars(); + let mut inst = inst; + for idx in 0..n_vars { + let var = Var::new(idx); + if !varset.contains(&var) { + inst.constraints_mut().add_unit(var.neg_lit()); + } + } + inst + } + }; + + if let Some(path) = args.out_path { + write_instance( + inst, + args.output_format, + &mut fio::open_compressed_uncompressed_write(path)?, + )?; + } else { + write_instance( + inst, + args.output_format, + &mut io::BufWriter::new(io::stdout()), + )?; + } + Ok(()) +} + +macro_rules! 
is_one_of {
+    ($a:expr, $($b:expr),*) => {
+        $( $a == $b || )* false
+    }
+}
+
+fn parse_instance(
+    path: Option<PathBuf>,
+    file_format: InputFormat,
+    opb_opts: OpbOptions,
+) -> anyhow::Result<MultiOptInstance> {
+    match file_format {
+        InputFormat::Infer => {
+            let path = if let Some(path) = path {
+                path
+            } else {
+                anyhow::bail!("cannot infer file format from stdin");
+            };
+            if let Some(ext) = path.extension() {
+                let path_without_compr = path.with_extension("");
+                let ext = if is_one_of!(ext, "gz", "bz2", "xz") {
+                    // Strip compression extension
+                    match path_without_compr.extension() {
+                        Some(ext) => ext,
+                        None => anyhow::bail!("no file extension after compression extension"),
+                    }
+                } else {
+                    ext
+                };
+                if is_one_of!(ext, "mcnf") {
+                    MultiOptInstance::from_dimacs_path(path)
+                } else if is_one_of!(ext, "opb", "mopb", "pbmo") {
+                    MultiOptInstance::from_opb_path(path, opb_opts)
+                } else {
+                    anyhow::bail!("unknown file extension")
+                }
+            } else {
+                anyhow::bail!("no file extension")
+            }
+        }
+        InputFormat::Mcnf => {
+            if let Some(path) = path {
+                MultiOptInstance::from_dimacs_path(path)
+            } else {
+                MultiOptInstance::from_dimacs(&mut io::BufReader::new(io::stdin()))
+            }
+        }
+        InputFormat::Opb => {
+            if let Some(path) = path {
+                MultiOptInstance::from_opb_path(path, opb_opts)
+            } else {
+                MultiOptInstance::from_opb(&mut io::BufReader::new(io::stdin()), opb_opts)
+            }
+        }
+    }
+}
+
+fn write_instance<W: io::Write>(
+    inst: MultiOptInstance,
+    format: OutputFormat,
+    writer: &mut W,
+) -> io::Result<()> {
+    let (constr, mut objs) = inst.decompose();
+    let (mut pbs, mut vm) = constr.into_pbs();
+    let omv = objs.iter().fold(Var::new(0), |v, o| {
+        if let Some(mv) = o.max_var() {
+            return std::cmp::max(v, mv);
+        }
+        v
+    });
+    vm.increase_next_free(omv + 1);
+    pbs.extend(
+        objs.iter_mut()
+            .flat_map(|o| o.convert_to_soft_lits(&mut vm))
+            .map(PbConstraint::from),
+    );
+    // add aux variable unit for objective offset
+    pbs.push(PbConstraint::new_lb([(vm.new_lit(), 1)], 1));
+
+    // compute non-zeroes
+    let non_zero_constrs: usize = pbs.iter().map(PbConstraint::len).sum();
+    let non_zero_objs: usize = objs.iter().map(Objective::n_softs).sum();
+
+    match format {
+        OutputFormat::Bauss => {
+            // #vars #constrs #objs #nonzerosinconstrs #nonzerosincosts
+            writeln!(
+                writer,
+                "{} {} {} {non_zero_constrs} {non_zero_objs}",
+                vm.n_used(),
+                pbs.len(),
+                objs.len(),
+            )?;
+        }
+        OutputFormat::Fgt => {
+            // #vars #constrs #objs
+            writeln!(writer, "{} {} {}", vm.n_used(), pbs.len(), objs.len(),)?;
+        }
+    }
+    writeln!(writer)?;
+
+    if matches!(format, OutputFormat::Fgt) {
+        // objective types
+        writeln!(writer, "{}", (0..objs.len()).map(|_| "minsum").format(" "))?;
+        writeln!(writer)?;
+    }
+
+    // cost matrix
+    for obj in objs {
+        write_objective(obj, vm.n_used(), writer)?;
+    }
+    writeln!(writer)?;
+
+    // constraint matrix
+    let bounds = pbs
+        .into_iter()
+        .map(|c| write_constraint(c, vm.n_used(), writer))
+        .collect::<io::Result<Vec<_>>>()?;
+    writeln!(writer)?;
+
+    // rhs
+    for (op, bound) in bounds {
+        writeln!(writer, "{op} {bound}")?;
+    }
+
+    if matches!(format, OutputFormat::Fgt) {
+        writeln!(writer)?;
+        writeln!(writer, "{}", (0..vm.n_used()).map(|_| 0).format(" "))?;
+        writeln!(writer, "{}", (0..vm.n_used()).map(|_| 1).format(" "))?;
+    }
+
+    Ok(())
+}
+
+fn write_objective<W: io::Write>(obj: Objective, n_vars: u32, writer: &mut W) -> io::Result<()> {
+    let mut coefs = vec![0; n_vars as usize];
+    for (lit, coef) in obj.iter_soft_lits().unwrap() {
+        let mut coef =
+            isize::try_from(coef).expect("cannot handle coefficients larger than `isize::MAX`");
+        if lit.is_neg() {
+            *coefs.last_mut().unwrap() += coef;
+            coef = -coef;
+        }
+        coefs[lit.var().idx()] += coef;
+    }
+    *coefs.last_mut().unwrap() += obj.offset();
+    writeln!(writer, "{}", coefs.into_iter().format(" "))
+}
+
+fn write_constraint<W: io::Write>(
+    constr: PbConstraint,
+    n_vars: u32,
+    writer: &mut W,
+) -> io::Result<(Operator, isize)> {
+    let mut coefs = vec![0; n_vars as usize];
+    let (lits, op, mut bound) = match constr {
+        PbConstraint::Ub(constr) => {
+            let (lits, b) = constr.decompose();
+            (lits, Operator::Lte, b)
+        }
+        PbConstraint::Lb(constr) => {
+            let (lits, b) = constr.decompose();
+            (lits, Operator::Gte, b)
+        }
+        PbConstraint::Eq(constr) => {
+            let (lits, b) = constr.decompose();
+            (lits, Operator::Eq, b)
+        }
+    };
+    for (lit, coef) in lits {
+        let mut coef =
+            isize::try_from(coef).expect("cannot handle coefficients larger than `isize::MAX`");
+        if lit.is_neg() {
+            bound -= coef;
+            coef = -coef;
+        }
+        coefs[lit.var().idx()] += coef;
+    }
+    writeln!(writer, "{}", coefs.into_iter().format(" "))?;
+    Ok((op, bound))
+}
+
+#[derive(Clone, Copy)]
+enum Operator {
+    Gte,
+    Lte,
+    Eq,
+}
+
+impl fmt::Display for Operator {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Operator::Gte => write!(f, "0"),
+            Operator::Lte => write!(f, "1"),
+            Operator::Eq => write!(f, "2"),
+        }
+    }
+}
diff --git a/tools/src/encodings/assignment.rs b/tools/src/encodings/assignment.rs
new file mode 100644
index 00000000..5f83dc28
--- /dev/null
+++ b/tools/src/encodings/assignment.rs
@@ -0,0 +1,142 @@
+//! # Shared Assignment Problem Encoding Tooling
+//!
+//! - Data types
+//! - Input parser
+
+use std::io;
+
+/// An instance of the multi-objective assignment problem
+#[derive(Clone, PartialEq, Eq)]
+pub struct Assignment {
+    pub(crate) n_tasks: usize,
+    pub(crate) n_objs: usize,
+    pub(crate) costs: Vec<usize>,
+}
+
+impl Assignment {
+    fn empty(n_tasks: usize, n_objs: usize) -> Self {
+        Self {
+            n_tasks,
+            n_objs,
+            costs: vec![0; n_objs * n_tasks * n_tasks],
+        }
+    }
+
+    pub fn from_file(reader: impl io::BufRead) -> anyhow::Result<Self> {
+        parsing::parse_moolib(reader)
+    }
+
+    pub fn idx(&self, task: usize, agent: usize, obj: usize) -> usize {
+        debug_assert!(task < self.n_tasks);
+        debug_assert!(agent < self.n_tasks);
+        debug_assert!(obj < self.n_objs);
+        obj * self.n_tasks * self.n_tasks + task * self.n_tasks + agent
+    }
+
+    pub fn cost(&self, task: usize, agent: usize, obj: usize) -> usize {
+        self.costs[self.idx(task, agent, obj)]
+    }
+
+    pub fn cost_mut(&mut self, task: usize, agent: usize, obj: usize) -> &mut usize {
+        let idx = self.idx(task, agent, obj);
+        self.costs.get_mut(idx).unwrap()
+    }
+}
+
+mod parsing {
+    use std::io;
+
+    use anyhow::Context;
+    use nom::character::complete::u32;
+
+    use crate::parsing::{callback_list, single_value};
+
+    macro_rules! next_non_comment_line {
+        ($reader:expr) => {
+            'block: {
+                let mut buf = String::new();
+                while $reader.read_line(&mut buf)?
> 0 { + if !buf.trim_start().starts_with('#') && !buf.trim().is_empty() { + break 'block Some(buf); + } + buf.clear(); + } + None + } + }; + } + + pub fn parse_moolib(mut reader: impl io::BufRead) -> anyhow::Result { + let line = next_non_comment_line!(reader) + .context("file ended before number of objectives line")?; + let (_, n_objs) = single_value(u32, "#")(&line) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse number of objectives line '{line}'"))?; + let line = + next_non_comment_line!(reader).context("file ended before number of tasks line")?; + let (_, n_tasks) = single_value(u32, "#")(&line) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse number of tasks line '{line}'"))?; + let mut inst = super::Assignment::empty(n_tasks as usize, n_objs as usize); + let mut buf = String::new(); + let mut depth = 0; + let mut obj_idx = 0; + let mut task_idx = 0; + while reader.read_line(&mut buf)? > 0 { + let mut remain = buf.trim_start(); + loop { + if remain.starts_with('[') && depth < 2 { + depth += 1; + remain = &remain[1..]; + } + if remain.starts_with(']') { + depth -= 1; + if depth == 1 { + task_idx = 0; + } + remain = &remain[1..]; + } + if remain.starts_with(',') { + match depth { + 1 => obj_idx += 1, + 2 => task_idx += 1, + _ => unreachable!(), + }; + remain = &remain[1..]; + } + if remain.starts_with('#') || remain.trim().is_empty() { + break; + } + if depth == 2 { + let mut agent_idx = 0; + remain = callback_list(remain.trim_start(), u32, |value| { + anyhow::ensure!(obj_idx < n_objs, "too many objectives"); + anyhow::ensure!(task_idx < n_tasks, "too many tasks"); + anyhow::ensure!(agent_idx < n_tasks, "too many agents"); + *inst.cost_mut(task_idx as usize, agent_idx as usize, obj_idx as usize) = + value as usize; + agent_idx += 1; + Ok(()) + })?; + } + } + buf.clear(); + } + anyhow::ensure!( + obj_idx + 1 == n_objs, + "number of objectives does not match up" + ); + Ok(inst) + } + + #[cfg(test)] + mod tests { + use std::{fs::File, io::BufReader}; + + #[test] + fn moolib() { + let reader = BufReader::new(File::open("./data/AP_p-3_n-5_ins-1.dat").unwrap()); + super::parse_moolib(reader).unwrap(); + } + } +} diff --git a/tools/src/encodings/clustering.rs b/tools/src/encodings/cnf/clustering.rs similarity index 100% rename from tools/src/encodings/clustering.rs rename to tools/src/encodings/cnf/clustering.rs diff --git a/tools/src/encodings/cnf/knapsack.rs b/tools/src/encodings/cnf/knapsack.rs new file mode 100644 index 00000000..9918d258 --- /dev/null +++ b/tools/src/encodings/cnf/knapsack.rs @@ -0,0 +1,81 @@ +//! 
# CNF (Multi-Criteria) Knapsack Encoding
+
+use rustsat::{
+    clause,
+    encodings::pb,
+    instances::{fio::dimacs, BasicVarManager, Cnf, ManageVars},
+    types::{constraints::PbConstraint, Lit},
+};
+
+use crate::encodings::knapsack::Knapsack;
+
+enum Clause {
+    /// Clause of the capacity encoding
+    Capacity(<Cnf as IntoIterator>::IntoIter),
+    /// Soft clause for item and objective
+    Soft(usize, usize),
+}
+
+pub struct Encoding {
+    data: Knapsack,
+    next_clause: Clause,
+}
+
+impl Encoding {
+    pub fn new<PBE>(data: Knapsack) -> Self
+    where
+        PBE: pb::BoundUpper + FromIterator<(Lit, usize)>,
+    {
+        let mut vm = BasicVarManager::default();
+        let cap_constr = match PbConstraint::new_ub(
+            data.items
+                .iter()
+                .map(|item| (vm.new_var().pos_lit(), item.weight as isize)),
+            data.capacity as isize,
+        ) {
+            PbConstraint::Ub(constr) => constr,
+            _ => unreachable!(),
+        };
+        let mut cap_cnf = Cnf::new();
+        PBE::encode_ub_constr(cap_constr, &mut cap_cnf, &mut vm).unwrap();
+        Self {
+            data,
+            next_clause: Clause::Capacity(cap_cnf.into_iter()),
+        }
+    }
+}
+
+impl Iterator for Encoding {
+    type Item = dimacs::McnfLine;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        loop {
+            match &mut self.next_clause {
+                Clause::Capacity(iter) => {
+                    let next = iter.next();
+                    if next.is_some() {
+                        return next.map(dimacs::McnfLine::Hard);
+                    }
+                    self.next_clause = Clause::Soft(0, 0);
+                }
+                Clause::Soft(iidx, oidx) => {
+                    let iidx = *iidx;
+                    let oidx = *oidx;
+                    if iidx >= self.data.items.len() {
+                        return None;
+                    }
+                    if oidx >= self.data.items[iidx].values.len() {
+                        self.next_clause = Clause::Soft(iidx + 1, 0);
+                        continue;
+                    }
+                    self.next_clause = Clause::Soft(iidx, oidx + 1);
+                    return Some(dimacs::McnfLine::Soft(
+                        clause![Lit::positive(iidx as u32)],
+                        self.data.items[iidx].values[oidx],
+                        oidx,
+                    ));
+                }
+            }
+        }
+    }
+}
diff --git a/tools/src/encodings/facilitylocation.rs b/tools/src/encodings/facilitylocation.rs
new file mode 100644
index 00000000..0bf32abc
--- /dev/null
+++ b/tools/src/encodings/facilitylocation.rs
@@ -0,0 +1,131 @@
+//! # Shared Facility Location Problem Encoding Tooling
+//!
+//! - Data types
+//! - Input parser
+
+use std::io;
+
+/// An instance of the multi-objective facility location problem
+#[derive(Clone, PartialEq, Eq, Default)]
+pub struct FacilityLocation {
+    n_objs: usize,
+    n_customers: usize,
+    n_facilities: usize,
+    supply_cost: Vec<usize>,
+    opening_cost: Vec<usize>,
+}
+
+impl FacilityLocation {
+    pub fn from_file(reader: impl io::BufRead) -> anyhow::Result<Self> {
+        parsing::parse_voptlib(reader)
+    }
+
+    pub fn n_objs(&self) -> usize {
+        self.n_objs
+    }
+
+    pub fn n_customers(&self) -> usize {
+        self.n_customers
+    }
+
+    pub fn n_facilities(&self) -> usize {
+        self.n_facilities
+    }
+
+    fn cidx(&self, obj: usize, customer: usize, facility: usize) -> usize {
+        debug_assert!(obj < self.n_objs);
+        debug_assert!(customer < self.n_customers);
+        debug_assert!(facility < self.n_facilities);
+        obj * self.n_facilities * self.n_customers + customer * self.n_facilities + facility
+    }
+
+    fn ridx(&self, obj: usize, facility: usize) -> usize {
+        debug_assert!(obj < self.n_objs);
+        debug_assert!(facility < self.n_facilities);
+        obj * self.n_facilities + facility
+    }
+
+    pub fn supply_cost(&self, obj: usize, customer: usize, facility: usize) -> usize {
+        self.supply_cost[self.cidx(obj, customer, facility)]
+    }
+
+    pub fn opening_cost(&self, obj: usize, facility: usize) -> usize {
+        self.opening_cost[self.ridx(obj, facility)]
+    }
+}
+
+mod parsing {
+    use std::io;
+
+    use anyhow::Context;
+    use nom::character::complete::u32;
+
+    use crate::parsing::{callback_separated, single_value};
+
+    macro_rules! next_line {
+        ($reader:expr) => {{
+            let mut buf = String::new();
+            if $reader.read_line(&mut buf)? > 0 {
+                Some(buf)
+            } else {
+                None
+            }
+        }};
+    }
+
+    pub fn parse_voptlib(mut reader: impl io::BufRead) -> anyhow::Result<super::FacilityLocation> {
+        let line = next_line!(reader).context("file ended before number of users line")?;
+        let (_, n_users) = single_value(u32, "#")(&line)
+            .map_err(|e| e.to_owned())
+            .with_context(|| format!("failed to parse number of users line '{line}'"))?;
+        let n_users = usize::try_from(n_users).context("u32 does not fit in usize")?;
+        let line = next_line!(reader).context("file ended before number of services line")?;
+        let (_, n_services) = single_value(u32, "#")(&line)
+            .map_err(|e| e.to_owned())
+            .with_context(|| format!("failed to parse number of services line '{line}'"))?;
+        let n_services = usize::try_from(n_services).context("u32 does not fit in usize")?;
+
+        let mut prob = super::FacilityLocation {
+            n_objs: 2,
+            n_customers: n_users,
+            n_facilities: n_services,
+            ..super::FacilityLocation::default()
+        };
+        for _ in 0..2 {
+            next_line!(reader).context("file ended early")?;
+            for _ in 0..n_users {
+                let line = next_line!(reader).context("file ended in the middle of c matrix")?;
+                callback_separated(&line, u32, |value| {
+                    let value = usize::try_from(value).context("u32 does not fit in usize")?;
+                    prob.supply_cost.push(value);
+                    Ok(())
+                })
+                .context("failed to parse c matrix line")?;
+            }
+        }
+
+        for _ in 0..2 {
+            next_line!(reader).context("file ended early")?;
+            let line = next_line!(reader).context("file ended early")?;
+            callback_separated(&line, u32, |value| {
+                let value = usize::try_from(value).context("u32 does not fit in usize")?;
+                prob.opening_cost.push(value);
+                Ok(())
+            })
+            .context("failed to parse r vector")?;
+        }
+
+        Ok(prob)
+    }
+
+    #[cfg(test)]
+    mod tests {
+        use std::{fs::File, io::BufReader};
+
+        #[test]
+        fn voptlib() {
+            let reader = BufReader::new(File::open("./data/didactic1.txt").unwrap());
+            super::parse_voptlib(reader).unwrap();
+        }
+    }
+}
diff --git
a/tools/src/encodings/knapsack.rs b/tools/src/encodings/knapsack.rs index 6613e100..b77111c4 100644 --- a/tools/src/encodings/knapsack.rs +++ b/tools/src/encodings/knapsack.rs @@ -1,26 +1,26 @@ -//! # (Multi-Criteria) Knapsack Encoding +//! # Shared Knapsack Encoding Tooling +//! +//! - Data types +//! - Input parser +//! - Random generator -use std::ops::Range; +use std::{io, ops::Range}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; -use rustsat::{ - clause, - encodings::pb, - instances::{fio::dimacs, BasicVarManager, Cnf, ManageVars}, - types::{constraints::PBConstraint, Lit}, -}; /// An instance of the (multi-criteria 0-1) knapsack problem +#[derive(Clone, PartialEq, Eq)] pub struct Knapsack { - items: Vec, - capacity: usize, + pub(crate) items: Vec, + pub(crate) capacity: usize, } /// Data associated with one knapsack item -struct ItemData { - weight: usize, - values: Vec, +#[derive(Clone, PartialEq, Eq)] +pub(crate) struct ItemData { + pub weight: usize, + pub values: Vec, } pub enum Capacity { @@ -57,75 +57,195 @@ impl Knapsack { }; Self { items, capacity } } -} -enum Clause { - /// Clause of the capacity encoding - Capacity(::IntoIter), - /// Soft clause for item and objective - Soft(usize, usize), + pub fn from_file(reader: impl io::BufRead, format: FileFormat) -> anyhow::Result { + match format { + FileFormat::MooLibrary => parsing::parse_moolib(reader), + FileFormat::VOptLib => parsing::parse_voptlib(reader), + } + } } -pub struct Encoding { - data: Knapsack, - next_clause: Clause, +/// Possible Knapsack input file formats +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum FileFormat { + /// Input files as provided by [MOO-Library](http://home.ku.edu.tr/~moolibrary/) + MooLibrary, + /// Input files as provided by [vOptLib](https://github.com/vOptSolver/vOptLib/tree/master/UKP) + VOptLib, } -impl Encoding { - pub fn new(data: Knapsack) -> Self - where - PBE: pb::BoundUpper + FromIterator<(Lit, usize)>, - { - let mut vm = BasicVarManager::default(); - let cap_constr = match PBConstraint::new_ub( - data.items - .iter() - .map(|item| (vm.new_var().pos_lit(), item.weight as isize)), - data.capacity as isize, - ) { - PBConstraint::Ub(constr) => constr, - _ => panic!(), - }; - let mut cap_cnf = Cnf::new(); - PBE::encode_ub_constr(cap_constr, &mut cap_cnf, &mut vm).unwrap(); - Self { - data, - next_clause: Clause::Capacity(cap_cnf.into_iter()), - } - } -} +mod parsing { + use std::io; + + use anyhow::Context; + use nom::{ + bytes::complete::tag, + character::complete::{space0, u32}, + error::Error as NomErr, + sequence::tuple, + }; -impl Iterator for Encoding { - type Item = dimacs::McnfLine; + use crate::parsing::{callback_list, single_value}; - fn next(&mut self) -> Option { - loop { - match &mut self.next_clause { - Clause::Capacity(iter) => { - let next = iter.next(); - if next.is_some() { - return next.map(dimacs::McnfLine::Hard); + macro_rules! next_non_comment_line { + ($reader:expr) => { + 'block: { + let mut buf = String::new(); + while $reader.read_line(&mut buf)? 
> 0 { + if !buf.trim_start().starts_with('#') && !buf.trim().is_empty() { + break 'block Some(buf); } - self.next_clause = Clause::Soft(0, 0); + buf.clear(); } - Clause::Soft(iidx, oidx) => { - let iidx = *iidx; - let oidx = *oidx; - if iidx >= self.data.items.len() { - return None; - } - if oidx >= self.data.items[iidx].values.len() { - self.next_clause = Clause::Soft(iidx + 1, 0); - continue; - } - self.next_clause = Clause::Soft(iidx, oidx + 1); - return Some(dimacs::McnfLine::Soft( - clause![Lit::positive(iidx as u32)], - self.data.items[iidx].values[oidx], - oidx, - )); + None + } + }; + } + + pub fn parse_moolib(mut reader: impl io::BufRead) -> anyhow::Result { + let line = next_non_comment_line!(reader) + .context("file ended before number of objectives line")?; + let (_, n_obj) = single_value(u32, "#")(&line) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse number of objectives line '{line}'"))?; + let line = + next_non_comment_line!(reader).context("file ended before number of items line")?; + let (_, n_items) = single_value(u32, "#")(&line) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse number of items line '{line}'"))?; + let line = next_non_comment_line!(reader).context("file ended before capacity line")?; + let (_, capacity) = single_value(u32, "#")(&line) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse capacity line '{line}'"))?; + let mut inst = super::Knapsack { + items: vec![ + super::ItemData { + weight: 0, + values: vec![] + }; + n_items as usize + ], + capacity: capacity as usize, + }; + let mut buf = String::new(); + let mut started = false; + let mut ended = false; + while reader.read_line(&mut buf)? > 0 { + let remain = buf.trim_start(); + if remain.starts_with('#') || buf.trim().is_empty() { + buf.clear(); + continue; + } + let remain = if started { + remain + } else { + anyhow::ensure!(remain.starts_with('['), "expected list to start"); + started = true; + &remain[1..] 
+ }; + let mut item_idx = 0; + let remain = callback_list(remain, u32, |value| { + anyhow::ensure!(item_idx < inst.items.len(), "too many values in value list"); + inst.items[item_idx].values.push(value as usize); + item_idx += 1; + Ok(()) + })?; + match tuple::<_, _, NomErr<_>, _>((space0, tag(","), space0))(remain) { + Ok(_) => (), + Err(_) => { + tuple::<_, _, NomErr<_>, _>((space0, tag("]")))(remain) + .map_err(|e| e.to_owned()) + .context("failed to find closing delimiter for value list")?; + ended = true; + break; } } + buf.clear(); + } + anyhow::ensure!(ended, "file ended before value list ended"); + let line = next_non_comment_line!(reader).context("file ended before weight line")?; + let mut item_idx = 0; + callback_list(line.trim_start(), u32, |weight| { + anyhow::ensure!( + item_idx < inst.items.len(), + "too many values in weight list" + ); + inst.items[item_idx].weight = weight as usize; + item_idx += 1; + Ok(()) + })?; + for item in &inst.items { + anyhow::ensure!( + item.values.len() == n_obj as usize, + "number of values for item does not match number of objectives" + ); + } + Ok(inst) + } + + pub fn parse_voptlib(mut reader: impl io::BufRead) -> anyhow::Result { + let line = + next_non_comment_line!(reader).context("file ended before number of items line")?; + let (_, n_items) = single_value(u32, "#")(&line) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse number of items line '{line}'"))?; + let line = next_non_comment_line!(reader) + .context("file ended before number of objectives line")?; + let (_, n_obj) = single_value(u32, "#")(&line) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse number of objectives line '{line}'"))?; + let _ = single_value(u32, "#")(&line) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse number of constraints line '{line}'"))?; + let mut inst = super::Knapsack { + items: vec![ + super::ItemData { + weight: 0, + values: vec![] + }; + n_items as usize + ], + capacity: 0, + }; + for obj_idx in 0..n_obj { + for item_idx in 0..n_items { + let line = next_non_comment_line!(reader).with_context(|| { + format!("file ended before {item_idx} value of objective {obj_idx}") + })?; + let (_, value) = single_value(u32, "#")(&line) + .map_err(|e| e.to_owned()) + .with_context(|| { + format!("failed to parse {item_idx} value of objective {obj_idx} '{line}'") + })?; + inst.items[item_idx as usize].values.push(value as usize); + } + } + for item_idx in 0..n_items { + let line = next_non_comment_line!(reader) + .with_context(|| format!("file ended before weight of item {item_idx}"))?; + let (_, weight) = single_value(u32, "#")(&line) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse weight of item {item_idx} '{line}'"))?; + inst.items[item_idx as usize].weight = weight as usize; + } + Ok(inst) + } + + #[cfg(test)] + mod tests { + use std::{fs::File, io::BufReader}; + + #[test] + fn moolib() { + let reader = BufReader::new(File::open("./data/KP_p-3_n-10_ins-1.dat").unwrap()); + super::parse_moolib(reader).unwrap(); + } + + #[test] + fn voptlib() { + let reader = BufReader::new(File::open("./data/F5050W01.dat").unwrap()); + super::parse_voptlib(reader).unwrap(); } } } diff --git a/tools/src/encodings/pb/assignment.rs b/tools/src/encodings/pb/assignment.rs new file mode 100644 index 00000000..4ef51701 --- /dev/null +++ b/tools/src/encodings/pb/assignment.rs @@ -0,0 +1,103 @@ +//! 
# PB (Multi-Criteria) Assignment Problem Encoding
+
+use rustsat::{
+    instances::fio::opb,
+    lit,
+    types::{constraints::CardConstraint, Lit},
+};
+
+use crate::encodings::assignment::Assignment;
+
+#[derive(Default)]
+enum Line {
+    /// Hint line
+    #[default]
+    Hint,
+    /// Description
+    Description,
+    /// An objective with given index
+    Objective(usize),
+    /// Constraints stating that exactly one task per agent needs to be selected
+    OneTask(usize),
+    /// Constraints stating that exactly one agent per task needs to be selected
+    OneAgent(usize),
+}
+
+pub struct Encoding {
+    data: Assignment,
+    next_line: Option<Line>,
+}
+
+impl Encoding {
+    pub fn new(data: Assignment) -> Self {
+        Self {
+            data,
+            next_line: Some(Line::default()),
+        }
+    }
+}
+
+impl Iterator for Encoding {
+    type Item = opb::OpbLine<<Vec<(Lit, usize)> as IntoIterator>::IntoIter>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let selector = |task: usize, agent: usize| lit![(self.data.n_tasks * task + agent) as u32];
+        match self.next_line.take() {
+            Some(line) => Some(match line {
+                Line::Hint => {
+                    self.next_line = Some(Line::Description);
+                    opb::OpbLine::Comment(format!(
+                        "#variable= {} #constraint= {}",
+                        self.data.n_tasks * self.data.n_tasks,
+                        2 * self.data.n_tasks
+                    ))
+                }
+                Line::Description => {
+                    self.next_line = Some(Line::Objective(0));
+                    opb::OpbLine::Comment("MO Assignment instance generated by RustSAT".to_string())
+                }
+                Line::Objective(objidx) => {
+                    let data = &self.data;
+                    let obj: Vec<_> = (0..self.data.n_tasks)
+                        .flat_map(|task| {
+                            (0..self.data.n_tasks).map(move |agent| {
+                                (selector(task, agent), data.cost(task, agent, objidx))
+                            })
+                        })
+                        .collect();
+                    self.next_line = Some(if objidx + 1 < self.data.n_objs {
+                        Line::Objective(objidx + 1)
+                    } else {
+                        Line::OneTask(0)
+                    });
+                    opb::OpbLine::Objective(obj.into_iter())
+                }
+                Line::OneTask(agent) => {
+                    let constr = CardConstraint::new_eq(
+                        (0..self.data.n_tasks).map(|task| selector(task, agent)),
+                        1,
+                    );
+                    self.next_line = Some(if agent + 1 < self.data.n_tasks {
+                        Line::OneTask(agent + 1)
+                    } else {
+                        Line::OneAgent(0)
+                    });
+                    opb::OpbLine::Card(constr)
+                }
+                Line::OneAgent(task) => {
+                    let constr = CardConstraint::new_eq(
+                        (0..self.data.n_tasks).map(|agent| selector(task, agent)),
+                        1,
+                    );
+                    self.next_line = if task + 1 < self.data.n_tasks {
+                        Some(Line::OneAgent(task + 1))
+                    } else {
+                        None
+                    };
+                    opb::OpbLine::Card(constr)
+                }
+            }),
+            None => None,
+        }
+    }
+}
diff --git a/tools/src/encodings/pb/facilitylocation.rs b/tools/src/encodings/pb/facilitylocation.rs
new file mode 100644
index 00000000..c2be5342
--- /dev/null
+++ b/tools/src/encodings/pb/facilitylocation.rs
@@ -0,0 +1,120 @@
+//! # PB (Multi-Criteria) Uncapacitated Facility Location Problem Encoding
+
+use rustsat::{
+    encodings::atomics,
+    instances::fio::opb,
+    lit,
+    types::{constraints::CardConstraint, Lit},
+};
+
+use crate::encodings::facilitylocation::FacilityLocation;
+
+#[derive(Default)]
+enum Line {
+    /// Hint line
+    #[default]
+    Hint,
+    /// Description
+    Description,
+    /// An objective with given index
+    Objective(usize),
+    /// Constraints stating that exactly one facility is selected per customer
+    OneFacility(usize),
+    /// Constraints stating that if a facility (first argument) is selected by a customer (second
+    /// argument), it must be opened
+    MustOpen(usize, usize),
+}
+
+pub struct Encoding {
+    data: FacilityLocation,
+    next_line: Option<Line>,
+}
+
+impl Encoding {
+    pub fn new(data: FacilityLocation) -> Self {
+        Self {
+            data,
+            next_line: Some(Line::default()),
+        }
+    }
+}
+
+impl Iterator for Encoding {
+    type Item = opb::OpbLine<<Vec<(Lit, usize)> as IntoIterator>::IntoIter>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let selected = |customer: usize, facility: usize| {
+            lit![(customer * self.data.n_facilities() + facility) as u32]
+        };
+        let opening = |facility: usize| {
+            lit![(self.data.n_facilities() * self.data.n_customers() + facility) as u32]
+        };
+        match self.next_line.take() {
+            Some(line) => Some(match line {
+                Line::Hint => {
+                    self.next_line = Some(Line::Description);
+                    opb::OpbLine::Comment(format!(
+                        "#variable= {} #constraint= {}",
+                        (self.data.n_customers() + 1) * self.data.n_facilities(),
+                        self.data.n_customers() * (self.data.n_facilities() + 1)
+                    ))
+                }
+                Line::Description => {
+                    self.next_line = Some(Line::Objective(0));
+                    opb::OpbLine::Comment(
+                        "MO uncapacitated facility location instance generated by RustSAT"
+                            .to_string(),
+                    )
+                }
+                Line::Objective(objidx) => {
+                    let data = &self.data;
+                    let opening_cost = (0..self.data.n_facilities())
+                        .map(|fac| (opening(fac), data.opening_cost(objidx, fac)));
+                    let obj: Vec<_> = (0..self.data.n_customers())
+                        .flat_map(|customer| {
+                            (0..self.data.n_facilities()).map(move |fac| {
+                                (
+                                    selected(customer, fac),
+                                    data.supply_cost(objidx, customer, fac),
+                                )
+                            })
+                        })
+                        .chain(opening_cost)
+                        .collect();
+                    self.next_line = Some(if objidx + 1 < self.data.n_objs() {
+                        Line::Objective(objidx + 1)
+                    } else {
+                        Line::OneFacility(0)
+                    });
+                    opb::OpbLine::Objective(obj.into_iter())
+                }
+                Line::OneFacility(customer) => {
+                    let constr = CardConstraint::new_eq(
+                        (0..self.data.n_facilities()).map(|fac| selected(customer, fac)),
+                        1,
+                    );
+                    self.next_line = Some(if customer + 1 < self.data.n_customers() {
+                        Line::OneFacility(customer + 1)
+                    } else {
+                        Line::MustOpen(0, 0)
+                    });
+                    opb::OpbLine::Card(constr)
+                }
+                Line::MustOpen(fac, customer) => {
+                    let constr = atomics::lit_impl_lit(selected(customer, fac), opening(fac));
+                    self.next_line = if customer + 1 < self.data.n_customers() {
+                        Some(Line::MustOpen(fac, customer + 1))
+                    } else if fac + 1 < self.data.n_facilities() {
+                        Some(Line::MustOpen(fac + 1, 0))
+                    } else {
+                        None
+                    };
+                    opb::OpbLine::Clause(constr)
+                }
+            }),
+            None => None,
+        }
+    }
+}
diff --git a/tools/src/encodings/pb/knapsack.rs b/tools/src/encodings/pb/knapsack.rs
new file mode 100644
index 00000000..3812abe4
--- /dev/null
+++ b/tools/src/encodings/pb/knapsack.rs
@@ -0,0 +1,86 @@
+//! # PB (Multi-Criteria) Knapsack Encoding
+
+use rustsat::{
+    instances::fio::opb,
+    lit,
+    types::{constraints::PbConstraint, Lit},
+};
+
+use crate::encodings::knapsack::Knapsack;
+
+#[derive(Default)]
+enum Line {
+    /// Hint line
+    #[default]
+    Hint,
+    /// Description
+    Description,
+    /// An objective with given index
+    Objective(usize),
+    /// The capacity constraint
+    Capacity,
+}
+
+pub struct Encoding {
+    data: Knapsack,
+    next_line: Option<Line>,
+}
+
+impl Encoding {
+    pub fn new(data: Knapsack) -> Self {
+        Self {
+            data,
+            next_line: Some(Line::default()),
+        }
+    }
+}
+
+impl Iterator for Encoding {
+    type Item = opb::OpbLine<<Vec<(Lit, usize)> as IntoIterator>::IntoIter>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        match self.next_line.take() {
+            Some(line) => Some(match line {
+                Line::Hint => {
+                    self.next_line = Some(Line::Description);
+                    opb::OpbLine::Comment(format!(
+                        "#variable= {} #constraint= 1",
+                        self.data.items.len()
+                    ))
+                }
+                Line::Description => {
+                    self.next_line = Some(Line::Objective(0));
+                    opb::OpbLine::Comment("MO Knapsack instance generated by RustSAT".to_string())
+                }
+                Line::Objective(oidx) => {
+                    let obj: Vec<_> = self
+                        .data
+                        .items
+                        .iter()
+                        .enumerate()
+                        .map(|(iidx, item)| (!lit![iidx as u32], item.values[oidx]))
+                        .collect();
+                    self.next_line = Some(if oidx + 1 < self.data.items[0].values.len() {
+                        Line::Objective(oidx + 1)
+                    } else {
+                        Line::Capacity
+                    });
+                    opb::OpbLine::Objective(obj.into_iter())
+                }
+                Line::Capacity => {
+                    self.next_line = None;
+                    let cap_constr = PbConstraint::new_ub(
+                        self.data
+                            .items
+                            .iter()
+                            .enumerate()
+                            .map(|(idx, item)| (lit![idx as u32], item.weight as isize)),
+                        self.data.capacity as isize,
+                    );
+                    opb::OpbLine::Pb(cap_constr)
+                }
+            }),
+            None => None,
+        }
+    }
+}
diff --git a/tools/src/lib.rs b/tools/src/lib.rs
index 8d5011ae..88ca853c 100644
--- a/tools/src/lib.rs
+++ b/tools/src/lib.rs
@@ -2,13 +2,30 @@
 //!
 //! This crate contains tools for and built on the RustSAT library.
+mod parsing;
 pub mod utils;
 pub mod encodings {
     //! # Encodings for Encoding Generators
-    pub mod clustering;
+    pub mod assignment;
+    pub mod facilitylocation;
     pub mod knapsack;
+
+    pub mod cnf {
+        //! # CNF Encodings
+
+        pub mod clustering;
+        pub mod knapsack;
+    }
+
+    pub mod pb {
+        //! # PB Encodings
+
+        pub mod assignment;
+        pub mod facilitylocation;
+        pub mod knapsack;
+    }
 }
 #[cfg(feature = "cadical")]
diff --git a/tools/src/parsing.rs b/tools/src/parsing.rs
new file mode 100644
index 00000000..62063693
--- /dev/null
+++ b/tools/src/parsing.rs
@@ -0,0 +1,97 @@
+//!
# Shared Parsing Functionality + +use anyhow::Context; +use nom::{ + branch::alt, + bytes::complete::tag, + character::complete::{line_ending, multispace1, not_line_ending, space0}, + combinator::{map, recognize}, + error::{context, Error as NomErr}, + sequence::{pair, tuple}, +}; + +pub fn single_value<'input, T, P>( + parser: P, + comment_tag: &'static str, +) -> impl FnMut(&'input str) -> nom::IResult<&'input str, T> +where + P: Fn(&'input str) -> nom::IResult<&'input str, T>, +{ + context( + "expected a single u32 number", + map( + tuple((space0, parser, space0, comment_or_end(comment_tag))), + |tup| tup.1, + ), + ) +} + +pub fn comment_or_end<'input>( + comment_tag: &'static str, +) -> impl FnMut(&'input str) -> nom::IResult<&'input str, &'input str> { + recognize(pair( + alt(( + recognize(tuple((tag(comment_tag), not_line_ending))), + not_line_ending, + )), + line_ending, + )) +} + +pub fn callback_list<'input, T, P>( + input: &'input str, + mut parse: P, + mut callback: impl FnMut(T) -> anyhow::Result<()>, +) -> anyhow::Result<&str> +where + P: FnMut(&'input str) -> nom::IResult<&'input str, T>, +{ + let (mut buf, _) = tuple::<_, _, NomErr<_>, _>((tag("["), space0))(input) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse list start '{input}'"))?; + loop { + let (remain, val) = parse(buf) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse value in list '{input}'"))?; + callback(val)?; + anyhow::ensure!( + !remain.trim().is_empty(), + "line ended before list was closed" + ); + buf = match tuple::<_, _, NomErr<_>, _>((space0, tag(","), space0))(remain) { + Ok((remain, _)) => remain, + Err(_) => { + let (remain, _) = tuple::<_, _, NomErr<_>, _>((space0, tag("]")))(remain) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse list end/separator '{input}'"))?; + return Ok(remain); + } + }; + } +} + +pub fn callback_separated<'input, T, P>( + input: &'input str, + mut parse: P, + mut callback: impl FnMut(T) -> anyhow::Result<()>, +) -> anyhow::Result<&str> +where + P: FnMut(&'input str) -> nom::IResult<&'input str, T>, +{ + let mut buf = input.trim_start(); + loop { + let (remain, val) = parse(buf) + .map_err(|e| e.to_owned()) + .with_context(|| format!("failed to parse value in list '{input}'"))?; + callback(val)?; + buf = match multispace1::<_, NomErr<_>>(remain) { + Ok((remain, _)) => { + if remain.is_empty() { + return Ok(""); + } + remain + } + Err(_) => return Ok(remain), + }; + } +}
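For reviewers, a minimal usage sketch of the two shared helpers added in tools/src/parsing.rs. This is not part of the patch; the module name `usage_sketch` and the literal inputs are invented for illustration only, written as it could appear as an extra unit test at the bottom of that file.

#[cfg(test)]
mod usage_sketch {
    use nom::character::complete::u32;

    use super::{callback_list, single_value};

    #[test]
    fn helpers() {
        // `single_value` parses one number, tolerates a trailing '#' comment, and
        // expects the line ending that `read_line` leaves on the buffer
        let (_, n) = single_value(u32, "#")("42  # number of items\n").unwrap();
        assert_eq!(n, 42);

        // `callback_list` walks a bracketed, comma-separated list, hands every
        // value to the closure, and returns whatever follows the closing bracket
        let mut sum = 0;
        let rest = callback_list("[1, 2, 3]", u32, |v| {
            sum += v;
            Ok(())
        })
        .unwrap();
        assert_eq!(sum, 6);
        assert!(rest.is_empty());
    }
}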