Re-add statistics for SDDs, extra benchmarking (#122)
This PR adds back all the stats that I removed in the great traitening/safening.

In addition, it:

- slightly changes how the finite field's `sub` works so that it avoids `%` (a sketch of the idea follows this list)
- adds a new QuickCheck (QC) test
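
A minimal, self-contained sketch of one way a modular `sub` can avoid `%` (branch on which operand is larger and wrap around), plus the kind of round-trip property a QuickCheck test could exercise. The names here (`field_sub`, `field_add`) are illustrative only; this is not the crate's actual `FiniteField` code, and the real change and QC test appear to be in files whose diffs did not load below.

```rust
// Sketch only: simplified names, not the crate's actual `FiniteField` code.
// Assumes both operands are already reduced into [0, P).
const P: u128 = 100_000_000_063; // the prime that appears elsewhere in this diff

/// Subtraction in Z_P that branches instead of reducing with `%`.
fn field_sub(a: u128, b: u128) -> u128 {
    if a >= b {
        a - b
    } else {
        // wrap around rather than computing (a + P - b) % P
        P - (b - a)
    }
}

/// Addition in Z_P, also `%`-free, used for the round-trip check below.
fn field_add(a: u128, b: u128) -> u128 {
    let s = a + b;
    if s >= P {
        s - P
    } else {
        s
    }
}

fn main() {
    // Property a QC test could check over random a, b: (a - b) + b == a (mod P).
    for &(a, b) in &[(5u128, 3u128), (3, 5), (0, P - 1), (P - 1, 0)] {
        assert_eq!(field_add(field_sub(a, b), b), a);
    }
    println!("ok");
}
```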

## some results

Identical final SDDs and allocated nodes, but fewer recursive calls and get-or-insert operations!

```sh
$ cargo run --example semantic_hash_experiment -- -d --file cnf/rand-3-25-100-1.cnf
num vars: 25 | num clauses: 100
from dtree (min-fill)
 
c: 00081 nodes | 007492 nodes alloc | 30398 num recur | 11309 g/i | app cache: 07492 hits, 12.6% recur | 00235 #c | t: 32.919167ms
s: 00081 nodes | 007492 nodes alloc | 29159 num recur | 10965 g/i | app cache: 07492 hits, 11.9% recur | 00000 #c | t: 48.064416ms
```

Worse final SDD, but marginally fewer allocated nodes, recursive calls, and get-or-insert operations.

```sh
$ cargo run --example semantic_hash_experiment -- -d --file cnf/rand-3-25-100-2.cnf
num vars: 25 | num clauses: 100
from dtree (min-fill)

c: 02179 nodes | 007328 nodes alloc | 28504 num recur | 10158 g/i | app cache: 07328 hits, 9.9% recur | 00358 #c | t: 26.376334ms
s: 03467 nodes | 007317 nodes alloc | 27422 num recur | 09870 g/i | app cache: 07317 hits, 9.2% recur | 00000 #c | t: 44.302917ms
```
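
As context for the `num recur` and `g/i` columns above: the builders only ever take `&self`, so the new stats in this PR are counters bumped through interior mutability (`RefCell<usize>` fields plus a `log_recursive_call` hook, as the diffs below show). A stripped-down sketch of that pattern, with simplified names:

```rust
use std::cell::RefCell;

// Stripped-down version of the stat counters added in this PR; the real fields
// live on CompressionSddBuilder / SemanticSddBuilder in the diffs below.
struct StatCounters {
    num_recursive_calls: RefCell<usize>,
    num_get_or_insert: RefCell<usize>,
}

impl StatCounters {
    fn log_recursive_call(&self) {
        // `&self` receiver, so mutate through the RefCell
        *self.num_recursive_calls.borrow_mut() += 1;
    }

    fn log_get_or_insert(&self) {
        *self.num_get_or_insert.borrow_mut() += 1;
    }
}

fn main() {
    let stats = StatCounters {
        num_recursive_calls: RefCell::new(0),
        num_get_or_insert: RefCell::new(0),
    };
    // e.g. `and`/`condition` log a recursive call and the unique tables log a
    // get-or-insert; the totals become the `num recur` and `g/i` columns.
    stats.log_recursive_call();
    stats.log_get_or_insert();
    println!(
        "num recur: {} | g/i: {}",
        *stats.num_recursive_calls.borrow(),
        *stats.num_get_or_insert.borrow()
    );
}
```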
mattxwang authored Jul 8, 2023
1 parent 4233303 commit ca987a2
Showing 8 changed files with 150 additions and 71 deletions.
93 changes: 44 additions & 49 deletions examples/semantic_hash_experiment.rs
@@ -57,16 +57,14 @@ struct Args {
struct BenchStats {
label: String,
time: Duration,
num_nodes_cnf: usize,
num_child_nodes: usize,
num_nodes_alloc: usize,
// num_nodes_bdd: usize,
// num_nodes_sdd: usize,
// num_rec: usize,
// num_get_or_insert: usize,
// brt_cache_rate: f32,
// app_cache_rate: f32,
// num_compr: usize,
// num_compr_and: usize,
num_recursive_calls: usize,
num_get_or_insert_bdd: usize,
num_get_or_insert_sdd: usize,
app_cache_rate: f32,
app_cache_size: usize,
num_compr: usize,
}

impl BenchStats {
@@ -76,41 +74,35 @@ impl BenchStats {
sdd: &SddPtr,
builder: &impl SddBuilder<'a>,
) -> BenchStats {
// let stats = builder.stats();
let stats = builder.stats();
BenchStats {
label,
time,
num_nodes_cnf: sdd.num_child_nodes(),
num_child_nodes: sdd.num_child_nodes(),
num_nodes_alloc: builder.node_iter().len(),
// num_rec: stats.num_rec,
// num_get_or_insert: stats.num_get_or_insert,
// brt_cache_rate: (builder.num_app_cache_hits()) as f32 / (stats.num_get_or_insert as f32)
// * 100.0,
// app_cache_rate: stats.num_app_cache_hits as f32 / stats.num_rec as f32 * 100.0,
// num_compr: stats.num_compr,
// num_compr_and: stats.num_compr_and,
num_recursive_calls: stats.num_recursive_calls,
num_get_or_insert_bdd: stats.num_get_or_insert_bdd,
num_get_or_insert_sdd: stats.num_get_or_insert_sdd,
app_cache_rate: stats.app_cache_hits as f32 / stats.num_recursive_calls as f32 * 100.0,
app_cache_size: stats.app_cache_size,
num_compr: stats.num_compressions,
}
}
}

impl Display for BenchStats {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// write!(f, "{}: {:05} nodes | {:06} bdd / {:06} sdd uniq | {:07} rec | {:06} g/i, {:.1}% brt cache | {:.1}% app cache | {:05}/{:05} compr/and | t: {:?}",
// write!(f, "{}: {:05} nodes | {:06} nodes alloc | {:07} rec | {:06} g/i, {:.1}% brt cache | {:.1}% app cache | {:05}/{:05} compr/and | t: {:?}",
write!(
f,
"{}: {:05} nodes | {:06} nodes alloc | t: {:?}",
"{}: {:05} nodes | {:06} nodes alloc | {:05} num recur | {:05} g/i | app cache: {:05} hits, {:.1}% recur | {:05} #c | t: {:?}",
self.label,
self.num_nodes_cnf,
// self.num_nodes_bdd,
// self.num_nodes_sdd,
self.num_child_nodes,
self.num_nodes_alloc,
// self.num_rec,
// self.num_get_or_insert,
// self.brt_cache_rate,
// self.app_cache_rate,
// self.num_compr,
// self.num_compr_and,
self.num_recursive_calls,
self.num_get_or_insert_bdd + self.num_get_or_insert_sdd,
self.app_cache_size,
self.app_cache_rate,
self.num_compr,
self.time,
)
}
@@ -147,40 +139,36 @@ fn run_random_comparisons(cnf: Cnf, order: &[VarLabel], num: usize, bias: f64) {
let mut avg_nodes_cnf_sem = 0;
let mut avg_nodes_cnf_compr = 0;

// let mut avg_rec_sem = 0;
// let mut avg_rec_compr = 0;
let mut avg_rec_sem = 0;
let mut avg_rec_compr = 0;

for _ in 0..num {
let vtree = VTree::rand_split(order, bias);

let (compr, sem) = run_compr_sem(&cnf, &vtree);
println!(
// "c/s: {:.2}x nodes ({:.2}x b+sdd) | {:.2}x rec | {:.2}x g/i | {:.2}x %brt | {:.2}x %app | r% {:.2}",
"c/s: {:.2}x nodes ({:.2}x b+sdd) | r% {:.2}",
sem.num_nodes_cnf as f32 / compr.num_nodes_cnf as f32,
// (sem.num_nodes_bdd + sem.num_nodes_sdd) as f32
// / (compr.num_nodes_bdd + compr.num_nodes_sdd) as f32,
"c/s: {:.2}x nodes ({:.2}x b+sdd) | {:.2}x rec | {:.2}x g/i | | {:.2}x %app hits | {:.2}x % app size | r% {:.2}",
sem.num_child_nodes as f32 / compr.num_child_nodes as f32,
(sem.num_nodes_alloc) as f32 / (compr.num_nodes_alloc) as f32,
// sem.num_rec as f32 / compr.num_rec as f32,
// sem.num_get_or_insert as f32 / compr.num_get_or_insert as f32,
// sem.brt_cache_rate / compr.brt_cache_rate,
// sem.app_cache_rate / compr.app_cache_rate,
sem.num_recursive_calls as f32 / compr.num_recursive_calls as f32,
(sem.num_get_or_insert_bdd + sem.num_get_or_insert_sdd) as f32 / (compr.num_get_or_insert_bdd + compr.num_get_or_insert_sdd) as f32,
sem.app_cache_rate / compr.app_cache_rate,
sem.app_cache_size as f32 / compr.app_cache_size as f32,
vtree_rightness(&vtree)
);

avg_nodes_cnf_sem += sem.num_nodes_cnf;
avg_nodes_cnf_compr += compr.num_nodes_cnf;
avg_nodes_cnf_sem += sem.num_child_nodes;
avg_nodes_cnf_compr += compr.num_child_nodes;

// avg_rec_sem += sem.num_rec;
// avg_rec_compr += compr.num_rec;
avg_rec_sem += sem.num_recursive_calls;
avg_rec_compr += compr.num_recursive_calls;
}
println!("---");
println!(
// "total c/s (n={}): {:.2}x nodes | {:.2}x rec",
"total c/s (n={}): {:.2}x nodes",
"total c/s (n={}): {:.2}x nodes | {:.2}x rec",
num,
(avg_nodes_cnf_sem as f32 / avg_nodes_cnf_compr as f32),
// (avg_rec_sem as f32 / avg_rec_compr as f32)
(avg_rec_sem as f32 / avg_rec_compr as f32)
);
}

@@ -211,6 +199,9 @@ fn run_canonicalizer_experiment(c: Cnf, vtree: VTree, verbose: bool) {
if !sem_builder.eq(compr_cnf, sem_cnf) {
println!(" ");
println!("not equal! test is broken...");
println!("compr_cnf: {}", sem_builder.cached_semantic_hash(compr_cnf));
println!("sem_cnf: {}", sem_builder.cached_semantic_hash(sem_cnf));
println!("map: {:#?}", sem_builder.map());
if verbose {
eprintln!("=== COMPRESSED CNF ===");
eprintln!("{}", sem_builder.print_sdd(compr_cnf));
@@ -233,7 +224,11 @@ fn main() {
.collect::<Vec<VarLabel>>();
let vars = binding.as_slice();

println!("num vars: {}", cnf.num_vars());
println!(
"num vars: {} | num clauses: {}",
cnf.num_vars(),
cnf.clauses().len()
);

if args.run_random_vtrees > 0 {
println!(
2 changes: 0 additions & 2 deletions src/backing_store/bump_table.rs
@@ -149,12 +149,10 @@ where
.map(|x| x.ptr.unwrap())
}

#[allow(dead_code)]
pub fn num_nodes(&self) -> usize {
self.len
}

#[allow(dead_code)]
pub fn hits(&self) -> usize {
self.hits
}
10 changes: 9 additions & 1 deletion src/builder/sdd/builder.rs
@@ -15,7 +15,12 @@ use crate::{repr::cnf::Cnf, repr::logical_expr::LogicalExpr, repr::var_label::Va
#[derive(Default)]
pub struct SddBuilderStats {
pub app_cache_hits: usize,
pub app_cache_size: usize,
pub num_logically_redundant: usize,
pub num_recursive_calls: usize,
pub num_compressions: usize,
pub num_get_or_insert_bdd: usize,
pub num_get_or_insert_sdd: usize,
}

pub trait SddBuilder<'a>: BottomUpBuilder<'a, SddPtr<'a>> {
@@ -515,6 +520,7 @@ pub trait SddBuilder<'a>: BottomUpBuilder<'a, SddPtr<'a>> {
}

fn stats(&self) -> SddBuilderStats;
fn log_recursive_call(&self);
}

impl<'a, T> BottomUpBuilder<'a, SddPtr<'a>> for T
@@ -546,7 +552,8 @@ where
}

fn and(&'a self, a: SddPtr<'a>, b: SddPtr<'a>) -> SddPtr<'a> {
// println!("and a: {}\nb: {}", self.print_sdd(a), self.print_sdd(b));
self.log_recursive_call();

// first, check for a base case
match (a, b) {
(a, b) if self.is_true(a) => return b,
@@ -610,6 +617,7 @@ where
/// TODO: This is highly inefficient, will re-traverse nodes, needs a cache
/// TODO : this can bail out early by checking the vtree
fn condition(&'a self, f: SddPtr<'a>, lbl: VarLabel, value: bool) -> SddPtr<'a> {
self.log_recursive_call();
match f {
SddPtr::PtrTrue | SddPtr::PtrFalse => f,
SddPtr::Var(label, polarity) => {
37 changes: 23 additions & 14 deletions src/builder/sdd/compression.rs
@@ -1,13 +1,12 @@
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use std::collections::HashMap;

use crate::backing_store::bump_table::BackedRobinhoodTable;
use crate::backing_store::UniqueTable;
use crate::builder::cache::all_app::AllTable;
use crate::builder::cache::ite::Ite;
use crate::builder::cache::LruTable;
use crate::builder::BottomUpBuilder;
use crate::repr::bdd::create_semantic_hash_map;
use crate::repr::ddnnf::DDNNFPtr;
use crate::repr::sdd::binary_sdd::BinarySDD;
use crate::repr::sdd::sdd_or::{SddAnd, SddOr};
@@ -25,6 +24,11 @@ pub struct CompressionSddBuilder<'a> {
// caches
ite_cache: RefCell<AllTable<SddPtr<'a>>>,
app_cache: RefCell<HashMap<SddAnd<'a>, SddPtr<'a>>>,
// stats
num_recursive_calls: RefCell<usize>,
num_compressions: RefCell<usize>,
num_get_or_insert_bdd: RefCell<usize>,
num_get_or_insert_sdd: RefCell<usize>,
}

impl<'a> SddBuilder<'a> for CompressionSddBuilder<'a> {
@@ -61,6 +65,7 @@ impl<'a> SddBuilder<'a> for CompressionSddBuilder<'a> {

#[inline]
fn get_or_insert_bdd(&'a self, bdd: BinarySDD<'a>) -> SddPtr<'a> {
*self.num_get_or_insert_bdd.borrow_mut() += 1;
unsafe {
let tbl = &mut *self.bdd_tbl.as_ptr();
SddPtr::BDD(tbl.get_or_insert(bdd))
@@ -69,6 +74,7 @@ impl<'a> SddBuilder<'a> for CompressionSddBuilder<'a> {

#[inline]
fn get_or_insert_sdd(&'a self, or: SddOr<'a>) -> SddPtr<'a> {
*self.num_get_or_insert_sdd.borrow_mut() += 1;
unsafe {
let tbl = &mut *self.sdd_tbl.as_ptr();
SddPtr::Reg(tbl.get_or_insert(or))
@@ -88,6 +94,7 @@ impl<'a> SddBuilder<'a> for CompressionSddBuilder<'a> {
/// Canonicalizes the list of (prime, sub) terms in-place
/// `node`: a list of (prime, sub) pairs
fn compress(&'a self, node: &mut Vec<SddAnd<'a>>) {
*self.num_compressions.borrow_mut() += 1;
for i in 0..node.len() {
// see if we can compress i
let mut j = i + 1;
@@ -132,22 +139,20 @@ impl<'a> SddBuilder<'a> for CompressionSddBuilder<'a> {
}

fn stats(&self) -> super::builder::SddBuilderStats {
let mut s: HashSet<u128> = HashSet::new();
let hasher = create_semantic_hash_map::<100000000063>(self.num_vars() + 1000); // TODO FIX THIS BADNESS
let mut num_collisions = 0;
for n in self.node_iter() {
let h = n.cached_semantic_hash(self.get_vtree_manager(), &hasher);
if s.contains(&h.value()) {
num_collisions += 1;
}
s.insert(h.value());
}

SddBuilderStats {
app_cache_hits: self.bdd_tbl.borrow().hits() + self.sdd_tbl.borrow().hits(),
num_logically_redundant: num_collisions,
app_cache_size: self.bdd_tbl.borrow().num_nodes() + self.sdd_tbl.borrow().num_nodes(),
num_logically_redundant: 0,
num_recursive_calls: *self.num_recursive_calls.borrow(),
num_compressions: *self.num_compressions.borrow(),
num_get_or_insert_bdd: *self.num_get_or_insert_bdd.borrow(),
num_get_or_insert_sdd: *self.num_get_or_insert_sdd.borrow(),
}
}

fn log_recursive_call(&self) {
*self.num_recursive_calls.borrow_mut() += 1
}
}

impl<'a> CompressionSddBuilder<'a> {
@@ -160,6 +165,10 @@ impl<'a> CompressionSddBuilder<'a> {
sdd_tbl: RefCell::new(BackedRobinhoodTable::new()),
vtree: vtree_man,
should_compress: true,
num_recursive_calls: RefCell::new(0),
num_compressions: RefCell::new(0),
num_get_or_insert_bdd: RefCell::new(0),
num_get_or_insert_sdd: RefCell::new(0),
}
}

26 changes: 26 additions & 0 deletions src/builder/sdd/semantic.rs
@@ -29,6 +29,10 @@ pub struct SemanticSddBuilder<'a, const P: u128> {
app_cache: RefCell<HashMap<u128, SddPtr<'a>>>,
// semantic hashing
map: WmcParams<FiniteField<P>>,
// stats
num_recursive_calls: RefCell<usize>,
num_get_or_insert_bdd: RefCell<usize>,
num_get_or_insert_sdd: RefCell<usize>,
}

impl<'a, const P: u128> SddBuilder<'a> for SemanticSddBuilder<'a, P> {
@@ -79,6 +83,7 @@ impl<'a, const P: u128> SddBuilder<'a> for SemanticSddBuilder<'a, P> {
}

fn get_or_insert_bdd(&'a self, bdd: BinarySDD<'a>) -> SddPtr<'a> {
*self.num_get_or_insert_bdd.borrow_mut() += 1;
let semantic_hash = bdd.semantic_hash(&self.vtree, &self.map);

if let Some(sdd) = self.check_cached_hash_and_neg(semantic_hash) {
@@ -93,6 +98,7 @@ impl<'a, const P: u128> SddBuilder<'a> for SemanticSddBuilder<'a, P> {
}

fn get_or_insert_sdd(&'a self, or: SddOr<'a>) -> SddPtr<'a> {
*self.num_get_or_insert_sdd.borrow_mut() += 1;
let semantic_hash = or.semantic_hash(&self.vtree, &self.map);
if let Some(sdd) = self.check_cached_hash_and_neg(semantic_hash) {
return sdd;
@@ -130,9 +136,18 @@ impl<'a, const P: u128> SddBuilder<'a> for SemanticSddBuilder<'a, P> {

SddBuilderStats {
app_cache_hits: self.bdd_tbl.borrow().hits() + self.sdd_tbl.borrow().hits(),
app_cache_size: self.bdd_tbl.borrow().num_nodes() + self.sdd_tbl.borrow().num_nodes(),
num_logically_redundant: num_collisions,
num_recursive_calls: *self.num_recursive_calls.borrow(),
num_compressions: 0,
num_get_or_insert_bdd: *self.num_get_or_insert_bdd.borrow(),
num_get_or_insert_sdd: *self.num_get_or_insert_sdd.borrow(),
}
}

fn log_recursive_call(&self) {
*self.num_recursive_calls.borrow_mut() += 1
}
}

impl<'a, const P: u128> SemanticSddBuilder<'a, P> {
@@ -147,9 +162,20 @@ impl<'a, const P: u128> SemanticSddBuilder<'a, P> {
bdd_tbl: RefCell::new(BackedRobinhoodTable::new()),
sdd_tbl: RefCell::new(BackedRobinhoodTable::new()),
map,
num_recursive_calls: RefCell::new(0),
num_get_or_insert_bdd: RefCell::new(0),
num_get_or_insert_sdd: RefCell::new(0),
}
}

pub fn cached_semantic_hash(&self, sdd: SddPtr) -> FiniteField<P> {
sdd.cached_semantic_hash(&self.vtree, &self.map)
}

pub fn map(&self) -> &WmcParams<FiniteField<P>> {
&self.map
}

fn hash_bdd(&self, elem: &BinarySDD) -> u64 {
let mut hasher = FxHasher::default();
elem.semantic_hash(&self.vtree, &self.map)
(diffs for the remaining changed files were not loaded)
