diff --git a/rust/Cargo.lock b/rust/Cargo.lock index 60604d7..c1317c7 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -106,9 +106,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "clap" -version = "4.3.12" +version = "4.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3eab9e8ceb9afdade1ab3f0fd8dbce5b1b2f468ad653baf10e771781b2b67b73" +checksum = "5fd304a20bff958a57f04c4e96a2e7594cc4490a0e809cbd48bb6437edaa452d" dependencies = [ "clap_builder", "clap_derive", @@ -117,9 +117,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.3.12" +version = "4.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f2763db829349bf00cfc06251268865ed4363b93a943174f638daf3ecdba2cd" +checksum = "01c6a3f08f1fe5662a35cfe393aec09c4df95f60ee93b7556505260f75eee9e1" dependencies = [ "anstream", "anstyle", @@ -218,9 +218,9 @@ dependencies = [ [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "encode_unicode" @@ -427,18 +427,18 @@ checksum = "edc55135a600d700580e406b4de0d59cb9ad25e344a3a091a97ded2622ec4ec6" [[package]] name = "proc-macro2" -version = "1.0.65" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92de25114670a878b1261c79c9f8f729fb97e95bac93f6312f583c60dd6a1dfe" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.30" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5907a1b7c277254a8b15170f6e7c97cfa60ee7872a3217663bb81151e48184bb" +checksum = 
"50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" dependencies = [ "proc-macro2", ] @@ -489,9 +489,9 @@ dependencies = [ [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "smallvec" @@ -507,9 +507,9 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "syn" -version = "2.0.26" +version = "2.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" +checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0" dependencies = [ "proc-macro2", "quote", diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 1dc02ba..39123b1 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -26,4 +26,4 @@ hashbrown = { version = "0.14", features = ["rayon"] } [[bin]] name = "opencubes" -path = "./src/cli.rs" +path = "./src/cli/cli.rs" diff --git a/rust/src/cli.rs b/rust/src/cli/cli.rs similarity index 66% rename from rust/src/cli.rs rename to rust/src/cli/cli.rs index ceccfe3..b298e5d 100644 --- a/rust/src/cli.rs +++ b/rust/src/cli/cli.rs @@ -1,24 +1,46 @@ use std::{ collections::{BTreeMap, HashSet}, - io::ErrorKind, path::PathBuf, time::{Duration, Instant}, }; use clap::{Args, Parser, Subcommand, ValueEnum}; use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; -use opencubes::{ - hashless, - naive_polycube::NaivePolyCube, - pcube::{PCubeFile, RawPCube}, - pointlist, rotation_reduced, -}; +use opencubes::{naive_polycube::NaivePolyCube, pcube::PCubeFile}; use rayon::prelude::{IntoParallelIterator, ParallelIterator}; +mod enumerate; +use enumerate::enumerate; + +fn finish_bar(bar: &ProgressBar, duration: Duration, expansions: usize, n: usize) { + let 
time = duration.as_micros(); + let secs = time / 1_000_000; + let micros = time % 1_000_000; + + if let Some(len) = bar.length() { + let pos_width = format!("{}", len).len(); + + let template = format!( + "[{{elapsed_precise}}] {{bar:40.cyan/blue}} {{pos:>{pos_width}}}/{{len}} {{msg}}" + ); + + bar.set_style( + ProgressStyle::with_template(&template) + .unwrap() + .progress_chars("#>-"), + ); + } + + bar.finish_with_message(format!( + "Done! Found {expansions} expansions (N = {n}) in {secs}.{micros} s" + )); +} + fn unknown_bar() -> ProgressBar { let style = ProgressStyle::with_template("[{elapsed_precise}] [{spinner:10.cyan/blue}] {msg}") .unwrap() .tick_strings(&[ + ">---------", "=>--------", "<=>-------", "-<=>------", @@ -29,19 +51,23 @@ fn unknown_bar() -> ProgressBar { "------<=>-", "-------<=>", "--------<=", + "---------<", + "--------<=", "-------<=>", "------<=>-", - "----<=>---", "-----<=>--", "---<=>----", "--<=>-----", "-<=>------", "<=>-------", "=>--------", - "----------", ]); - ProgressBar::new(100).with_style(style) + let bar = ProgressBar::new(100).with_style(style); + + bar.enable_steady_tick(Duration::from_millis(66)); + + bar } pub fn make_bar(len: u64) -> indicatif::ProgressBar { @@ -50,7 +76,7 @@ pub fn make_bar(len: u64) -> indicatif::ProgressBar { let pos_width = format!("{len}").len(); let template = - format!("[{{elapsed_precise}}] {{bar:40.cyan/blue}} {{pos:>{pos_width}}}/{{len}} remaining: [{{eta_precise}}] {{msg}}"); + format!("[{{elapsed_precise}}] {{bar:40.cyan/blue}} {{pos:>{pos_width}}}/{{len}} {{msg}} remaining: [{{eta_precise}}]"); bar.set_style( ProgressStyle::with_template(&template) @@ -280,218 +306,6 @@ pub fn validate(opts: &ValidateArgs) -> std::io::Result<()> { Ok(()) } -fn load_cache_file(n: usize) -> Option { - let name = format!("cubes_{n}.pcube"); - - match PCubeFile::new_file(&name) { - Ok(file) => Some(file), - Err(e) => { - if e.kind() == ErrorKind::InvalidData || e.kind() == ErrorKind::Other { - 
println!("Enountered invalid cache file {name}. Error: {e}."); - } else { - println!("Could not load cache file '{name}'. Error: {e}"); - } - None - } - } -} - -/// load closes cache file to n into a vec -/// returns a vec and the next order above the found cache file -fn load_cache(n: usize) -> (Vec, usize) { - let calculate_from = 2; - - for n in (calculate_from..n).rev() { - let name = format!("cubes_{n}.pcube"); - let cache = if let Some(file) = load_cache_file(n) { - file - } else { - continue; - }; - - println!("Found cache for N = {n}. Loading data..."); - - if !cache.canonical() { - println!("Cached cubes are not canonical. Canonicalizing...") - } - - let len = cache.len(); - - let mut error = None; - let mut total_loaded = 0; - - let filter = |value| { - total_loaded += 1; - match value { - Ok(v) => Some(v), - Err(e) => { - error = Some(e); - None - } - } - }; - - let cached: HashSet<_> = cache.filter_map(filter).collect(); - - if let Some(e) = error { - println!("Error occured while loading {name}. Error: {e}"); - } else { - let total_len = len.unwrap_or(total_loaded); - - if total_len != cached.len() { - println!("There were non-unique cubes in the cache file. 
Continuing...") - } - - return (cached.into_iter().collect(), n + 1); - } - } - - println!("no cache file found reverting to start building from n=1"); - let mut base = RawPCube::new_empty(1, 1, 1); - base.set(0, 0, 0, true); - - let current = [base.clone()].to_vec(); - //calculate from 2 because 1 is in the vec - (current, 2) -} - -fn unique_expansions( - mut expansion_fn: F, - use_cache: bool, - n: usize, - compression: Compression, - current: Vec, - calculate_from: usize, - bar: &ProgressBar, -) -> Vec -where - F: FnMut(&ProgressBar, std::slice::Iter<'_, NaivePolyCube>) -> Vec, -{ - if n == 0 { - return Vec::new(); - } - - let mut current = current - .into_iter() - .map(NaivePolyCube::from) - .map(|v| v.canonical_form()) - .collect::>(); - - for i in calculate_from..=n { - bar.set_length(current.len() as u64); - bar.set_message(format!("base polycubes expanded for N = {i}...")); - - let start = Instant::now(); - - let next = expansion_fn(&bar, current.iter()); - - bar.set_message(format!( - "Found {} unique expansions (N = {i}) in {} ms.", - next.len(), - start.elapsed().as_millis(), - )); - - bar.finish(); - - if use_cache { - let name = &format!("cubes_{i}.pcube"); - if !std::fs::File::open(name).is_ok() { - println!("Saving {} cubes to cache file", next.len()); - PCubeFile::write_file(false, compression.into(), next.iter().map(Into::into), name) - .unwrap(); - } else { - println!("Cache file already exists for N = {i}. 
Not overwriting."); - } - } - - current = next; - } - - current -} - -pub fn enumerate(opts: &EnumerateOpts) { - let n = opts.n; - let cache = !opts.no_cache; - - if n < 2 { - println!("n < 2 unsuported"); - return; - } - - let start = Instant::now(); - - let (seed_list, startn) = if cache { - load_cache(n) - } else { - let mut base = RawPCube::new_empty(1, 1, 1); - base.set(0, 0, 0, true); - - let current = [base].to_vec(); - //calculate from 2 because 1 is in the vec - (current, 2) - }; - let bar = make_bar(seed_list.len() as u64); - - //Select enumeration function to run - let cubes_len = match (opts.mode, opts.no_parallelism) { - (EnumerationMode::Standard, true) => { - let cubes = unique_expansions( - |bar, current: std::slice::Iter<'_, NaivePolyCube>| { - NaivePolyCube::unique_expansions(bar, current) - }, - cache, - n, - opts.cache_compression, - seed_list, - startn, - &bar, - ); - cubes.len() - } - (EnumerationMode::Standard, false) => { - let cubes = unique_expansions( - |bar, current: std::slice::Iter<'_, NaivePolyCube>| { - NaivePolyCube::unique_expansions_rayon(bar, current) - }, - cache, - n, - opts.cache_compression, - seed_list, - startn, - &bar, - ); - cubes.len() - } - (EnumerationMode::RotationReduced, para) => { - if n > 16 { - println!("n > 16 not supported for rotation reduced"); - return; - } - if !para { - println!("no parallel implementation for rotation-reduced, running single threaded") - } - rotation_reduced::gen_polycubes(n, &bar) - } - (EnumerationMode::PointList, para) => { - if n > 16 { - println!("n > 16 not supported for point-list"); - return; - } - let cubes = pointlist::gen_polycubes(n, cache, !para, seed_list, startn, &bar); - cubes.len() - } - (EnumerationMode::Hashless, para) => { - hashless::gen_polycubes(n, !para, seed_list, startn, &bar) - } - }; - - let duration = start.elapsed(); - - println!("Unique polycubes found for N = {n}: {cubes_len}.",); - println!("Duration: {} ms", duration.as_millis()); -} - pub fn 
convert(opts: &ConvertArgs) { if opts.output_path.is_some() && opts.path.len() > 1 { println!("Cannot convert more than 1 file when output path is provided"); diff --git a/rust/src/cli/enumerate.rs b/rust/src/cli/enumerate.rs new file mode 100644 index 0000000..0901896 --- /dev/null +++ b/rust/src/cli/enumerate.rs @@ -0,0 +1,314 @@ +use std::{io::ErrorKind, sync::Arc, time::Instant}; + +use opencubes::{ + hashless::MapStore, + iterator::{indicatif::PolycubeProgressBarIter, *}, + naive_polycube::NaivePolyCube, + pcube::{PCubeFile, RawPCube}, + pointlist, + polycube_reps::CubeMapPos, + rotation_reduced, +}; + +use crate::{finish_bar, make_bar, unknown_bar, Compression, EnumerateOpts, EnumerationMode}; + +use rayon::{iter::ParallelBridge, prelude::ParallelIterator}; + +#[derive(Clone)] +struct AllUniques { + current: Arc>, + offset: usize, + n: usize, +} + +impl Iterator for AllUniques { + type Item = RawPCube; + + fn next(&mut self) -> Option { + let output = self.current.get(self.offset)?.clone(); + self.offset += 1; + Some(output) + } + + fn size_hint(&self) -> (usize, Option) { + let len = self.current.len() - self.offset; + (len, Some(len)) + } +} + +impl ExactSizeIterator for AllUniques {} + +impl PolycubeIterator for AllUniques { + fn is_canonical(&self) -> bool { + false + } + + fn n_hint(&self) -> Option { + Some(self.n) + } +} + +impl AllPolycubeIterator for AllUniques {} +impl UniquePolycubeIterator for AllUniques {} +impl AllUniquePolycubeIterator for AllUniques {} + +fn save_to_cache( + compression: Compression, + n: usize, + // Ideally, this would be `AllUniquePolycubeIterator` but it's + // a bit unwieldy + cubes: impl Iterator + ExactSizeIterator, +) { + let name = &format!("cubes_{n}.pcube"); + if !std::fs::File::open(name).is_ok() { + println!("Saving {} cubes to cache file", cubes.len()); + PCubeFile::write_file(false, compression.into(), cubes, name).unwrap(); + } else { + println!("Cache file already exists for N = {n}. 
Not overwriting."); + } +} + +fn load_cache_file(n: usize) -> Option { + let name = format!("cubes_{n}.pcube"); + + match PCubeFile::new_file(&name) { + Ok(file) => Some(file), + Err(e) => { + if e.kind() == ErrorKind::InvalidData || e.kind() == ErrorKind::Other { + println!("Encountered invalid cache file {name}. Error: {e}."); + } + None + } + } +} + +enum CacheOrbase { + Cache(opencubes::pcube::AllUnique), + Base(bool), +} + +impl Iterator for CacheOrbase { + type Item = RawPCube; + + fn next(&mut self) -> Option { + match self { + CacheOrbase::Cache(cache) => cache.next(), + CacheOrbase::Base(v) if v == &false => { + *v = true; + let mut base = RawPCube::new_empty(1, 1, 1); + base.set(0, 0, 0, true); + Some(base) + } + CacheOrbase::Base(_) => None, + } + } + + fn size_hint(&self) -> (usize, Option) { + match self { + CacheOrbase::Cache(c) => c.size_hint(), + CacheOrbase::Base(_) => (1, Some(1)), + } + } +} + +impl PolycubeIterator for CacheOrbase { + fn is_canonical(&self) -> bool { + match self { + CacheOrbase::Cache(c) => c.is_canonical(), + CacheOrbase::Base(_) => true, + } + } + + fn n_hint(&self) -> Option { + match self { + CacheOrbase::Cache(c) => Some(c.n()), + CacheOrbase::Base(_) => Some(1), + } + } +} + +impl UniquePolycubeIterator for CacheOrbase {} +impl AllPolycubeIterator for CacheOrbase {} +impl AllUniquePolycubeIterator for CacheOrbase {} + +/// load the largest findable cache file with size <= n - 1 +/// returns an iterator over its cubes, or a base iterator starting at N = 1 if none is found +fn load_cache(n: usize) -> CacheOrbase { + let calculate_from = 2; + + for n in (calculate_from..n).rev() { + let cache = if let Some(file) = load_cache_file(n) { + file + } else { + continue; + }; + + println!("Found cache for N = {n}."); + return CacheOrbase::Cache(cache.assume_all_unique()); + } + + println!( + "No cache file found for size <= {}. 
Starting from N = 1", + n.saturating_sub(1) + ); + + CacheOrbase::Base(false) +} + +fn unique_expansions( + save_cache: bool, + n: usize, + compression: Compression, + current: impl AllUniquePolycubeIterator, + parallel: bool, +) -> Vec { + if n == 0 { + return Vec::new(); + } + + let calculate_from = current.n(); + let current = current.collect(); + + let mut current = AllUniques { + current: Arc::new(current), + offset: 0, + n: calculate_from, + }; + + let mut i = calculate_from; + + loop { + let bar = make_bar(current.len() as u64); + bar.set_message(format!("Expanding base polycubes of N = {i}...")); + + let start = Instant::now(); + + let with_bar = PolycubeProgressBarIter::new(bar.clone(), current); + let next: Vec = if parallel { + NaivePolyCube::unique_expansions_rayon(with_bar).collect() + } else { + NaivePolyCube::unique_expansions(with_bar).collect() + }; + + finish_bar(&bar, start.elapsed(), next.len(), i + 1); + + if save_cache { + save_to_cache(compression, i + 1, next.iter().map(Clone::clone)); + } + + i += 1; + + if n.saturating_sub(i) == 0 { + return next; + } else { + current = AllUniques { + current: Arc::new(next), + offset: 0, + n: i + 1, + }; + } + } +} + +/// run pointlist based generation algorithm +pub fn enumerate_hashless( + n: usize, + parallel: bool, + current: impl AllUniquePolycubeIterator + Send, +) -> usize { + let t1_start = Instant::now(); + + let start_n = current.n(); + let bar = if let (_, Some(max)) = current.size_hint() { + make_bar(max as u64) + } else { + unknown_bar() + }; + + bar.set_message(format!("Expanding seeds of N = {}...", start_n)); + + let process = |seed: RawPCube| { + let seed: CubeMapPos<32> = seed.into(); + let children = MapStore::enumerate_canonical_children_min_mem(&seed, start_n, n); + bar.inc(1); + children + }; + + let count: usize = if parallel { + current.par_bridge().map(process).sum() + } else { + current.map(process).sum() + }; + + finish_bar(&bar, t1_start.elapsed(), count, n); + + count +} + 
+pub fn enumerate(opts: &EnumerateOpts) { + let n = opts.n; + let cache = !opts.no_cache; + + let start = Instant::now(); + + let seed_list = if opts.no_cache { + CacheOrbase::Base(false) + } else { + load_cache(n) + }; + + //Select enumeration function to run + let cubes_len = match (opts.mode, opts.no_parallelism) { + (EnumerationMode::Standard, no_parallelism) => { + let cubes = + unique_expansions(cache, n, opts.cache_compression, seed_list, !no_parallelism); + cubes.len() + } + (EnumerationMode::RotationReduced, not_parallel) => { + if n > 16 { + println!("n > 16 not supported for rotation reduced"); + return; + } + if !not_parallel { + println!("no parallel implementation for rotation-reduced, running single threaded") + } + let bar = if let (_, Some(max)) = seed_list.size_hint() { + make_bar(max as u64) + } else { + unknown_bar() + }; + + rotation_reduced::gen_polycubes(n, &bar) + } + (EnumerationMode::PointList, not_parallel) => { + if n > 16 { + println!("n > 16 not supported for point-list"); + return; + } + let bar = if let (_, Some(max)) = seed_list.size_hint() { + make_bar(max as u64) + } else { + unknown_bar() + }; + + let startn = seed_list.n() + 1; + let cubes = pointlist::gen_polycubes( + n, + cache, + !not_parallel, + seed_list.collect(), + startn, + &bar, + ); + cubes.len() + } + (EnumerationMode::Hashless, not_parallel) => { + enumerate_hashless(n, !not_parallel, seed_list) + } + }; + + let duration = start.elapsed(); + + println!("Unique polycubes found for N = {n}: {cubes_len}.",); + println!("Duration: {} ms", duration.as_millis()); +} diff --git a/rust/src/hashless.rs b/rust/src/hashless.rs index 4ce4ed2..28d98f9 100644 --- a/rust/src/hashless.rs +++ b/rust/src/hashless.rs @@ -1,9 +1,6 @@ -use std::{cmp::max, time::Instant}; +use std::cmp::max; -use crate::pcube::RawPCube; use hashbrown::HashSet; -use indicatif::ProgressBar; -use rayon::prelude::{IntoParallelRefIterator, ParallelIterator}; use crate::{ pointlist::{array_insert, array_shift}, 
@@ -11,247 +8,195 @@ use crate::{ rotations::{rot_matrix_points, to_min_rot_points, MatrixCol}, }; -/// helper function to not duplicate code for canonicalising polycubes -/// and storing them in the hashset -fn insert_map(store: &mut HashSet>, dim: &Dim, map: &CubeMapPos<32>, count: usize) { - if !store.contains(map) { - let map = to_min_rot_points(map, dim, count); - store.insert(map); - } +pub struct MapStore { + inner: HashSet>, } -/// try expaning each cube into both x+1 and x-1, calculating new dimension -/// and ensuring x is never negative -#[inline] -fn expand_xs(dst: &mut HashSet>, seed: &CubeMapPos<32>, shape: &Dim, count: usize) { - for (i, coord) in seed.cubes[0..count].iter().enumerate() { - if !seed.cubes[(i + 1)..count].contains(&(coord + 1)) { - let mut new_shape = *shape; - let mut exp_map = *seed; - - array_insert(coord + 1, &mut exp_map.cubes[i..=count]); - new_shape.x = max(new_shape.x, ((coord + 1) & 0x1f) as usize); - insert_map(dst, &new_shape, &exp_map, count + 1) - } - if coord & 0x1f != 0 { - if !seed.cubes[0..i].contains(&(coord - 1)) { - let mut exp_map = *seed; - //faster move of top half hopefully - array_shift(&mut exp_map.cubes[i..=count]); - array_insert(coord - 1, &mut exp_map.cubes[0..=i]); - insert_map(dst, shape, &exp_map, count + 1) - } - } else { - let mut new_shape = *shape; - new_shape.x += 1; - let mut exp_map = *seed; - for i in 0..count { - exp_map.cubes[i] += 1; +macro_rules! 
cube_map_pos_expand { + ($name:ident, $dim:ident, $shift:literal) => { + #[inline(always)] + pub fn $name<'a>( + &'a self, + shape: &'a Dim, + count: usize, + ) -> impl Iterator + 'a { + struct Iter<'a, const C: usize> { + inner: &'a CubeMapPos, + shape: &'a Dim, + count: usize, + i: usize, + stored: Option<(Dim, usize, CubeMapPos)>, } - array_shift(&mut exp_map.cubes[i..=count]); - array_insert(*coord, &mut exp_map.cubes[0..=i]); - insert_map(dst, &new_shape, &exp_map, count + 1) - } - } -} -/// try expaning each cube into both y+1 and y-1, calculating new dimension -/// and ensuring y is never negative -#[inline] -fn expand_ys(dst: &mut HashSet>, seed: &CubeMapPos<32>, shape: &Dim, count: usize) { - for (i, coord) in seed.cubes[0..count].iter().enumerate() { - if !seed.cubes[(i + 1)..count].contains(&(coord + (1 << 5))) { - let mut new_shape = *shape; - let mut exp_map = *seed; - array_insert(coord + (1 << 5), &mut exp_map.cubes[i..=count]); - new_shape.y = max(new_shape.y, (((coord >> 5) + 1) & 0x1f) as usize); - insert_map(dst, &new_shape, &exp_map, count + 1) - } - if (coord >> 5) & 0x1f != 0 { - if !seed.cubes[0..i].contains(&(coord - (1 << 5))) { - let mut exp_map = *seed; - //faster move of top half hopefully - array_shift(&mut exp_map.cubes[i..=count]); - array_insert(coord - (1 << 5), &mut exp_map.cubes[0..=i]); - insert_map(dst, shape, &exp_map, count + 1) - } - } else { - let mut new_shape = *shape; - new_shape.y += 1; - let mut exp_map = *seed; - for i in 0..count { - exp_map.cubes[i] += 1 << 5; - } - array_shift(&mut exp_map.cubes[i..=count]); - array_insert(*coord, &mut exp_map.cubes[0..=i]); - insert_map(dst, &new_shape, &exp_map, count + 1) - } - } -} + impl<'a, const C: usize> Iterator for Iter<'a, C> { + type Item = (Dim, usize, CubeMapPos); -/// try expaning each cube into both z+1 and z-1, calculating new dimension -/// and ensuring z is never negative -#[inline] -fn expand_zs(dst: &mut HashSet>, seed: &CubeMapPos<32>, shape: &Dim, count: 
usize) { - for (i, coord) in seed.cubes[0..count].iter().enumerate() { - if !seed.cubes[(i + 1)..count].contains(&(coord + (1 << 10))) { - let mut new_shape = *shape; - let mut exp_map = *seed; - array_insert(coord + (1 << 10), &mut exp_map.cubes[i..=count]); - new_shape.z = max(new_shape.z, (((coord >> 10) + 1) & 0x1f) as usize); - insert_map(dst, &new_shape, &exp_map, count + 1) - } - if (coord >> 10) & 0x1f != 0 { - if !seed.cubes[0..i].contains(&(coord - (1 << 10))) { - let mut exp_map = *seed; - //faster move of top half hopefully - array_shift(&mut exp_map.cubes[i..=count]); - array_insert(coord - (1 << 10), &mut exp_map.cubes[0..=i]); - insert_map(dst, shape, &exp_map, count + 1) + fn next(&mut self) -> Option { + loop { + if let Some(stored) = self.stored.take() { + return Some(stored); + } + + let i = self.i; + + if i == self.count { + return None; + } + + self.i += 1; + let coord = *self.inner.cubes.get(i)?; + + let plus = coord + (1 << $shift); + let minus = coord - (1 << $shift); + + if !self.inner.cubes[(i + 1)..self.count].contains(&plus) { + let mut new_shape = *self.shape; + let mut new_map = *self.inner; + + array_insert(plus, &mut new_map.cubes[i..=self.count]); + new_shape.$dim = + max(new_shape.$dim, (((coord >> $shift) + 1) & 0x1f) as usize); + + self.stored = Some((new_shape, self.count + 1, new_map)); + } + + let mut new_map = *self.inner; + let mut new_shape = *self.shape; + + // If the coord is out of bounds for $dim, shift everything + // over and create the cube at the out-of-bounds position. + // If it is in bounds, check if the $dim - 1 value already + // exists. 
+ let insert_coord = if (coord >> $shift) & 0x1f != 0 { + if !self.inner.cubes[0..i].contains(&minus) { + minus + } else { + continue; + } + } else { + new_shape.$dim += 1; + for i in 0..self.count { + new_map.cubes[i] += 1 << $shift; + } + coord + }; + + array_shift(&mut new_map.cubes[i..=self.count]); + array_insert(insert_coord, &mut new_map.cubes[0..=i]); + return Some((new_shape, self.count + 1, new_map)); + } + } } - } else { - let mut new_shape = *shape; - new_shape.z += 1; - let mut exp_map = *seed; - for i in 0..count { - exp_map.cubes[i] += 1 << 10; + + Iter { + inner: self, + shape, + count, + i: 0, + stored: None, } - array_shift(&mut exp_map.cubes[i..=count]); - array_insert(*coord, &mut exp_map.cubes[0..=i]); - insert_map(dst, &new_shape, &exp_map, count + 1) } - } + }; } -/// reduce number of expansions needing to be performed based on -/// X >= Y >= Z constraint on Dim -#[inline] -fn do_cube_expansion( - dst: &mut HashSet>, - seed: &CubeMapPos<32>, - shape: &Dim, - count: usize, -) { - if shape.y < shape.x { - expand_ys(dst, seed, shape, count); - } - if shape.z < shape.y { - expand_zs(dst, seed, shape, count); - } - expand_xs(dst, seed, shape, count); +impl CubeMapPos { + cube_map_pos_expand!(expand_x, x, 0); + cube_map_pos_expand!(expand_y, y, 5); + cube_map_pos_expand!(expand_z, z, 10); } -/// perform the cube expansion for a given polycube -/// if perform extra expansions for cases where the dimensions are equal as -/// square sides may miss poly cubes otherwise -#[inline] -fn expand_cube_map( - dst: &mut HashSet>, - seed: &CubeMapPos<32>, - shape: &Dim, - count: usize, -) { - if shape.x == shape.y && shape.x > 0 { - let rotz = rot_matrix_points( - seed, - shape, - count, - MatrixCol::YN, - MatrixCol::XN, - MatrixCol::ZN, - 1025, - ); - do_cube_expansion(dst, &rotz, shape, count); +impl MapStore { + pub fn new() -> Self { + Self { + inner: HashSet::new(), + } } - if shape.y == shape.z && shape.y > 0 { - let rotx = rot_matrix_points( - seed, - 
shape, - count, - MatrixCol::XN, - MatrixCol::ZP, - MatrixCol::YP, - 1025, - ); - do_cube_expansion(dst, &rotx, shape, count); + + /// helper function to not duplicate code for canonicalising polycubes + /// and storing them in the hashset + fn insert_map(&mut self, dim: &Dim, map: &CubeMapPos, count: usize) { + if !self.inner.contains(map) { + let map = to_min_rot_points(map, dim, count); + self.inner.insert(map); + } } - if shape.x == shape.z && shape.x > 0 { - let roty = rot_matrix_points( - seed, - shape, - count, - MatrixCol::ZP, - MatrixCol::YP, - MatrixCol::XN, - 1025, - ); - do_cube_expansion(dst, &roty, shape, count); + + /// reduce number of expansions needing to be performed based on + /// X >= Y >= Z constraint on Dim + #[inline] + fn do_cube_expansion(&mut self, seed: &CubeMapPos, shape: &Dim, count: usize) { + let expand_ys = if shape.y < shape.x { + Some(seed.expand_y(shape, count)) + } else { + None + }; + + let expand_zs = if shape.z < shape.y { + Some(seed.expand_z(shape, count)) + } else { + None + }; + + seed.expand_x(shape, count) + .chain(expand_ys.into_iter().flatten()) + .chain(expand_zs.into_iter().flatten()) + .for_each(|(dim, new_count, map)| self.insert_map(&dim, &map, new_count)); } - do_cube_expansion(dst, seed, shape, count); -} -fn enumerate_canonical_children(seed: &CubeMapPos<32>, count: usize, target: usize) -> usize { - let mut children = HashSet::new(); - children.clear(); - let shape = seed.extrapolate_dim(); - expand_cube_map(&mut children, seed, &shape, count); - children.retain(|child| child.is_canonical_root(count, seed)); - if count + 1 == target { - children.len() - } else { - children - .iter() - .map(|child| enumerate_canonical_children(child, count + 1, target)) - .sum() + /// perform the cube expansion for a given polycube + /// if perform extra expansions for cases where the dimensions are equal as + /// square sides may miss poly cubes otherwise + #[inline] + fn expand_cube_map(&mut self, seed: &CubeMapPos, shape: 
&Dim, count: usize) { + use MatrixCol::*; + + if shape.x == shape.y && shape.x > 0 { + let rotz = rot_matrix_points(seed, shape, count, YN, XN, ZN, 1025); + self.do_cube_expansion(&rotz, shape, count); + } + + if shape.y == shape.z && shape.y > 0 { + let rotx = rot_matrix_points(seed, shape, count, XN, ZP, YP, 1025); + self.do_cube_expansion(&rotx, shape, count); + } + if shape.x == shape.z && shape.x > 0 { + let roty = rot_matrix_points(seed, shape, count, ZP, YP, XN, 1025); + self.do_cube_expansion(&roty, shape, count); + } + + self.do_cube_expansion(seed, shape, count); } -} -/// run pointlist based generation algorithm -pub fn gen_polycubes( - n: usize, - parallel: bool, - current: Vec, - calculate_from: usize, - bar: &ProgressBar, -) -> usize { - let t1_start = Instant::now(); - - let seed_count = current.len(); - bar.set_length(seed_count as u64); - bar.set_message(format!( - "seed subsets expanded for N = {}...", - calculate_from - 1 - )); - - let process = |seed| { - let children = enumerate_canonical_children(&seed, calculate_from - 1, n); - bar.set_message(format!( - "seed subsets expanded for N = {}...", - calculate_from - 1, - )); - bar.inc(1); - children - }; + /// Calculate the amount of canonical children of size `target` + /// that polycube `seed` of size `count` has. + /// + /// This function does not store variants of the polycubes that + /// it enumerates, it just keeps the count. This way, memory + /// overhead is minimal. 
+ // TODO: improve this name once we unify this and pointslist + pub fn enumerate_canonical_children_min_mem( + seed: &CubeMapPos, + count: usize, + target: usize, + ) -> usize { + let mut map = Self::new(); + let shape = seed.extrapolate_dim(); - //convert input vector of NaivePolyCubes and convert them to - let count: usize = if parallel { - current - .par_iter() - .map(|seed| seed.into()) - .map(process) - .sum() - } else { - current.iter().map(|seed| seed.into()).map(process).sum() - }; - let time = t1_start.elapsed().as_micros(); - bar.set_message(format!( - "Found {} unique expansions (N = {n}) in {}.{:06}s", - count, - time / 1000000, - time % 1000000 - )); - - bar.finish(); - count - //count_polycubes(&seeds); + let seed = to_min_rot_points(seed, &shape, count); + let shape = seed.extrapolate_dim(); + + map.expand_cube_map(&seed, &shape, count); + + map.inner + .retain(|child| child.is_canonical_root(count, &seed)); + + if count + 1 == target { + map.inner.len() + } else { + map.inner + .iter() + .map(|child| Self::enumerate_canonical_children_min_mem(child, count + 1, target)) + .sum() + } + } } diff --git a/rust/src/iterator.rs b/rust/src/iterator.rs new file mode 100644 index 0000000..4e8bde8 --- /dev/null +++ b/rust/src/iterator.rs @@ -0,0 +1,186 @@ +use std::collections::{HashMap, HashSet}; + +use crate::pcube::RawPCube; + +/// An iterator over polycubes +pub trait PolycubeIterator: Iterator +where + Self: Sized, +{ + /// Returns true if all polycubes returned are in _some_ canonical + /// form. No guarantee is provided about the type of canonicality, nor + /// about uniqueness. However, if this returns `true` it is guaranteed + /// that all cubes returned by this iterator are in a form that can be + /// used directly to check for uniqueness. + fn is_canonical(&self) -> bool; + + fn n_hint(&self) -> Option; +} + +/// A trait for converting a [`PolycubeIterator`] into a [`UniquePolycubeIterator`]. 
+pub trait IntoUniquePolycubeIterator +where + Self: Sized + PolycubeIterator, +{ + fn into_unique(self) -> UniquePolycubes { + UniquePolycubes::new(self) + } +} + +impl IntoUniquePolycubeIterator for T where T: PolycubeIterator {} + +/// An iterator over at least one variant of all unique polycubes +/// of size [`n`](AllPolycubeIterator::n). +/// +/// Iterators that implement this trait guarantee that they yield +/// at least one copy of all polycubes for size `n`, but do not guarantee +/// anything about the orientation of those cubes, nor about the amount +/// of times each copy of that polycubes occurs. +pub trait AllPolycubeIterator: PolycubeIterator { + /// The size of the polycubes returned by this + /// iterator. + fn n(&self) -> usize { + let n_hint = self.n_hint(); + assert!(n_hint.is_some()); + // SAFETY: we asserted that n_hint is some + unsafe { n_hint.unwrap_unchecked() } + } +} + +/// An iterator over unique polycubes. +/// +/// Unique, in this context, means that no two items yielded by this +/// iterator have the same canonical form. +pub trait UniquePolycubeIterator: PolycubeIterator {} + +/// An iterator over all unique polycubes of size [`n`](AllPolycubeIterator::n). +pub trait AllUniquePolycubeIterator: UniquePolycubeIterator + AllPolycubeIterator {} + +/// A struct that yields only unique Polycubes. +pub struct UniquePolycubes { + stored: HashMap<(u8, u8, u8), HashSet>>, + inner: T, +} + +impl UniquePolycubes +where + T: PolycubeIterator, +{ + pub fn new(inner: T) -> Self { + Self { + inner, + stored: HashMap::new(), + } + } +} + +impl Iterator for UniquePolycubes +where + T: Iterator, +{ + type Item = RawPCube; + + fn next(&mut self) -> Option { + while let Some(v) = self.inner.next() { + let entry = self.stored.entry(v.dims()).or_default(); + + // No need to canonicalize, as a `UniquePolycubes` can only be constructed + // from a `PolycubeIterator` that is canonical. 
+ + if entry.contains(v.data()) { + continue; + } + + if entry.insert(v.data().to_vec()) { + return Some(v); + } + } + + None + } +} + +impl PolycubeIterator for UniquePolycubes +where + T: PolycubeIterator, +{ + fn n_hint(&self) -> Option { + self.inner.n_hint() + } + + fn is_canonical(&self) -> bool { + let is_canonical = self.inner.is_canonical(); + assert!(is_canonical); + is_canonical + } +} + +impl UniquePolycubeIterator for UniquePolycubes where T: PolycubeIterator {} + +impl AllPolycubeIterator for UniquePolycubes +where + T: AllPolycubeIterator, +{ + fn n(&self) -> usize { + self.inner.n() + } +} + +impl AllUniquePolycubeIterator for UniquePolycubes where T: AllPolycubeIterator {} + +// TODO: hide this behind a feature? +pub mod indicatif { + use indicatif::ProgressBar; + + use super::{ + AllPolycubeIterator, AllUniquePolycubeIterator, PolycubeIterator, UniquePolycubeIterator, + }; + + pub struct PolycubeProgressBarIter { + bar: ProgressBar, + inner: T, + } + + impl PolycubeProgressBarIter + where + T: PolycubeIterator, + { + pub fn new(bar: ProgressBar, inner: T) -> Self { + Self { inner, bar } + } + } + + impl Iterator for PolycubeProgressBarIter + where + T: Iterator, + { + type Item = T::Item; + + fn next(&mut self) -> Option { + self.bar.inc(1); + self.inner.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } + } + + impl PolycubeIterator for PolycubeProgressBarIter + where + T: PolycubeIterator, + { + fn is_canonical(&self) -> bool { + self.inner.is_canonical() + } + + fn n_hint(&self) -> Option { + self.inner.n_hint() + } + } + + impl ExactSizeIterator for PolycubeProgressBarIter where T: ExactSizeIterator {} + impl AllPolycubeIterator for PolycubeProgressBarIter where T: AllPolycubeIterator {} + impl UniquePolycubeIterator for PolycubeProgressBarIter where T: UniquePolycubeIterator {} + impl AllUniquePolycubeIterator for PolycubeProgressBarIter where T: AllUniquePolycubeIterator {} +} diff --git a/rust/src/lib.rs 
b/rust/src/lib.rs index b45964d..2782875 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -1,6 +1,7 @@ #[cfg(test)] mod test; +pub mod iterator; pub mod pcube; pub mod naive_polycube; diff --git a/rust/src/naive_polycube/expander.rs b/rust/src/naive_polycube/expander.rs index ef3d2b7..d8a3104 100644 --- a/rust/src/naive_polycube/expander.rs +++ b/rust/src/naive_polycube/expander.rs @@ -6,7 +6,7 @@ use super::NaivePolyCube; type Sides = std::array::IntoIter<(usize, usize, usize), 6>; #[derive(Clone)] -struct ExpansionIterator { +pub struct ExpansionIterator { dim_1: usize, dim_2: usize, dim_3: usize, @@ -96,7 +96,7 @@ impl Iterator for ExpansionIterator { } impl NaivePolyCube { - pub fn expand(&self) -> impl Iterator + Clone { + pub fn expand(&self) -> ExpansionIterator { ExpansionIterator { dim_1: 1, dim_2: 1, diff --git a/rust/src/naive_polycube/mod.rs b/rust/src/naive_polycube/mod.rs index 502f604..1027907 100644 --- a/rust/src/naive_polycube/mod.rs +++ b/rust/src/naive_polycube/mod.rs @@ -1,15 +1,53 @@ //! A rather naive polycube implementation. 
-use std::collections::HashSet; +use std::{collections::HashSet, iter::FusedIterator}; -use indicatif::ProgressBar; use parking_lot::RwLock; -use crate::pcube::RawPCube; +use crate::{ + iterator::{ + AllPolycubeIterator, AllUniquePolycubeIterator, PolycubeIterator, UniquePolycubeIterator, + }, + pcube::RawPCube, +}; mod expander; mod rotations; +struct AllUniques { + uniques: std::collections::hash_set::IntoIter, + n: usize, + is_canonical: bool, +} + +impl Iterator for AllUniques { + type Item = RawPCube; + + fn next(&mut self) -> Option { + self.uniques.next().map(|v| RawPCube::from(v)) + } + + fn size_hint(&self) -> (usize, Option) { + self.uniques.size_hint() + } +} + +impl PolycubeIterator for AllUniques { + fn is_canonical(&self) -> bool { + self.is_canonical + } + + fn n_hint(&self) -> Option { + Some(self.n) + } +} + +impl FusedIterator for AllUniques {} +impl ExactSizeIterator for AllUniques {} +impl UniquePolycubeIterator for AllUniques {} +impl AllPolycubeIterator for AllUniques {} +impl AllUniquePolycubeIterator for AllUniques {} + /// A polycube, represented as three dimensions and an array of booleans. /// /// The array of booleans represents the cubes and their presence (if `true`) @@ -358,36 +396,82 @@ impl NaivePolyCube { cube_next } - /// Obtain a list of [`NaivePolyCube`]s representing all unique expansions of the - /// items in `from_set`. - /// - // TODO: turn this into an iterator that yield unique expansions? - pub fn unique_expansions<'a, I>(progress_bar: &ProgressBar, from_set: I) -> Vec + pub fn expansions(from_set: I) -> impl Iterator where - I: Iterator + ExactSizeIterator, + I: Iterator, { - let mut this_level = HashSet::new(); + struct AllExpansions { + current_expander: Option, + from_set: T, + } - for value in from_set { - for expansion in value.expand().map(|v| v.crop()) { - // Skip expansions that are already in the list. 
- if this_level.contains(&expansion) { - continue; + impl AllExpansions + where + T: Iterator, + { + fn new(mut from_set: T) -> Self { + let current_expander = from_set.next().map(|v| v.expand()); + Self { + from_set, + current_expander, } + } + } - let max = expansion.canonical_form(); - - let missing = !this_level.contains(&max); + impl Iterator for AllExpansions + where + T: Iterator, + { + type Item = NaivePolyCube; + + fn next(&mut self) -> Option { + while self.current_expander.is_some() { + if let Some(ref mut current_expander) = self.current_expander { + if let Some(next_expansion) = current_expander.next().map(|v| v.crop()) { + return Some(next_expansion); + } + } - if missing { - this_level.insert(max); + self.current_expander = self + .from_set + .next() + .map(|v| NaivePolyCube::from(v).expand()); } - } - progress_bar.inc(1); + None + } } - this_level.into_iter().collect() + impl FusedIterator for AllExpansions where T: Iterator {} + impl ExactSizeIterator for AllExpansions where T: ExactSizeIterator {} + + AllExpansions::new(from_set) + } + + /// Obtain a list of [`NaivePolyCube`]s representing all unique expansions of the + /// items in `from_set`. + pub fn unique_expansions(from_set: I) -> impl AllUniquePolycubeIterator + where + I: AllPolycubeIterator, + { + let out_n = from_set.n() + 1; + let mut uniques = HashSet::new(); + + Self::expansions(from_set.map(NaivePolyCube::from)).for_each(|v| { + if uniques.contains(&v) { + return; + } + + let max = v.canonical_form(); + + uniques.insert(max); + }); + + AllUniques { + uniques: uniques.into_iter(), + n: out_n, + is_canonical: false, + } } /// Check whether this cube is already cropped. @@ -508,52 +592,37 @@ impl NaivePolyCube { impl NaivePolyCube { // TODO: turn this into an iterator that yield unique expansions? 
- pub fn unique_expansions_rayon<'a, I>(bar: &ProgressBar, from_set: I) -> Vec + pub fn unique_expansions_rayon(from_set: I) -> impl AllUniquePolycubeIterator where - I: Iterator + ExactSizeIterator + Clone + Send + Sync, + I: AllPolycubeIterator + ExactSizeIterator + Send + Sync + 'static, { use rayon::prelude::*; - if from_set.len() == 0 { - return Vec::new(); - } - - let available_parallelism = num_cpus::get(); - - let chunk_size = (from_set.len() / available_parallelism) + 1; - let chunks = (from_set.len() + chunk_size - 1) / chunk_size; - - let chunk_iterator = (0..chunks).into_par_iter().map(|v| { - from_set - .clone() - .skip(v * chunk_size) - .take(chunk_size) - .into_iter() - }); + let next_n = from_set.n() + 1; let this_level = RwLock::new(HashSet::new()); - chunk_iterator.for_each(|v| { - for value in v { - for expansion in value.expand().map(|v| v.crop()) { - // Skip expansions that are already in the list. - if this_level.read().contains(&expansion) { - continue; - } + from_set.par_bridge().for_each(|v| { + for expansion in NaivePolyCube::from(v).expand().map(|v| v.crop()) { + // Skip expansions that are already in the list. 
+ if this_level.read().contains(&expansion) { + continue; + } - let max = expansion.canonical_form(); + let max = expansion.canonical_form(); - let missing = !this_level.read().contains(&max); + let missing = !this_level.read().contains(&max); - if missing { - this_level.write().insert(max); - } + if missing { + this_level.write().insert(max); } - - bar.inc(1); } }); - this_level.into_inner().into_iter().collect() + AllUniques { + uniques: this_level.into_inner().into_iter(), + n: next_n, + is_canonical: false, + } } } diff --git a/rust/src/pcube/mod.rs b/rust/src/pcube/mod.rs index 1d73b18..8dc6175 100644 --- a/rust/src/pcube/mod.rs +++ b/rust/src/pcube/mod.rs @@ -3,6 +3,7 @@ use std::{ fs::File, io::{ErrorKind, Read, Seek, Write}, + iter::Peekable, path::Path, }; @@ -13,6 +14,10 @@ mod compression; pub use compression::Compression; use compression::{Reader, Writer}; +use crate::iterator::{ + AllPolycubeIterator, AllUniquePolycubeIterator, PolycubeIterator, UniquePolycubeIterator, +}; + const MAGIC: [u8; 4] = [0xCB, 0xEC, 0xCB, 0xEC]; /// A pcube file. @@ -22,7 +27,6 @@ pub struct PCubeFile where T: Read, { - had_error: bool, input: Reader, len: Option, cubes_read: usize, @@ -37,39 +41,14 @@ where fn size_hint(&self) -> (usize, Option) { if let Some(len) = self.len { - (0, Some(len)) + (len, Some(len)) } else { (0, None) } } fn next(&mut self) -> Option { - if self.had_error { - return None; - } - - let next_cube = RawPCube::unpack(&mut self.input); - - let next_cube = match (next_cube, self.len) { - (Err(_), None) => return None, - (Err(e), Some(expected)) => { - if expected == self.cubes_read { - return None; - } else { - self.had_error = true; - let msg = format!( - "Expected {expected} cubes, but failed to read after {} cubes. 
Error: {e}", - self.cubes_read - ); - return Some(Err(std::io::Error::new(ErrorKind::InvalidData, msg))); - } - } - (Ok(c), _) => c, - }; - - self.cubes_read += 1; - - Some(Ok(next_cube)) + self.next() } } @@ -155,9 +134,40 @@ where len, cubes_read: 0, cubes_are_canonical: canonicalized, - had_error: false, }) } + + pub fn next(&mut self) -> Option> { + let next_cube = RawPCube::unpack(&mut self.input); + + match (next_cube, self.len) { + (Ok(c), _) => { + self.cubes_read += 1; + Some(Ok(c)) + } + (Err(_), None) => return None, + (Err(e), Some(expected)) => { + if expected == self.cubes_read { + return None; + } else { + let msg = format!( + "Expected {expected} cubes, but failed to read after {} cubes. Error: {e}", + self.cubes_read + ); + return Some(Err(std::io::Error::new(ErrorKind::InvalidData, msg))); + } + } + } + } + + pub fn into_iter(self) -> impl PolycubeIterator { + IgnoreErrorIter::new(self) + } + + /// This is by no means guaranteed, but makes life a bit easier + pub fn assume_all_unique(self) -> AllUnique { + AllUnique::new(self) + } } impl PCubeFile { @@ -269,3 +279,131 @@ impl PCubeFile { Ok(()) } } + +struct IgnoreErrorIter +where + T: Read, +{ + inner: PCubeFile, +} + +impl IgnoreErrorIter +where + T: Read, +{ + pub fn new(inner: PCubeFile) -> Self { + Self { inner } + } +} + +impl Iterator for IgnoreErrorIter +where + T: Read, +{ + type Item = RawPCube; + + fn next(&mut self) -> Option { + self.inner.next().map(|v| v.ok()).flatten() + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } +} + +impl PolycubeIterator for IgnoreErrorIter +where + T: Read, +{ + fn is_canonical(&self) -> bool { + self.inner.canonical() + } + + fn n_hint(&self) -> Option { + None + } +} + +pub struct AllUnique +where + T: Read, +{ + n: usize, + canonical: bool, + inner: Peekable>, +} + +impl AllUnique +where + T: Read, +{ + pub fn new(inner: PCubeFile) -> Self { + let canonical = inner.canonical(); + let mut peekable = 
IgnoreErrorIter::new(inner).peekable(); + + let n = if let Some(peek) = peekable.peek() { + let mut n = 0; + let (x, y, z) = peek.dims(); + for x in 0..x { + for y in 0..y { + for z in 0..z { + if peek.get(x, y, z) { + n += 1; + } + } + } + } + + n + } else { + 0 + }; + + Self { + n, + canonical, + inner: peekable, + } + } +} + +impl Iterator for AllUnique +where + T: Read, +{ + type Item = RawPCube; + + fn next(&mut self) -> Option { + self.inner.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } +} + +impl PolycubeIterator for AllUnique +where + T: Read, +{ + fn is_canonical(&self) -> bool { + self.canonical + } + + fn n_hint(&self) -> Option { + Some(self.n()) + } +} + +impl UniquePolycubeIterator for AllUnique where T: Read {} + +impl AllPolycubeIterator for AllUnique +where + T: Read, +{ + fn n(&self) -> usize { + self.n + } +} + +impl AllUniquePolycubeIterator for AllUnique where T: Read {} diff --git a/rust/src/pcube/raw_pcube.rs b/rust/src/pcube/raw_pcube.rs index 65f8863..768fa7c 100644 --- a/rust/src/pcube/raw_pcube.rs +++ b/rust/src/pcube/raw_pcube.rs @@ -33,10 +33,14 @@ impl RawPCube { (self.dim_1, self.dim_2, self.dim_3) } - pub fn data(&self) -> &[u8] { + pub fn data(&self) -> &Vec { &self.data } + pub fn into_data(self) -> Vec { + self.data + } + pub fn new(dim_1: u8, dim_2: u8, dim_3: u8, data: Vec) -> Option { let len = (dim_1 as usize) * (dim_2 as usize) * (dim_3 as usize); let byte_len = (len + 7) / 8;