Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
40 commits
Select commit Hold shift + click to select a range
7d03cdf
Add iterator traits
datdenkikniet Jul 23, 2023
4cb5cee
Update docs & fix iterators
datdenkikniet Jul 23, 2023
704ee4f
Add AllUnique iterator for pcube file
datdenkikniet Jul 23, 2023
041dc10
Completely trait-ified expansion for NaivePolyCube
datdenkikniet Jul 23, 2023
963fa8e
Also make rayon trait based
datdenkikniet Jul 23, 2023
3d017ce
Split it up a bit
datdenkikniet Jul 23, 2023
e3feebd
Yah yah
datdenkikniet Jul 23, 2023
40ac94e
Only crop once
datdenkikniet Jul 23, 2023
3f30a3a
Fixup rayon impl
datdenkikniet Jul 24, 2023
923483d
Remove crossbeam-channel
datdenkikniet Jul 24, 2023
7fbc137
Merge branch 'main' into traits
datdenkikniet Jul 24, 2023
ed49b1f
More iterators!
datdenkikniet Jul 24, 2023
5489dc8
Loop so we don't have to copy everything that last time
datdenkikniet Jul 24, 2023
0aaeb40
Just friggin par_bridge...
datdenkikniet Jul 24, 2023
653f0e9
Fewer bounds plz
datdenkikniet Jul 24, 2023
886d6be
Fix
datdenkikniet Jul 24, 2023
f88efdf
De-generify this a little
datdenkikniet Jul 24, 2023
5a4fc9a
save_to_cache
datdenkikniet Jul 24, 2023
45d3485
Fix unknown bar
datdenkikniet Jul 24, 2023
56ff9c8
Fix this a bit, for now
datdenkikniet Jul 24, 2023
7fe64c8
Let's divy this up a bit more nicely
datdenkikniet Jul 24, 2023
4993b57
Fixup for N <= 2
datdenkikniet Jul 24, 2023
53e03a7
Fix behavior for cacheless
datdenkikniet Jul 24, 2023
d2bbd73
Not sure where this came from
datdenkikniet Jul 25, 2023
4856d7e
Loosen this trait bound
datdenkikniet Jul 25, 2023
882718f
hashless: move into `impl` block
datdenkikniet Jul 25, 2023
c428b40
Rustify expand_ys a bit
datdenkikniet Jul 25, 2023
aa05067
Macros, anyone?
datdenkikniet Jul 25, 2023
94471eb
Using MatrixCol::*
datdenkikniet Jul 25, 2023
869be62
Move enumerate_hashless into `enumerate`
datdenkikniet Jul 25, 2023
1f3f966
Make sure hashless operates on canonical items
datdenkikniet Jul 25, 2023
118c3ce
Cleanup
datdenkikniet Jul 25, 2023
be8af4b
Cleanup some more
datdenkikniet Jul 25, 2023
ef6b460
Add common finish_bar
datdenkikniet Jul 25, 2023
f5c744d
Forward size hint
datdenkikniet Jul 25, 2023
d677c6b
Remove remaining when finished
datdenkikniet Jul 25, 2023
94fa435
Update docs
datdenkikniet Jul 26, 2023
d6c8fbc
Just go with MapStore for now
datdenkikniet Jul 26, 2023
9ea5276
Clarify min_mem
datdenkikniet Jul 26, 2023
b9be0ec
Perform expansions as iterator
datdenkikniet Jul 26, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 14 additions & 14 deletions rust/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion rust/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,4 +26,4 @@ hashbrown = { version = "0.14", features = ["rayon"] }

[[bin]]
name = "opencubes"
path = "./src/cli.rs"
path = "./src/cli/cli.rs"
260 changes: 37 additions & 223 deletions rust/src/cli.rs → rust/src/cli/cli.rs
Original file line number Diff line number Diff line change
@@ -1,24 +1,46 @@
use std::{
collections::{BTreeMap, HashSet},
io::ErrorKind,
path::PathBuf,
time::{Duration, Instant},
};

use clap::{Args, Parser, Subcommand, ValueEnum};
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use opencubes::{
hashless,
naive_polycube::NaivePolyCube,
pcube::{PCubeFile, RawPCube},
pointlist, rotation_reduced,
};
use opencubes::{naive_polycube::NaivePolyCube, pcube::PCubeFile};
use rayon::prelude::{IntoParallelIterator, ParallelIterator};

mod enumerate;
use enumerate::enumerate;

/// Finish `bar` with a summary message reporting the number of expansions
/// found for order `n` and the elapsed wall-clock time.
///
/// If the bar has a known length, its style is first reset to a plain
/// progress template (dropping any ETA segment) with the position column
/// sized to the length's digit count, so the final render lines up.
fn finish_bar(bar: &ProgressBar, duration: Duration, expansions: usize, n: usize) {
    let time = duration.as_micros();
    let secs = time / 1_000_000;
    let micros = time % 1_000_000;

    if let Some(len) = bar.length() {
        let pos_width = format!("{}", len).len();

        let template = format!(
            "[{{elapsed_precise}}] {{bar:40.cyan/blue}} {{pos:>{pos_width}}}/{{len}} {{msg}}"
        );

        bar.set_style(
            ProgressStyle::with_template(&template)
                .unwrap()
                .progress_chars("#>-"),
        );
    }

    // Zero-pad the fractional part to six digits: without `:06`, a duration
    // of 1 s + 5 µs would render as "1.5 s" instead of "1.000005 s".
    bar.finish_with_message(format!(
        "Done! Found {expansions} expansions (N = {n}) in {secs}.{micros:06} s"
    ));
}

fn unknown_bar() -> ProgressBar {
let style = ProgressStyle::with_template("[{elapsed_precise}] [{spinner:10.cyan/blue}] {msg}")
.unwrap()
.tick_strings(&[
">---------",
"=>--------",
"<=>-------",
"-<=>------",
Expand All @@ -29,19 +51,23 @@ fn unknown_bar() -> ProgressBar {
"------<=>-",
"-------<=>",
"--------<=",
"---------<",
"--------<=",
"-------<=>",
"------<=>-",
"----<=>---",
"-----<=>--",
"---<=>----",
"--<=>-----",
"-<=>------",
"<=>-------",
"=>--------",
"----------",
]);

ProgressBar::new(100).with_style(style)
let bar = ProgressBar::new(100).with_style(style);

bar.enable_steady_tick(Duration::from_millis(66));

bar
}

pub fn make_bar(len: u64) -> indicatif::ProgressBar {
Expand All @@ -50,7 +76,7 @@ pub fn make_bar(len: u64) -> indicatif::ProgressBar {
let pos_width = format!("{len}").len();

let template =
format!("[{{elapsed_precise}}] {{bar:40.cyan/blue}} {{pos:>{pos_width}}}/{{len}} remaining: [{{eta_precise}}] {{msg}}");
format!("[{{elapsed_precise}}] {{bar:40.cyan/blue}} {{pos:>{pos_width}}}/{{len}} {{msg}} remaining: [{{eta_precise}}]");

bar.set_style(
ProgressStyle::with_template(&template)
Expand Down Expand Up @@ -280,218 +306,6 @@ pub fn validate(opts: &ValidateArgs) -> std::io::Result<()> {
Ok(())
}

/// Attempt to open the `.pcube` cache file for polycubes of order `n`.
///
/// Returns `None` (after printing a diagnostic) when the file is missing,
/// unreadable, or contains invalid data.
fn load_cache_file(n: usize) -> Option<PCubeFile> {
    let name = format!("cubes_{n}.pcube");

    match PCubeFile::new_file(&name) {
        Ok(file) => Some(file),
        Err(e) => {
            // InvalidData/Other indicate a corrupt or malformed file rather
            // than a simple open failure, so report it differently.
            if e.kind() == ErrorKind::InvalidData || e.kind() == ErrorKind::Other {
                println!("Encountered invalid cache file {name}. Error: {e}.");
            } else {
                println!("Could not load cache file '{name}'. Error: {e}");
            }
            None
        }
    }
}

/// Load the closest cache file at or below order `n` into a vec.
///
/// Returns the loaded (deduplicated) cubes and the next order to calculate
/// from (one above the cached order). When no usable cache file exists,
/// returns the single unit cube and order 2.
fn load_cache(n: usize) -> (Vec<RawPCube>, usize) {
    let calculate_from = 2;

    // Search downwards for the highest-order cache file available.
    for n in (calculate_from..n).rev() {
        let name = format!("cubes_{n}.pcube");
        let cache = if let Some(file) = load_cache_file(n) {
            file
        } else {
            continue;
        };

        println!("Found cache for N = {n}. Loading data...");

        if !cache.canonical() {
            println!("Cached cubes are not canonical. Canonicalizing...")
        }

        let len = cache.len();

        let mut error = None;
        let mut total_loaded = 0;

        // Count every entry and remember the first read error, while letting
        // successfully-read cubes through.
        let filter = |value| {
            total_loaded += 1;
            match value {
                Ok(v) => Some(v),
                Err(e) => {
                    error = Some(e);
                    None
                }
            }
        };

        // Collect into a HashSet to drop any duplicate cubes in the file.
        let cached: HashSet<_> = cache.filter_map(filter).collect();

        if let Some(e) = error {
            println!("Error occurred while loading {name}. Error: {e}");
        } else {
            // The file header may not record a length; fall back to the
            // number of entries actually read.
            let total_len = len.unwrap_or(total_loaded);

            if total_len != cached.len() {
                println!("There were non-unique cubes in the cache file. Continuing...")
            }

            return (cached.into_iter().collect(), n + 1);
        }
    }

    println!("no cache file found reverting to start building from n=1");
    let mut base = RawPCube::new_empty(1, 1, 1);
    base.set(0, 0, 0, true);

    // Seed with the single unit cube; calculate from 2 because order 1 is
    // already in the vec. (No clone needed — `base` is moved in.)
    (vec![base], 2)
}

/// Repeatedly expand `current` using `expansion_fn` for each order from
/// `calculate_from` through `n`, optionally writing each generation to a
/// cache file.
///
/// `expansion_fn` receives the progress bar and an iterator over the current
/// generation, and returns the next generation of unique expansions.
/// Returns the final generation (order `n`); empty when `n == 0`.
fn unique_expansions<F>(
    mut expansion_fn: F,
    use_cache: bool,
    n: usize,
    compression: Compression,
    current: Vec<RawPCube>,
    calculate_from: usize,
    bar: &ProgressBar,
) -> Vec<NaivePolyCube>
where
    F: FnMut(&ProgressBar, std::slice::Iter<'_, NaivePolyCube>) -> Vec<NaivePolyCube>,
{
    if n == 0 {
        return Vec::new();
    }

    // Canonicalize the seed cubes so every generation operates on canonical
    // forms.
    let mut current = current
        .into_iter()
        .map(NaivePolyCube::from)
        .map(|v| v.canonical_form())
        .collect::<Vec<_>>();

    for i in calculate_from..=n {
        bar.set_length(current.len() as u64);
        bar.set_message(format!("base polycubes expanded for N = {i}..."));

        let start = Instant::now();

        // `bar` is already a `&ProgressBar`; no extra borrow needed.
        let next = expansion_fn(bar, current.iter());

        bar.set_message(format!(
            "Found {} unique expansions (N = {i}) in {} ms.",
            next.len(),
            start.elapsed().as_millis(),
        ));

        bar.finish();

        if use_cache {
            let name = format!("cubes_{i}.pcube");
            // Never overwrite an existing cache file.
            if std::fs::File::open(&name).is_err() {
                println!("Saving {} cubes to cache file", next.len());
                PCubeFile::write_file(false, compression.into(), next.iter().map(Into::into), &name)
                    .unwrap();
            } else {
                println!("Cache file already exists for N = {i}. Not overwriting.");
            }
        }

        current = next;
    }

    current
}

/// Run polycube enumeration up to order `n` according to `opts`, printing
/// the final count of unique polycubes and the total duration.
///
/// Seeds either from the closest on-disk cache file (unless `--no-cache`)
/// or from the single unit cube, then dispatches to the enumeration
/// strategy selected by `opts.mode` / `opts.no_parallelism`.
pub fn enumerate(opts: &EnumerateOpts) {
    let n = opts.n;
    let cache = !opts.no_cache;

    if n < 2 {
        println!("n < 2 unsupported");
        return;
    }

    let start = Instant::now();

    let (seed_list, startn) = if cache {
        load_cache(n)
    } else {
        let mut base = RawPCube::new_empty(1, 1, 1);
        base.set(0, 0, 0, true);

        // Seed with the single unit cube; calculate from 2 because order 1
        // is already in the vec.
        (vec![base], 2)
    };
    let bar = make_bar(seed_list.len() as u64);

    // Select enumeration function to run.
    let cubes_len = match (opts.mode, opts.no_parallelism) {
        (EnumerationMode::Standard, true) => {
            let cubes = unique_expansions(
                |bar, current: std::slice::Iter<'_, NaivePolyCube>| {
                    NaivePolyCube::unique_expansions(bar, current)
                },
                cache,
                n,
                opts.cache_compression,
                seed_list,
                startn,
                &bar,
            );
            cubes.len()
        }
        (EnumerationMode::Standard, false) => {
            let cubes = unique_expansions(
                |bar, current: std::slice::Iter<'_, NaivePolyCube>| {
                    NaivePolyCube::unique_expansions_rayon(bar, current)
                },
                cache,
                n,
                opts.cache_compression,
                seed_list,
                startn,
                &bar,
            );
            cubes.len()
        }
        (EnumerationMode::RotationReduced, para) => {
            if n > 16 {
                println!("n > 16 not supported for rotation reduced");
                return;
            }
            if !para {
                println!("no parallel implementation for rotation-reduced, running single threaded")
            }
            rotation_reduced::gen_polycubes(n, &bar)
        }
        (EnumerationMode::PointList, para) => {
            if n > 16 {
                println!("n > 16 not supported for point-list");
                return;
            }
            let cubes = pointlist::gen_polycubes(n, cache, !para, seed_list, startn, &bar);
            cubes.len()
        }
        (EnumerationMode::Hashless, para) => {
            hashless::gen_polycubes(n, !para, seed_list, startn, &bar)
        }
    };

    let duration = start.elapsed();

    println!("Unique polycubes found for N = {n}: {cubes_len}.");
    println!("Duration: {} ms", duration.as_millis());
}

pub fn convert(opts: &ConvertArgs) {
if opts.output_path.is_some() && opts.path.len() > 1 {
println!("Cannot convert more than 1 file when output path is provided");
Expand Down
Loading