
Commit

make clippy happy
Rob Patro committed Jan 12, 2023
1 parent 09ffd62 commit 301e5e2
Showing 7 changed files with 24 additions and 25 deletions.
12 changes: 6 additions & 6 deletions src/collate.rs
@@ -250,7 +250,7 @@ fn correct_unmapped_counts(
 .expect("couldn't serialize corrected unmapped bc count.");
 }

-#[allow(clippy::too_many_arguments)]
+#[allow(clippy::too_many_arguments, clippy::manual_clamp)]
 pub fn collate_with_temp<P1, P2>(
 input_dir: P1,
 rad_dir: P2,
@@ -331,7 +331,7 @@ where

 let cm_path = parent.join("collate.json");
 let mut cm_file =
-std::fs::File::create(&cm_path).context("could not create metadata file.")?;
+std::fs::File::create(cm_path).context("could not create metadata file.")?;

 let cm_info_string =
 serde_json::to_string_pretty(&collate_meta).context("could not format json.")?;
@@ -536,7 +536,7 @@ where
 let min_rec_len = 24usize; // smallest size an individual record can be loaded in memory
 let max_rec = max_records as usize;
 let num_buckets = temp_buckets.len();
-let num_threads = n_workers as usize;
+let num_threads = n_workers;
 let loc_buffer_size = (min_rec_len + (most_ambig_record * 4_usize) - 4_usize).max(
 (1000_usize.max((min_rec_len * max_rec) / (num_buckets * num_threads))).min(262_144_usize),
 ); //131072_usize);
@@ -682,7 +682,7 @@ where
 let observed = temp_bucket.2.num_records_written.load(Ordering::SeqCst);
 assert_eq!(expected, observed);

-let md = std::fs::metadata(parent.join(&format!("bucket_{}.tmp", i)))?;
+let md = std::fs::metadata(parent.join(format!("bucket_{}.tmp", i)))?;
 let expected_bytes = temp_bucket.2.num_bytes_written.load(Ordering::SeqCst);
 let observed_bytes = md.len();
 assert_eq!(expected_bytes, observed_bytes);
@@ -691,7 +691,7 @@ where
 //std::process::exit(1);

 // to hold the temp buckets threads will process
-let slack = ((n_workers / 2) as usize).max(1_usize);
+let slack = (n_workers / 2).max(1_usize);
 let temp_bucket_queue_size = slack + n_workers;
 let fq = Arc::new(ArrayQueue::<(
 u32,
@@ -740,7 +740,7 @@ where
 buckets_remaining.fetch_sub(1, Ordering::SeqCst);
 cmap.clear();

-let fname = parent.join(&format!("bucket_{}.tmp", temp_bucket.2.bucket_id));
+let fname = parent.join(format!("bucket_{}.tmp", temp_bucket.2.bucket_id));
 // create a new handle for reading
 let tfile = std::fs::File::open(&fname).expect("couldn't open temporary file.");
 let mut treader = BufReader::new(tfile);
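
Note: the new `clippy::manual_clamp` allow silences a lint (added in recent clippy releases) that flags chained `.max(lo).min(hi)` bounding, like the `loc_buffer_size` expression in the hunk above, and suggests `clamp(lo, hi)` instead. A minimal sketch of the pattern being allowed, with made-up bounds rather than the values used in `collate_with_temp`:

    // Hypothetical helper illustrating what `clippy::manual_clamp` flags.
    fn bound_buffer_size(requested: usize) -> usize {
        // chained max/min: at least 1_000, at most 262_144
        requested.max(1_000).min(262_144)
    }

    fn bound_buffer_size_clamped(requested: usize) -> usize {
        // clippy's suggested form; `clamp` panics if lo > hi, which is one
        // reason a crate may prefer to allow the lint rather than rewrite
        requested.clamp(1_000, 262_144)
    }

    fn main() {
        assert_eq!(bound_buffer_size(12), bound_buffer_size_clamped(12));
        assert_eq!(bound_buffer_size(500_000), 262_144);
    }
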
2 changes: 1 addition & 1 deletion src/convert.rs
@@ -305,7 +305,7 @@ where
 let expected_bar_length = bam_bytes / ((buf_limit as u64) * 24);
 // let expected_bar_length = 50u64 ;// bam_bytes / ((buf_limit as u64) * 24);

-let pbar_inner = ProgressBar::new(expected_bar_length as u64);
+let pbar_inner = ProgressBar::new(expected_bar_length);
 pbar_inner.set_style(sty);
 pbar_inner.tick();

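
The convert.rs change removes a no-op cast: `expected_bar_length` is already `u64` (indicatif's `ProgressBar::new` takes a `u64`), so `as u64` changes nothing and presumably trips `clippy::unnecessary_cast`. A small self-contained sketch of the same situation, with hypothetical sizes in place of the real BAM-derived values:

    fn main() {
        let bam_bytes: u64 = 1_000_000; // hypothetical input size
        let buf_limit: u32 = 32; // hypothetical record-buffer limit

        // `buf_limit as u64` is a real widening cast and stays;
        // the division then already yields a u64
        let expected_bar_length = bam_bytes / ((buf_limit as u64) * 24);

        // ProgressBar::new(expected_bar_length as u64) -- the `as u64` is the no-op clippy flags
        let len: u64 = expected_bar_length; // the value can be passed along directly
        println!("expected bar length: {len}");
    }
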
6 changes: 3 additions & 3 deletions src/em.rs
@@ -41,7 +41,7 @@ pub enum EmInitType {

 #[allow(dead_code)]
 fn mean(data: &[f64]) -> Option<f64> {
-let sum = data.iter().sum::<f64>() as f64;
+let sum = data.iter().sum::<f64>();
 let count = data.len();

 match count {
@@ -57,7 +57,7 @@ fn std_deviation(data: &[f64]) -> Option<f64> {
 let variance = data
 .iter()
 .map(|value| {
-let diff = data_mean - (*value as f64);
+let diff = data_mean - *value;

 diff * diff
 })
@@ -389,7 +389,7 @@ pub fn em_optimize(

 max_rel_diff = match rel_diff > max_rel_diff as f32 {
 true => rel_diff as f64,
-false => max_rel_diff as f64,
+false => max_rel_diff,
 };

 if rel_diff > REL_DIFF_TOLERANCE {
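
The em.rs edits are the floating-point flavor of the same lint: `sum::<f64>()`, `*value`, and `max_rel_diff` are already `f64`, so the `as f64` casts are redundant, while the genuine narrowing comparison against an `f32` (`max_rel_diff as f32`) stays. A sketch of that distinction, with invented values:

    fn main() {
        let data = [1.0_f64, 2.0, 4.0];

        // already f64: writing `.sum::<f64>() as f64` would be flagged
        let sum = data.iter().sum::<f64>();
        let mean = sum / data.len() as f64; // usize -> f64 is a real conversion, so it stays

        let rel_diff: f32 = 0.25; // hypothetical per-iteration value
        let max_rel_diff: f64 = 0.10; // hypothetical running maximum

        // narrowing f64 -> f32 for the comparison is a real conversion and is not linted
        let new_max = if rel_diff > max_rel_diff as f32 {
            rel_diff as f64 // widening f32 -> f64 also stays
        } else {
            max_rel_diff // `max_rel_diff as f64` here is the no-op that was removed
        };

        println!("mean = {mean}, new max = {new_max}");
    }
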
6 changes: 3 additions & 3 deletions src/infer.rs
@@ -87,7 +87,7 @@ pub fn infer(
 let num_genes = global_eq_classes.num_genes;

 let usa_offsets = if usa_mode {
-Some(((num_genes / 3) as usize, (2 * num_genes / 3) as usize))
+Some((num_genes / 3, (2 * num_genes / 3)))
 } else {
 None
 };
@@ -277,7 +277,7 @@ pub fn infer(

 // fill out the triplet matrix in memory
 for (ind, val) in expressed_ind.iter().zip(expressed_vec.iter()) {
-writer.add_triplet(row_index as usize, *ind, *val);
+writer.add_triplet(row_index, *ind, *val);
 }
 /*
 writeln!(
@@ -391,7 +391,7 @@ pub fn infer(
 let output_matrix_path = output_path.join("quants_mat.mtx");
 let writer_deref = trimat.lock();
 let writer = &*writer_deref.unwrap();
-sprs::io::write_matrix_market(&output_matrix_path, writer)?;
+sprs::io::write_matrix_market(output_matrix_path, writer)?;

 Ok(())
 }
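
Dropping the `&` in the final `write_matrix_market` call works because such functions are generic over `impl AsRef<Path>`, which an owned `PathBuf` satisfies directly; since the path isn't used again afterwards, the borrow was presumably flagged by clippy's needless-borrow lint. A sketch using a stand-in function rather than the real `sprs` API:

    use std::path::{Path, PathBuf};

    // stand-in for an API such as sprs::io::write_matrix_market, which accepts
    // any `impl AsRef<Path>` for its output location
    fn write_to(path: impl AsRef<Path>) {
        println!("would write to {}", path.as_ref().display());
    }

    fn main() {
        let output_path = PathBuf::from("af_quant"); // hypothetical output directory
        let output_matrix_path = output_path.join("quants_mat.mtx");

        // write_to(&output_matrix_path); // compiles, but the borrow is needless here
        write_to(output_matrix_path); // pass the owned PathBuf; it isn't needed afterwards
    }
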
1 change: 1 addition & 0 deletions src/main.rs
@@ -44,6 +44,7 @@ fn gen_random_kmer(k: usize) -> String {
 s
 }

+#[allow(clippy::manual_clamp)]
 fn main() -> anyhow::Result<()> {
 let num_hardware_threads = num_cpus::get() as u32;
 let max_num_threads: String = (num_cpus::get() as u32).to_string();
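
As in collate.rs, the fix in main.rs is an opt-out rather than a rewrite: an `#[allow(...)]` attached to an item covers that item's whole body, so annotating `fn main` silences `manual_clamp` only there. The helper below is purely illustrative (it is not code from the commit); it just shows the kind of `.max(..).min(..)` bounding such an allow keeps as-is:

    // The attribute scopes the allow to this one function; the rest of
    // the crate still gets the lint.
    #[allow(clippy::manual_clamp)]
    fn pick_thread_count(requested: u32, available: u32) -> u32 {
        // clippy would otherwise suggest `requested.clamp(1, available)`
        requested.max(1).min(available)
    }

    fn main() {
        let available = 8; // stand-in for num_cpus::get() as u32
        println!("using {} threads", pick_thread_count(16, available));
    }
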
2 changes: 1 addition & 1 deletion src/pugutils.rs
@@ -201,7 +201,7 @@ pub fn extract_graph(

 // recall that we processed this eq class as a neighbor of eqid
 hset[*eq2id as usize] = 1;
-idxvec.push(*eq2id as u32);
+idxvec.push(*eq2id);
 let eq2 = &eqmap.eqc_info[*eq2id as usize];

 // compare all the umis between eqid and eq2id
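
The pugutils.rs line is the same no-op-cast cleanup, but the neighboring lines show the flip side: `*eq2id as usize` stays because indexing genuinely needs a `usize`, while pushing into a `Vec<u32>` needs no cast at all. A tiny sketch with made-up values:

    fn main() {
        let eq2id: u32 = 3; // hypothetical equivalence-class id
        let mut hset = vec![0u8; 8];
        let mut idxvec: Vec<u32> = Vec::new();

        hset[eq2id as usize] = 1; // u32 -> usize is a real conversion, required for indexing
        idxvec.push(eq2id); // `eq2id as u32` would be a no-op, so it goes away

        println!("{hset:?} {idxvec:?}");
    }
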
20 changes: 9 additions & 11 deletions src/quant.rs
@@ -216,7 +216,7 @@ fn write_eqc_counts(

 // and write it to file.
 let mtx_path = output_path.join("geqc_counts.mtx");
-sprs::io::write_matrix_market(&mtx_path, &eqmat).context("could not write geqc_counts.mtx")?;
+sprs::io::write_matrix_market(mtx_path, &eqmat).context("could not write geqc_counts.mtx")?;

 // write the sets of genes that define each eqc
 let gn_eq_path = output_path.join("gene_eqclass.txt.gz");
@@ -248,7 +248,7 @@ fn write_eqc_counts(
 // offset for unspliced gene ids
 let unspliced_offset = (num_genes / 3) as u32;
 // offset for ambiguous gene ids
-let ambig_offset = (2 * unspliced_offset) as u32;
+let ambig_offset = 2 * unspliced_offset;
 // to hold the gene labels as we write them.
 let mut gl;

@@ -603,13 +603,12 @@ pub fn do_quantify<T: Read>(mut br: T, quant_opts: QuantOpts) -> anyhow::Result<
 };

 let usa_offsets = if usa_mode {
-Some(((num_rows / 3) as usize, (2 * num_rows / 3) as usize))
+Some((num_rows / 3, (2 * num_rows / 3)))
 } else {
 None
 };

-let trimat =
-sprs::TriMatI::<f32, u32>::with_capacity((num_cells as usize, num_rows as usize), tmcap);
+let trimat = sprs::TriMatI::<f32, u32>::with_capacity((num_cells as usize, num_rows), tmcap);

 let bc_writer = Arc::new(Mutex::new(QuantOutputInfo {
 barcode_file: BufWriter::new(bc_file),
@@ -676,11 +675,11 @@ pub fn do_quantify<T: Read>(mut br: T, quant_opts: QuantOpts) -> anyhow::Result<
 EqMapType::GeneLevel => {
 // get the max spliced gene ID and add 1 to get the unspliced ID
 // and another 1 to get the size.
-(gene_name_to_id
+gene_name_to_id
 .values()
 .max()
 .expect("gene name to id map should not be empty.")
-+ 2) as u32
++ 2
 }
 };

@@ -870,8 +869,7 @@ pub fn do_quantify<T: Read>(mut br: T, quant_opts: QuantOpts) -> anyhow::Result<
 let g = pugutils::extract_graph(&eq_map, pug_exact_umi, &log);
 // for the PUG resolution algorithm, set the hasher
 // that will be used based on the cell barcode.
-let s =
-ahash::RandomState::with_seeds(bc as u64, 7u64, 1u64, 8u64);
+let s = ahash::RandomState::with_seeds(bc, 7u64, 1u64, 8u64);
 let pug_stats = pugutils::get_num_molecules(
 &g,
 &eq_map,
@@ -1129,7 +1127,7 @@ pub fn do_quantify<T: Read>(mut br: T, quant_opts: QuantOpts) -> anyhow::Result<
 } else {
 // fill out the triplet matrix in memory
 for (ind, val) in expressed_ind.iter().zip(expressed_vec.iter()) {
-writer.trimat.add_triplet(row_index as usize, *ind, *val);
+writer.trimat.add_triplet(row_index, *ind, *val);
 }
 }
 writeln!(
@@ -1266,7 +1264,7 @@ pub fn do_quantify<T: Read>(mut br: T, quant_opts: QuantOpts) -> anyhow::Result<
 // now remove it
 fs::remove_file(&mat_path)?;
 let mtx_path = output_matrix_path.join("quants_mat.mtx");
-sprs::io::write_matrix_market(&mtx_path, &writer.trimat)?;
+sprs::io::write_matrix_market(mtx_path, &writer.trimat)?;
 }

 let pb_msg = format!(
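
One nuance in these borrow removals: passing an owned `PathBuf` moves it, so dropping the `&` is only appropriate when the path isn't used again, which is presumably why `write_matrix_market(mtx_path, ...)` loses its borrow while `fs::remove_file(&mat_path)` above keeps one. A sketch with a stand-in writer and hypothetical file names rather than the real `sprs` call:

    use std::path::{Path, PathBuf};

    // stand-in for a consumer that is generic over `impl AsRef<Path>`
    fn write_matrix(path: impl AsRef<Path>) {
        println!("writing {}", path.as_ref().display());
    }

    fn main() {
        let output_dir = PathBuf::from("example_output"); // hypothetical paths
        let mat_path = output_dir.join("old_matrix.bin");
        let mtx_path = output_dir.join("quants_mat.mtx");

        write_matrix(&mat_path); // keep the borrow: `mat_path` is still used below
        println!("removing {}", mat_path.display());

        write_matrix(mtx_path); // no later use, so the owned PathBuf can move in
    }
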
