make clippy happy
Rob Patro committed Jan 12, 2023
1 parent 7dd7324 commit 215a688
Showing 8 changed files with 26 additions and 26 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/rust.yml
@@ -18,7 +18,7 @@ jobs:
 runs-on: ${{ matrix.os }}
 steps:
 - name: Checkout repository
-uses: actions/checkout@v2.6.0
+uses: actions/checkout@v3

 - name: Install stable toolchain
 uses: dtolnay/rust-toolchain@stable
@@ -36,7 +36,7 @@ jobs:
 os: [ubuntu-latest, macos-latest]
 runs-on: ${{ matrix.os }}
 steps:
-- uses: actions/checkout@v2.6.0
+- uses: actions/checkout@v3
 - name: Build
 run: cargo build --verbose
 - name: Run tests
@@ -50,7 +50,7 @@ jobs:
 runs-on: ${{ matrix.os }}
 steps:
 - name: Checkout repository
-uses: actions/checkout@v2.6.0
+uses: actions/checkout@v3

 - name: Install stable toolchain
 uses: dtolnay/rust-toolchain@stable
@@ -70,7 +70,7 @@ jobs:
 os: [ubuntu-latest, macos-latest]
 runs-on: ${{ matrix.os }}
 steps:
-- uses: actions/checkout@v2.6.0
+- uses: actions/checkout@v3
 - name: Install
 run: cargo build --release
 - name: sample run
14 changes: 7 additions & 7 deletions src/cellfilter.rs
@@ -234,7 +234,7 @@ fn process_unfiltered(
 gpl_opts: &GenPermitListOpts,
 ) -> anyhow::Result<u64> {
 let parent = std::path::Path::new(output_dir);
-std::fs::create_dir_all(&parent)
+std::fs::create_dir_all(parent)
 .with_context(|| format!("couldn't create directory path {}", parent.display()))?;

 // the smallest number of reads we'll allow per barcode
@@ -374,7 +374,7 @@ fn process_unfiltered(
 );

 let parent = std::path::Path::new(output_dir);
-std::fs::create_dir_all(&parent).with_context(|| {
+std::fs::create_dir_all(parent).with_context(|| {
 format!(
 "couldn't create path to output directory {}",
 parent.display()
@@ -409,7 +409,7 @@ fn process_unfiltered(

 let pm_path = parent.join("permit_map.bin");
 let pm_file =
-std::fs::File::create(&pm_path).context("could not create serialization file.")?;
+std::fs::File::create(pm_path).context("could not create serialization file.")?;
 let mut pm_writer = BufWriter::new(&pm_file);
 bincode::serialize_into(&mut pm_writer, &hm)
 .context("couldn't serialize permit list mapping.")?;
@@ -425,7 +425,7 @@ fn process_unfiltered(
 });

 let m_path = parent.join("generate_permit_list.json");
-let mut m_file = std::fs::File::create(&m_path).context("could not create metadata file.")?;
+let mut m_file = std::fs::File::create(m_path).context("could not create metadata file.")?;

 let meta_info_string =
 serde_json::to_string_pretty(&meta_info).context("could not format json.")?;
@@ -525,7 +525,7 @@ fn process_filtered(
 }

 let parent = std::path::Path::new(output_dir);
-std::fs::create_dir_all(&parent).with_context(|| {
+std::fs::create_dir_all(parent).with_context(|| {
 format!(
 "failed to create path to output location {}",
 parent.display()
@@ -550,7 +550,7 @@ fn process_filtered(
 };

 let s_path = parent.join("permit_map.bin");
-let s_file = std::fs::File::create(&s_path).context("could not create serialization file.")?;
+let s_file = std::fs::File::create(s_path).context("could not create serialization file.")?;
 let mut s_writer = BufWriter::new(&s_file);
 bincode::serialize_into(&mut s_writer, &full_permit_list)
 .context("couldn't serialize permit list.")?;
@@ -566,7 +566,7 @@ fn process_filtered(
 });

 let m_path = parent.join("generate_permit_list.json");
-let mut m_file = std::fs::File::create(&m_path).context("could not create metadata file.")?;
+let mut m_file = std::fs::File::create(m_path).context("could not create metadata file.")?;

 let meta_info_string =
 serde_json::to_string_pretty(&meta_info).context("could not format json.")?;
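Note on the edits in this file (and the matching ones in src/collate.rs, src/convert.rs, and src/utils.rs below): they drop a redundant `&` on arguments that already satisfy the callee's `AsRef<Path>` bound, which clippy reports as `needless_borrow`. A minimal sketch of the pattern, with an illustrative function and path rather than the crate's own code:

```rust
use std::path::Path;

// `create_dir_all` accepts any `P: AsRef<Path>`. `parent` is already a `&Path`,
// so `&parent` only builds a `&&Path` that clippy flags as a needless borrow.
fn ensure_output_dir(output_dir: &str) -> std::io::Result<()> {
    let parent = Path::new(output_dir);
    // before: std::fs::create_dir_all(&parent)?;
    std::fs::create_dir_all(parent)?; // after: pass the reference through directly
    Ok(())
}

fn main() -> std::io::Result<()> {
    // hypothetical path, used only to exercise the helper
    ensure_output_dir("af_quant_out")
}
```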
4 changes: 2 additions & 2 deletions src/collate.rs
@@ -217,7 +217,7 @@ fn correct_unmapped_counts(
 unmapped_file: &std::path::Path,
 parent: &std::path::Path,
 ) {
-let i_file = File::open(&unmapped_file).unwrap();
+let i_file = File::open(unmapped_file).unwrap();
 let mut br = BufReader::new(i_file);

 // enough to hold a key value pair (a u64 key and u32 value)
@@ -244,7 +244,7 @@ fn correct_unmapped_counts(
 }

 let s_path = parent.join("unmapped_bc_count_collated.bin");
-let s_file = std::fs::File::create(&s_path).expect("could not create serialization file.");
+let s_file = std::fs::File::create(s_path).expect("could not create serialization file.");
 let mut s_writer = BufWriter::new(&s_file);
 bincode::serialize_into(&mut s_writer, &unmapped_count)
 .expect("couldn't serialize corrected unmapped bc count.");
2 changes: 1 addition & 1 deletion src/convert.rs
@@ -99,7 +99,7 @@ where
 {
 let oname = Path::new(rad_file.as_ref());
 let parent = oname.parent().unwrap();
-std::fs::create_dir_all(&parent).unwrap();
+std::fs::create_dir_all(parent).unwrap();

 if oname.exists() {
 std::fs::remove_file(oname).expect("could not be deleted");
10 changes: 5 additions & 5 deletions src/em.rs
@@ -136,7 +136,7 @@ pub(crate) fn em_update_subset(
 }
 }
 } else {
-let tidx = labels.get(0).expect("can't extract labels");
+let tidx = labels.first().expect("can't extract labels");
 alphas_out[*tidx as usize] += *count as f32;
 }
 }
@@ -167,7 +167,7 @@ pub(crate) fn em_update_subset_usa(
 }
 }
 } else {
-let tidx = labels.get(0).expect("can't extract labels");
+let tidx = labels.first().expect("can't extract labels");
 alphas_out[*tidx as usize] += *count as f32;
 }
 }
@@ -192,7 +192,7 @@ pub fn em_optimize_subset(
 for (i, count) in cell_data {
 let labels = eqclasses.refs_for_eqc(*i);
 if labels.len() == 1 {
-let idx = labels.get(0).expect("can't extract labels");
+let idx = labels.first().expect("can't extract labels");
 alphas_in[*idx as usize] += *count as f32;
 unique_evidence[*idx as usize] = true;
 } else {
@@ -319,7 +319,7 @@ pub fn em_update(
 }
 }
 } else {
-let tidx = labels.get(0).expect("can't extract labels");
+let tidx = labels.first().expect("can't extract labels");
 alphas_out[*tidx as usize] += *count as f32;
 }
 }
@@ -339,7 +339,7 @@ pub fn em_optimize(

 for (labels, count) in eqclasses {
 if labels.len() == 1 {
-let idx = labels.get(0).expect("can't extract labels");
+let idx = labels.first().expect("can't extract labels");
 alphas_in[*idx as usize] += *count as f32;
 unique_evidence[*idx as usize] = true;
 } else {
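The em.rs changes all address clippy's `get_first` lint: `labels.first()` expresses "the first element, if any" more directly than `labels.get(0)` and returns the same `Option<&T>`. A small sketch of the single-label branch, with illustrative names (the real functions also handle the multi-label case):

```rust
// Credit the whole count to the single transcript label of an equivalence class.
fn add_single_label_count(labels: &[u32], count: u32, alphas_out: &mut [f32]) {
    if labels.len() == 1 {
        // before: let tidx = labels.get(0).expect("can't extract labels");
        let tidx = labels.first().expect("can't extract labels");
        alphas_out[*tidx as usize] += count as f32;
    }
}

fn main() {
    let mut alphas = vec![0.0_f32; 4];
    add_single_label_count(&[2], 3, &mut alphas);
    assert_eq!(alphas[2], 3.0);
}
```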
6 changes: 3 additions & 3 deletions src/main.rs
@@ -116,7 +116,7 @@ fn main() -> anyhow::Result<()> {
 .value_parser(pathbuf_file_exists_validator)
 )
 .group(ArgGroup::new("filter-method")
-.args(&["knee-distance", "expect-cells", "force-cells", "valid-bc", "unfiltered-pl"])
+.args(["knee-distance", "expect-cells", "force-cells", "valid-bc", "unfiltered-pl"])
 .required(true)
 )
 .arg(
@@ -134,7 +134,7 @@ fn main() -> anyhow::Result<()> {
 .arg(arg!(-r --"rad-dir" <RADFILE> "the directory containing the RAD file to be collated")
 .required(true)
 .value_parser(pathbuf_directory_exists_validator))
-.arg(arg!(-t --threads <THREADS> "number of threads to use for processing").value_parser(value_parser!(u32)).default_value(max_num_collate_threads.clone()))
+.arg(arg!(-t --threads <THREADS> "number of threads to use for processing").value_parser(value_parser!(u32)).default_value(max_num_collate_threads))
 .arg(arg!(-c --compress "compress the output collated RAD file"))
 .arg(arg!(-m --"max-records" <MAXRECORDS> "the maximum number of read records to keep in memory at once")
 .value_parser(value_parser!(u32))
@@ -204,7 +204,7 @@ fn main() -> anyhow::Result<()> {
 .required(true)
 .value_parser(pathbuf_file_exists_validator))
 .arg(arg!(-o --"output-dir" <OUTPUTDIR> "output directory where quantification results will be written").required(true).value_parser(value_parser!(PathBuf)))
-.arg(arg!(-t --threads <THREADS> "number of threads to use for processing").value_parser(value_parser!(u32)).default_value(max_num_threads.clone()))
+.arg(arg!(-t --threads <THREADS> "number of threads to use for processing").value_parser(value_parser!(u32)).default_value(max_num_threads))
 .arg(arg!(--usa "flag specifying that input equivalence classes were computed in USA mode"))
 .arg(arg!(--"quant-subset" <SFILE> "file containing list of barcodes to quantify, those not in this list will be ignored").value_parser(pathbuf_file_exists_validator))
 .arg(arg!(--"use-mtx" "flag for writing output matrix in matrix market format (default)"))
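Two different findings are silenced in main.rs: the `&` in `.args(&[...])` is another needless borrow, since the array of names can be passed by value to the generic parameter, and the `.clone()` calls on the default thread-count strings can be reported as `redundant_clone` because the originals are never used again. A stand-alone sketch of both patterns, using illustrative helpers rather than the clap builder itself:

```rust
// Stand-in for an API that takes any iterable of string-like names.
fn register_args<I, S>(names: I) -> usize
where
    I: IntoIterator<Item = S>,
    S: AsRef<str>,
{
    names.into_iter().count()
}

// Stand-in for a builder method that takes ownership of its default value.
fn set_default_value(v: String) -> String {
    v
}

fn main() {
    // before: register_args(&["knee-distance", "expect-cells", "force-cells"]);
    let n = register_args(["knee-distance", "expect-cells", "force-cells"]);

    let max_num_threads = String::from("8");
    // before: set_default_value(max_num_threads.clone());
    // the clone was redundant: this is the last use of `max_num_threads`
    let default = set_default_value(max_num_threads);

    println!("{n} filter args, default threads = {default}");
}
```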
4 changes: 2 additions & 2 deletions src/quant.rs
@@ -47,7 +47,7 @@ use libradicl::rad_types;

 type BufferedGzFile = BufWriter<GzEncoder<fs::File>>;

-#[derive(PartialEq, Debug, Clone, Copy, Serialize)]
+#[derive(PartialEq, Eq, Debug, Clone, Copy, Serialize)]
 pub enum SplicedAmbiguityModel {
 PreferAmbiguity,
 WinnerTakeAll,
@@ -71,7 +71,7 @@ impl FromStr for SplicedAmbiguityModel {
 }
 }

-#[derive(PartialEq, Debug, Clone, Copy, Serialize)]
+#[derive(PartialEq, Eq, Debug, Clone, Copy, Serialize)]
 pub enum ResolutionStrategy {
 Trivial,
 CellRangerLike,
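The derives here (and on `InternalVersionInfo` in src/utils.rs below) answer clippy's `derive_partial_eq_without_eq`: when total equality can be derived, adding `Eq` next to `PartialEq` documents that fact and lets the type satisfy `Eq` bounds (for example as a hash-map key). A sketch with one of the enums, `Serialize` omitted here so it compiles without serde:

```rust
// Fieldless enums support total equality, so `Eq` can be derived alongside `PartialEq`.
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum ResolutionStrategy {
    Trivial,
    CellRangerLike,
}

fn main() {
    let a = ResolutionStrategy::Trivial;
    assert_eq!(a, ResolutionStrategy::Trivial);
    assert_ne!(a, ResolutionStrategy::CellRangerLike);
}
```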
4 changes: 2 additions & 2 deletions src/utils.rs
@@ -80,7 +80,7 @@ pub fn write_permit_list_freq(
 bclen: u16,
 permit_freq_map: &HashMap<u64, u64, ahash::RandomState>,
 ) -> Result<(), Box<dyn std::error::Error>> {
-let output = std::fs::File::create(&o_path)?;
+let output = std::fs::File::create(o_path)?;
 let mut writer = BufWriter::new(&output);

 {
@@ -731,7 +731,7 @@ pub fn is_velo_mode(input_dir: &PathBuf) -> bool {
 }

 #[allow(dead_code)]
-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Eq)]
 pub struct InternalVersionInfo {
 pub major: u32,
 pub minor: u32,
