Commit

fix: clippy linting
chrisdickinson committed Dec 4, 2019
1 parent 8084867 commit b971714
Showing 5 changed files with 37 additions and 43 deletions.
12 changes: 5 additions & 7 deletions src/bin/eos.rs
@@ -1,17 +1,15 @@
#![feature(async_closure)]
use anyhow::{self, bail};
use async_std::fs;
-use async_std::prelude::*;
use colored::Colorize;
use digest::Digest;
use entropic_object_store::object::Object;
use entropic_object_store::stores::loose::LooseStore;
use entropic_object_store::stores::packed::PackedStore;
use entropic_object_store::stores::{ReadableStore, WritableStore};
use futures::future::join_all;
-use futures::prelude::*;
use sha2::Sha256;
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
use std::str::FromStr;
use structopt::StructOpt;

@@ -60,7 +58,7 @@ async fn load_file<D: Digest + Send + Sync, S: WritableStore<D>>(
file: PathBuf,
) -> anyhow::Result<String> {
match fs::read(&file).await {
-Err(e) => Ok(format!(
+Err(_) => Ok(format!(
"{} failed to read {:?}",
"ERR:".black().on_red(),
file
@@ -92,7 +90,7 @@ async fn load_file<D: Digest + Send + Sync, S: WritableStore<D>>(

async fn cmd_add<D: Digest + Send + Sync, S: WritableStore<D>>(
store: S,
-files: &Vec<PathBuf>,
+files: &[PathBuf],
) -> anyhow::Result<()> {
let mut results = Vec::new();
for file in files.iter().filter_map(|file| file.canonicalize().ok()) {
@@ -105,7 +103,7 @@ async fn cmd_add<D: Digest + Send + Sync, S: WritableStore<D>>(
Ok(())
}

-async fn cmd_get<S: ReadableStore>(store: S, hashes: &Vec<String>) -> anyhow::Result<()> {
+async fn cmd_get<S: ReadableStore>(store: S, hashes: &[String]) -> anyhow::Result<()> {
let cksize = Sha256::new().result().len();
let valid_hashes: Vec<_> = hashes
.iter()
@@ -117,7 +115,7 @@ async fn cmd_get<S: ReadableStore>(store: S, hashes: &Vec<String>) -> anyhow::Re
Some(decoded)
})
.collect();
-let cleaned_hashes: Vec<_> = valid_hashes.iter().map(|xs| hex::encode(xs)).collect();
+let cleaned_hashes: Vec<_> = valid_hashes.iter().map(hex::encode).collect();

let mut results = Vec::new();
for hash in valid_hashes {
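The eos.rs changes above track two routine clippy lints: ptr_arg, which prefers &[T] over &Vec<T> in function signatures, and redundant_closure, which passes a function directly instead of wrapping it in a closure. A minimal standalone sketch of both patterns (illustrative function names, not code from this repository):

// clippy::ptr_arg — a slice parameter accepts Vec<T>, arrays, and other slices
fn total(xs: &[u64]) -> u64 {
    xs.iter().sum()
}

// clippy::redundant_closure — `.map(|xs| hex::encode(xs))` becomes `.map(hex::encode)`
fn hex_all(chunks: &[Vec<u8>]) -> Vec<String> {
    chunks.iter().map(hex::encode).collect()
}

Calling total(&some_vec) still works because &Vec<u64> deref-coerces to &[u64], which is why the slice signature is strictly more general.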
1 change: 0 additions & 1 deletion src/object.rs
@@ -1,4 +1,3 @@
-use crate::errors::ObjectStoreError;
use std::fmt::{Display, Formatter, Result as FmtResult};

#[derive(Debug)]
40 changes: 18 additions & 22 deletions src/stores/loose.rs
@@ -1,17 +1,15 @@
use crate::object::Object;
use crate::stores::{ReadableStore, WritableStore};
use anyhow::{self, bail};
-use async_std::io::prelude::*;
use async_std::prelude::*;
-use async_std::{fs, prelude::*, stream::Stream};
+use async_std::{fs, stream::Stream};
use async_trait::async_trait;
use flate2::bufread::ZlibDecoder;
use flate2::write::ZlibEncoder;
use flate2::Compression;
use futures::future::join_all;
use rayon::prelude::*;
use sha2::Digest;
-use std::io::prelude::*;
use std::io::Write;
use std::io::{BufRead, BufReader};
use std::marker::PhantomData;
@@ -33,10 +31,9 @@ impl<D: 'static + Digest + Send + Sync> LooseStore<D> {
}

// https://stackoverflow.com/a/18732276
-pub(crate) async fn estimate_count() -> usize {
-0
-}

+// pub(crate) async fn estimate_count() -> usize {
+// unimplemented!()
+// }
pub async fn to_packed_store(&self) -> anyhow::Result<()> {
// faster to do the dir listing synchronously
let entries = std::fs::read_dir(&self.location)?.filter_map(|xs| {
@@ -120,7 +117,7 @@ impl<D: 'static + Digest + Send + Sync> LooseStore<D> {
offs += size_bytes.len();

let mut enc = ZlibEncoder::new(Vec::new(), Compression::default());
-enc.write_all(bytes);
+enc.write_all(bytes)?;
let finished = &enc.finish()?;
offs += finished.len();
fd.write_all(finished).await?;
@@ -136,7 +133,6 @@ impl<D: 'static + Digest + Send + Sync> LooseStore<D> {
let mut object_idx: usize = 0;
while fanout_idx < 256 && object_idx < sorted.len() {
while sorted[object_idx].0[0] as usize != fanout_idx {
-let bytes = (object_idx as u32).to_be_bytes();
fanout[fanout_idx] = (object_idx as u32).to_be();
fanout_idx += 1;
if fanout_idx == 256 {
@@ -174,10 +170,10 @@ impl<D: 'static + Digest + Send + Sync> LooseStore<D> {
fd.write_all(&version[..]).await?;
let fanout_bytes = unsafe { std::mem::transmute::<[u32; 256], [u8; 256 * 4]>(fanout) };
fd.write_all(&fanout_bytes[..]).await?;
-for (hash, offset) in &sorted {
+for (hash, _) in &sorted {
fd.write_all(&hash).await?;
}
-for (hash, offset) in &sorted {
+for (_, offset) in &sorted {
let offs_u32 = (**offset) as u32;
fd.write_all(&offs_u32.to_be_bytes()).await?;
}
@@ -196,7 +192,7 @@ impl<D> Stream for LooseObjectStream<D> {
type Item = Object<Vec<u8>>;
fn poll_next(
self: Pin<&mut Self>,
-cx: &mut futures::task::Context,
+_cx: &mut futures::task::Context,
) -> futures::task::Poll<Option<Self::Item>> {
unimplemented!();
}
@@ -256,13 +252,13 @@ impl<D: 'static + Digest + Send + Sync> WritableStore<D> for LooseStore<D> {

async fn add_stream<'a, S: Stream<Item = &'a [u8]> + Send>(
&mut self,
-item: S,
-size_hint: Option<usize>,
+_item: S,
+_size_hint: Option<usize>,
) -> anyhow::Result<()> {
unimplemented!()
}

-async fn remove<T: Into<D> + Send>(&mut self, item: T) -> bool {
+async fn remove<T: Into<D> + Send>(&mut self, _item: T) -> bool {
unimplemented!()
}

Expand Down Expand Up @@ -307,26 +303,26 @@ impl<D: 'static + Digest + Send + Sync> ReadableStore for LooseStore<D> {
let mut object = Vec::new();

// TODO: it would be nice to do this in a thread/threadpool!
-BufRead::read_until(&mut reader, 0x20, &mut type_vec);
-BufRead::read_until(&mut reader, 0, &mut size_vec);
+BufRead::read_until(&mut reader, 0x20, &mut type_vec)?;
+BufRead::read_until(&mut reader, 0, &mut size_vec)?;
std::io::copy(&mut reader, &mut object)?;

let str_size = std::str::from_utf8(&size_vec[..])?;
let size = str_size[..str_size.len() - 1].parse::<usize>()?;
if object.len() != size {
-return bail!(
+bail!(
"mismatched len: got {} bytes, expected {}",
object.len(),
size
-);
+)
}

-return match std::str::from_utf8(&type_vec[..])? {
+match std::str::from_utf8(&type_vec[..])? {
"blob " => Ok(Some(Object::Blob(object))),
"sign " => Ok(Some(Object::Signature(object))),
"vers " => Ok(Some(Object::Version(object))),
_ => bail!("Could not parse object type"),
-};
+}
}

async fn list(&self) -> Self::ObjectStream {
@@ -335,7 +331,7 @@ impl<D: 'static + Digest + Send + Sync> ReadableStore for LooseStore<D> {

async fn get_stream<'a, T: AsRef<[u8]> + Send, R: Stream<Item = &'a [u8]>>(
&self,
-item: T,
+_item: T,
) -> Option<R> {
unimplemented!()
}
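Most of the loose.rs diff is clippy's unused-results and unused-variables cleanup: the Results returned by write_all and read_until are now propagated with ?, trailing return keywords are dropped (anyhow's bail! already expands to a return Err(...), so prefixing it with return was redundant), and intentionally ignored arguments get a leading underscore. A minimal sketch of the Result-propagation pattern, assuming the flate2 and anyhow crates already used in this file:

use flate2::{write::ZlibEncoder, Compression};
use std::io::Write;

fn compress(bytes: &[u8]) -> anyhow::Result<Vec<u8>> {
    let mut enc = ZlibEncoder::new(Vec::new(), Compression::default());
    enc.write_all(bytes)?; // ignoring this io::Result is what the lint flagged
    Ok(enc.finish()?)      // expression position, so no trailing `return`
}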
6 changes: 2 additions & 4 deletions src/stores/multiple.rs
@@ -1,9 +1,7 @@
use crate::object::Object;
-use crate::stores::{ReadableStore, WritableStore};
-use async_std::io::prelude::*;
+use crate::stores::ReadableStore;
use async_std::stream::Stream;
use async_trait::async_trait;
-use digest::Digest;

struct MultipleStore<R0: ReadableStore, R1: ReadableStore>(R0, R1);
struct FusedObjectStream;
@@ -31,7 +29,7 @@ impl<R0: ReadableStore + Send + Sync, R1: ReadableStore + Send + Sync> ReadableS

async fn get_stream<'a, T: AsRef<[u8]> + Send, R: Stream<Item = &'a [u8]>>(
&self,
-item: T,
+_item: T,
) -> Option<R> {
unimplemented!()
}
21 changes: 12 additions & 9 deletions src/stores/packed.rs
@@ -1,7 +1,7 @@
use crate::object::Object;
use crate::stores::ReadableStore;
use anyhow::{self, bail};
-use async_std::{fs, stream::Stream};
+use async_std::stream::Stream;
use async_trait::async_trait;
use byteorder::{BigEndian, ReadBytesExt};
use digest::Digest;
@@ -12,7 +12,7 @@ use std::io::prelude::*;
use std::io::Read;
use std::io::{Cursor, Seek, SeekFrom, Write};
use std::marker::PhantomData;
-use std::path::{Path, PathBuf};
+use std::path::Path;

#[derive(Clone)]
pub struct PackedObjectStream<D> {
@@ -34,11 +34,11 @@ impl<D: Digest + Send + Sync> PackedIndex<D> {
let mut version = [0u8; 4];
input.read_exact(&mut version)?;

if (&magic != b"EIDX") {
if &magic != b"EIDX" {
bail!("invalid pack index");
}

-if (version != unsafe { std::mem::transmute::<u32, [u8; 4]>(0u32.to_be()) }) {
+if version != unsafe { std::mem::transmute::<u32, [u8; 4]>(0u32.to_be()) } {
bail!("unsupported pack index version");
}

@@ -177,11 +177,14 @@ pub fn packfile_read<R: BufRead, W: Write>(
}

match obj_type {
-0...4 => {
+0..=4 => {
let mut deflate_stream = ZlibDecoder::new(input);
-std::io::copy(&mut deflate_stream, output)?;
+let written = std::io::copy(&mut deflate_stream, output)?;
*read_bytes = 1 + count + deflate_stream.total_in();
-return Ok(obj_type);
+if written != size {
+bail!("expected object of size {}, got object of size {}", size, written)
+}
+Ok(obj_type)
}

_ => {
@@ -229,7 +232,7 @@ impl<D: 'static + Digest + Send + Sync> ReadableStore for PackedStore<D> {
let (start, end) = maybe_bounds.unwrap();
match self.objects.read_bounds(start, end) {
Ok(x) => Ok(Some(x)),
Err(e) => bail!("failed"),
Err(e) => bail!(e),
}
}

@@ -239,7 +242,7 @@ impl<D: 'static + Digest + Send + Sync> ReadableStore for PackedStore<D> {

async fn get_stream<'a, T: AsRef<[u8]> + Send, R: Stream<Item = &'a [u8]>>(
&self,
-item: T,
+_item: T,
) -> Option<R> {
unimplemented!()
}
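Two syntax fixes dominate the packed.rs diff: inclusive range patterns now use 0..=4 (the 0...4 form is deprecated), and the parentheses around if conditions go away (the unused_parens lint). The packfile_read hunk also adds a real check: std::io::copy returns the number of bytes written, which is now compared against the size declared in the object header. A standalone sketch of that check, assuming flate2's bufread::ZlibDecoder as used above:

use anyhow::bail;
use flate2::bufread::ZlibDecoder;
use std::io::{BufRead, Write};

fn inflate_exact<R: BufRead, W: Write>(input: R, output: &mut W, size: u64) -> anyhow::Result<()> {
    let mut deflate_stream = ZlibDecoder::new(input);
    let written = std::io::copy(&mut deflate_stream, output)?; // bytes written to `output`
    if written != size {
        bail!("expected object of size {}, got object of size {}", size, written);
    }
    Ok(())
}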
