Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Cherry pick fix some clippy warnings to active_release #930

Merged
Merged 1 commit into active_release on Nov 9, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion arrow/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ impl From<csv_crate::Error> for ArrowError {
csv_crate::ErrorKind::Io(error) => ArrowError::CsvError(error.to_string()),
csv_crate::ErrorKind::Utf8 { pos: _, err } => ArrowError::CsvError(format!(
"Encountered UTF-8 error while reading CSV file: {}",
err.to_string()
err
)),
csv_crate::ErrorKind::UnequalLengths {
expected_len, len, ..
Expand Down
2 changes: 1 addition & 1 deletion arrow/src/util/test_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ fn get_data_dir(udf_env: &str, submodule_data: &str) -> Result<PathBuf, Box<dyn
} else {
return Err(format!(
"the data dir `{}` defined by env {} not found",
pb.display().to_string(),
pb.display(),
udf_env
)
.into());
Expand Down
16 changes: 2 additions & 14 deletions parquet/src/data_type.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ use crate::util::{

/// Rust representation for logical type INT96, value is backed by an array of `u32`.
/// The type only takes 12 bytes, without extra padding.
#[derive(Clone, Debug, PartialOrd)]
#[derive(Clone, Debug, PartialOrd, Default)]
pub struct Int96 {
value: Option<[u32; 3]>,
}
Expand Down Expand Up @@ -75,12 +75,6 @@ impl Int96 {
}
}

impl Default for Int96 {
fn default() -> Self {
Self { value: None }
}
}

impl PartialEq for Int96 {
fn eq(&self, other: &Int96) -> bool {
match (&self.value, &other.value) {
Expand Down Expand Up @@ -109,7 +103,7 @@ impl fmt::Display for Int96 {

/// Rust representation for BYTE_ARRAY and FIXED_LEN_BYTE_ARRAY Parquet physical types.
/// Value is backed by a byte buffer.
#[derive(Clone)]
#[derive(Clone, Default)]
pub struct ByteArray {
data: Option<ByteBufferPtr>,
}
Expand Down Expand Up @@ -231,12 +225,6 @@ impl From<ByteBuffer> for ByteArray {
}
}

impl Default for ByteArray {
fn default() -> Self {
ByteArray { data: None }
}
}

impl PartialEq for ByteArray {
fn eq(&self, other: &ByteArray) -> bool {
match (&self.data, &other.data) {
Expand Down
4 changes: 2 additions & 2 deletions parquet/src/record/reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ impl TreeBuilder {
.column_descr_ptr();
let col_reader = row_group_reader.get_column_reader(orig_index).unwrap();
let column = TripletIter::new(col_descr, col_reader, self.batch_size);
Reader::PrimitiveReader(field, column)
Reader::PrimitiveReader(field, Box::new(column))
} else {
match field.get_basic_info().converted_type() {
// List types
Expand Down Expand Up @@ -319,7 +319,7 @@ impl TreeBuilder {
/// Reader tree for record assembly
pub enum Reader {
// Primitive reader with type information and triplet iterator
PrimitiveReader(TypePtr, TripletIter),
PrimitiveReader(TypePtr, Box<TripletIter>),
// Optional reader with definition level of a parent and a reader
OptionReader(i16, Box<Reader>),
// Group (struct) reader with type information, definition level and list of child
Expand Down
2 changes: 1 addition & 1 deletion parquet/src/schema/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ impl<'a> Tokenizer<'a> {
pub fn from_str(string: &'a str) -> Self {
let vec = string
.split_whitespace()
.flat_map(|t| Self::split_token(t))
.flat_map(Self::split_token)
.collect();
Tokenizer {
tokens: vec,
Expand Down
5 changes: 1 addition & 4 deletions parquet_derive/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -85,10 +85,7 @@ pub fn parquet_record_writer(input: proc_macro::TokenStream) -> proc_macro::Toke
Data::Union(_) => unimplemented!("Union currently is not supported"),
};

let field_infos: Vec<_> = fields
.iter()
.map(|f: &syn::Field| parquet_field::Field::from(f))
.collect();
let field_infos: Vec<_> = fields.iter().map(parquet_field::Field::from).collect();

let writer_snippets: Vec<proc_macro2::TokenStream> =
field_infos.iter().map(|x| x.writer_snippet()).collect();
Expand Down