Skip to content

Commit

Permalink
Fix warnings introduced by Rust/Clippy 1.57.0 (#992)
Browse files Browse the repository at this point in the history
* Remove needless borrows identified by clippy

https://rust-lang.github.io/rust-clippy/master/index.html#needless_borrow

* Remove muts that are no longer needed

* Derive Default instead of using an equivalent manual impl

Identified by clippy.

https://rust-lang.github.io/rust-clippy/master/index.html#derivable_impls

* Remove redundant closures

Identified by clippy.

https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure

* Allow dead code on a field Rust now identifies as never read
  • Loading branch information
carols10cents committed Dec 3, 2021
1 parent e9be49d commit 9fb2a5f
Show file tree
Hide file tree
Showing 12 changed files with 44 additions and 57 deletions.
12 changes: 6 additions & 6 deletions arrow/src/array/equal/utils.rs
Expand Up @@ -121,14 +121,14 @@ pub(super) fn child_logical_null_buffer(
let array_offset = parent_data.offset();
let bitmap_len = bit_util::ceil(parent_len * len, 8);
let mut buffer = MutableBuffer::from_len_zeroed(bitmap_len);
let mut null_slice = buffer.as_slice_mut();
let null_slice = buffer.as_slice_mut();
(array_offset..parent_len + array_offset).for_each(|index| {
let start = index * len;
let end = start + len;
let mask = parent_bitmap.is_set(index);
(start..end).for_each(|child_index| {
if mask && self_null_bitmap.is_set(child_index) {
bit_util::set_bit(&mut null_slice, child_index);
bit_util::set_bit(null_slice, child_index);
}
});
});
Expand All @@ -151,12 +151,12 @@ pub(super) fn child_logical_null_buffer(
// slow path
let array_offset = parent_data.offset();
let mut buffer = MutableBuffer::new_null(parent_len);
let mut null_slice = buffer.as_slice_mut();
let null_slice = buffer.as_slice_mut();
(0..parent_len).for_each(|index| {
if parent_bitmap.is_set(index + array_offset)
&& self_null_bitmap.is_set(index + array_offset)
{
bit_util::set_bit(&mut null_slice, index);
bit_util::set_bit(null_slice, index);
}
});
Some(buffer.into())
Expand All @@ -182,7 +182,7 @@ fn logical_list_bitmap<OffsetSize: OffsetSizeTrait>(
let offset_start = offsets.first().unwrap().to_usize().unwrap();
let offset_len = offsets.get(parent_data.len()).unwrap().to_usize().unwrap();
let mut buffer = MutableBuffer::new_null(offset_len - offset_start);
let mut null_slice = buffer.as_slice_mut();
let null_slice = buffer.as_slice_mut();

offsets
.windows(2)
Expand All @@ -194,7 +194,7 @@ fn logical_list_bitmap<OffsetSize: OffsetSizeTrait>(
let mask = parent_bitmap.is_set(index);
(start..end).for_each(|child_index| {
if mask && child_bitmap.is_set(child_index) {
bit_util::set_bit(&mut null_slice, child_index - offset_start);
bit_util::set_bit(null_slice, child_index - offset_start);
}
});
});
Expand Down
2 changes: 1 addition & 1 deletion arrow/src/array/transform/boolean.rs
Expand Up @@ -29,7 +29,7 @@ pub(super) fn build_extend(array: &ArrayData) -> Extend {
let buffer = &mut mutable.buffer1;
resize_for_bits(buffer, mutable.len + len);
set_bits(
&mut buffer.as_slice_mut(),
buffer.as_slice_mut(),
values,
mutable.len,
array.offset() + start,
Expand Down
10 changes: 1 addition & 9 deletions arrow/src/compute/kernels/take.rs
Expand Up @@ -296,22 +296,14 @@ where
}

/// Options that define how `take` should behave
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Default)]
pub struct TakeOptions {
    /// Perform bounds check before taking indices from values.
    /// If enabled, an `ArrowError` is returned if the indices are out of bounds.
    /// If not enabled, and indices exceed bounds, the kernel will panic.
    ///
    /// The derived `Default` yields `check_bounds: false` (`bool::default()`),
    /// identical to the manual `impl Default` this derive replaced.
    pub check_bounds: bool,
}

// NOTE(review): this manual impl is byte-for-byte equivalent to
// `#[derive(Default)]` (`bool::default()` is `false`); clippy 1.57's
// `derivable_impls` lint flags exactly this pattern, which is why the
// commit removes it in favor of the derive.
impl Default for TakeOptions {
    fn default() -> Self {
        Self {
            check_bounds: false,
        }
    }
}

#[inline(always)]
fn maybe_usize<I: ArrowNativeType>(index: I) -> Result<usize> {
index
Expand Down
4 changes: 2 additions & 2 deletions arrow/src/compute/util.rs
Expand Up @@ -301,7 +301,7 @@ pub(super) mod tests {
values.append(&mut array);
} else {
list_null_count += 1;
bit_util::unset_bit(&mut list_bitmap.as_slice_mut(), idx);
bit_util::unset_bit(list_bitmap.as_slice_mut(), idx);
}
offset.push(values.len() as i64);
}
Expand Down Expand Up @@ -386,7 +386,7 @@ pub(super) mod tests {
values.extend(items.into_iter());
} else {
list_null_count += 1;
bit_util::unset_bit(&mut list_bitmap.as_slice_mut(), idx);
bit_util::unset_bit(list_bitmap.as_slice_mut(), idx);
values.extend(vec![None; length as usize].into_iter());
}
}
Expand Down
2 changes: 1 addition & 1 deletion arrow/src/datatypes/field.rs
Expand Up @@ -286,7 +286,7 @@ impl Field {
DataType::Struct(mut fields) => match map.get("children") {
Some(Value::Array(values)) => {
let struct_fields: Result<Vec<Field>> =
values.iter().map(|v| Field::from(v)).collect();
values.iter().map(Field::from).collect();
fields.append(&mut struct_fields?);
DataType::Struct(fields)
}
Expand Down
5 changes: 1 addition & 4 deletions arrow/src/datatypes/schema.rs
Expand Up @@ -227,10 +227,7 @@ impl Schema {
match *json {
Value::Object(ref schema) => {
let fields = if let Some(Value::Array(fields)) = schema.get("fields") {
fields
.iter()
.map(|f| Field::from(f))
.collect::<Result<_>>()?
fields.iter().map(Field::from).collect::<Result<_>>()?
} else {
return Err(ArrowError::ParseError(
"Schema fields should be an array".to_string(),
Expand Down
16 changes: 8 additions & 8 deletions arrow/src/ipc/writer.rs
Expand Up @@ -752,9 +752,9 @@ fn write_continuation<W: Write>(
/// Write array data to a vector of bytes
fn write_array_data(
array_data: &ArrayData,
mut buffers: &mut Vec<ipc::Buffer>,
mut arrow_data: &mut Vec<u8>,
mut nodes: &mut Vec<ipc::FieldNode>,
buffers: &mut Vec<ipc::Buffer>,
arrow_data: &mut Vec<u8>,
nodes: &mut Vec<ipc::FieldNode>,
offset: i64,
num_rows: usize,
null_count: usize,
Expand All @@ -775,11 +775,11 @@ fn write_array_data(
Some(buffer) => buffer.clone(),
};

offset = write_buffer(&null_buffer, &mut buffers, &mut arrow_data, offset);
offset = write_buffer(&null_buffer, buffers, arrow_data, offset);
}

array_data.buffers().iter().for_each(|buffer| {
offset = write_buffer(buffer, &mut buffers, &mut arrow_data, offset);
offset = write_buffer(buffer, buffers, arrow_data, offset);
});

if !matches!(array_data.data_type(), DataType::Dictionary(_, _)) {
Expand All @@ -788,9 +788,9 @@ fn write_array_data(
// write the nested data (e.g list data)
offset = write_array_data(
data_ref,
&mut buffers,
&mut arrow_data,
&mut nodes,
buffers,
arrow_data,
nodes,
offset,
data_ref.len(),
data_ref.null_count(),
Expand Down
Expand Up @@ -58,6 +58,7 @@ pub async fn scenario_setup(port: &str) -> Result {
/// Scenario state used by `scenario_setup` above — presumably the basic-auth
/// Flight integration test; confirm against the enclosing module.
pub struct AuthBasicProtoScenarioImpl {
    // Credentials presented by the scenario.
    username: Arc<str>,
    password: Arc<str>,
    // Written but never read as of Rust 1.57's improved dead-code analysis;
    // kept (rather than deleted) and silenced per this commit (see the
    // commit message: "Allow dead code on a field Rust now identifies as
    // never read").
    #[allow(dead_code)]
    peer_identity: Arc<Mutex<Option<String>>>,
}

Expand Down
16 changes: 8 additions & 8 deletions parquet/src/arrow/arrow_writer.rs
Expand Up @@ -143,9 +143,9 @@ fn get_col_writer(

#[allow(clippy::borrowed_box)]
fn write_leaves(
mut row_group_writer: &mut Box<dyn RowGroupWriter>,
row_group_writer: &mut Box<dyn RowGroupWriter>,
array: &arrow_array::ArrayRef,
mut levels: &mut Vec<LevelInfo>,
levels: &mut Vec<LevelInfo>,
) -> Result<()> {
match array.data_type() {
ArrowDataType::Null
Expand Down Expand Up @@ -173,7 +173,7 @@ fn write_leaves(
| ArrowDataType::LargeUtf8
| ArrowDataType::Decimal(_, _)
| ArrowDataType::FixedSizeBinary(_) => {
let mut col_writer = get_col_writer(&mut row_group_writer)?;
let mut col_writer = get_col_writer(row_group_writer)?;
write_leaf(
&mut col_writer,
array,
Expand All @@ -186,7 +186,7 @@ fn write_leaves(
// write the child list
let data = array.data();
let child_array = arrow_array::make_array(data.child_data()[0].clone());
write_leaves(&mut row_group_writer, &child_array, &mut levels)?;
write_leaves(row_group_writer, &child_array, levels)?;
Ok(())
}
ArrowDataType::Struct(_) => {
Expand All @@ -195,7 +195,7 @@ fn write_leaves(
.downcast_ref::<arrow_array::StructArray>()
.expect("Unable to get struct array");
for field in struct_array.columns() {
write_leaves(&mut row_group_writer, field, &mut levels)?;
write_leaves(row_group_writer, field, levels)?;
}
Ok(())
}
Expand All @@ -204,15 +204,15 @@ fn write_leaves(
.as_any()
.downcast_ref::<arrow_array::MapArray>()
.expect("Unable to get map array");
write_leaves(&mut row_group_writer, &map_array.keys(), &mut levels)?;
write_leaves(&mut row_group_writer, &map_array.values(), &mut levels)?;
write_leaves(row_group_writer, &map_array.keys(), levels)?;
write_leaves(row_group_writer, &map_array.values(), levels)?;
Ok(())
}
ArrowDataType::Dictionary(_, value_type) => {
// cast dictionary to a primitive
let array = arrow::compute::cast(array, value_type)?;

let mut col_writer = get_col_writer(&mut row_group_writer)?;
let mut col_writer = get_col_writer(row_group_writer)?;
write_leaf(
&mut col_writer,
&array,
Expand Down
14 changes: 7 additions & 7 deletions parquet/src/record/reader.rs
Expand Up @@ -106,7 +106,7 @@ impl TreeBuilder {
fn reader_tree(
&self,
field: TypePtr,
mut path: &mut Vec<String>,
path: &mut Vec<String>,
mut curr_def_level: i16,
mut curr_rep_level: i16,
paths: &HashMap<ColumnPath, usize>,
Expand Down Expand Up @@ -160,7 +160,7 @@ impl TreeBuilder {
// Support for backward compatible lists
let reader = self.reader_tree(
repeated_field,
&mut path,
path,
curr_def_level,
curr_rep_level,
paths,
Expand All @@ -180,7 +180,7 @@ impl TreeBuilder {

let reader = self.reader_tree(
child_field,
&mut path,
path,
curr_def_level + 1,
curr_rep_level + 1,
paths,
Expand Down Expand Up @@ -235,7 +235,7 @@ impl TreeBuilder {
);
let key_reader = self.reader_tree(
key_type.clone(),
&mut path,
path,
curr_def_level + 1,
curr_rep_level + 1,
paths,
Expand All @@ -245,7 +245,7 @@ impl TreeBuilder {
let value_type = &key_value_type.get_fields()[1];
let value_reader = self.reader_tree(
value_type.clone(),
&mut path,
path,
curr_def_level + 1,
curr_rep_level + 1,
paths,
Expand Down Expand Up @@ -278,7 +278,7 @@ impl TreeBuilder {

let reader = self.reader_tree(
Arc::new(required_field),
&mut path,
path,
curr_def_level,
curr_rep_level,
paths,
Expand All @@ -298,7 +298,7 @@ impl TreeBuilder {
for child in field.get_fields() {
let reader = self.reader_tree(
child.clone(),
&mut path,
path,
curr_def_level,
curr_rep_level,
paths,
Expand Down
4 changes: 2 additions & 2 deletions parquet/src/util/bit_util.rs
Expand Up @@ -383,8 +383,8 @@ impl BitWriter {
// TODO: should we return `Result` for this func?
return false;
}
let mut ptr = result.unwrap();
memcpy_value(&val, num_bytes, &mut ptr);
let ptr = result.unwrap();
memcpy_value(&val, num_bytes, ptr);
true
}

Expand Down
15 changes: 6 additions & 9 deletions parquet_derive/src/parquet_field.rs
Expand Up @@ -769,7 +769,7 @@ mod test {
};

let fields = extract_fields(snippet);
let processed: Vec<_> = fields.iter().map(|field| Field::from(field)).collect();
let processed: Vec<_> = fields.iter().map(Field::from).collect();

let column_writers: Vec<_> = processed
.iter()
Expand Down Expand Up @@ -800,7 +800,7 @@ mod test {
};

let fields = extract_fields(snippet);
let processed: Vec<_> = fields.iter().map(|field| Field::from(field)).collect();
let processed: Vec<_> = fields.iter().map(Field::from).collect();
assert_eq!(processed.len(), 3);

assert_eq!(
Expand Down Expand Up @@ -840,8 +840,7 @@ mod test {
};

let fields = extract_fields(snippet);
let converted_fields: Vec<_> =
fields.iter().map(|field| Type::from(field)).collect();
let converted_fields: Vec<_> = fields.iter().map(Type::from).collect();
let inner_types: Vec<_> = converted_fields
.iter()
.map(|field| field.inner_type())
Expand Down Expand Up @@ -878,8 +877,7 @@ mod test {
};

let fields = extract_fields(snippet);
let converted_fields: Vec<_> =
fields.iter().map(|field| Type::from(field)).collect();
let converted_fields: Vec<_> = fields.iter().map(Type::from).collect();
let physical_types: Vec<_> = converted_fields
.iter()
.map(|ty| ty.physical_type())
Expand Down Expand Up @@ -911,8 +909,7 @@ mod test {
};

let fields = extract_fields(snippet);
let converted_fields: Vec<_> =
fields.iter().map(|field| Type::from(field)).collect();
let converted_fields: Vec<_> = fields.iter().map(Type::from).collect();

assert_eq!(
converted_fields,
Expand All @@ -938,7 +935,7 @@ mod test {
};

let fields = extract_fields(snippet);
let types: Vec<_> = fields.iter().map(|field| Type::from(field)).collect();
let types: Vec<_> = fields.iter().map(Type::from).collect();

assert_eq!(
types,
Expand Down

0 comments on commit 9fb2a5f

Please sign in to comment.