ARROW-11603: [Rust] Fix Clippy Lints for Rust 1.50
# Rationale

CI uses the "stable" Rust toolchain, and Rust 1.50 stable was released today: https://blog.rust-lang.org/2021/02/11/Rust-1.50.0.html

The new clippy is pickier, resulting in many clippy warnings such as https://github.com/apache/arrow/pull/9469/checks?check_run_id=1881854256

We need to get CI back to green.

# Changes
Based on #9475 from @nevi-me, this PR aims to get CI green as soon as possible.

Ideally, we would fix the actual lint problems. However, when I tried to do so, the lints propagated into a significant change: clippy says to remove the `Result`, but then there are a bunch of call sites that also need to be changed.

I want to get CI back to a clean state as soon as possible, so I hammered through and cleaned up as best I could, sprinkling in `#[allow(clippy::unnecessary_wraps)]` attributes as necessary to get a clean run.

My rationale is that the code is no worse than it was before, though it could be better!
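
For illustration, here is a minimal sketch of the tradeoff (this is not code from the PR; `double` is a hypothetical function): clippy 1.50 flags a function that always returns `Ok` as `unnecessary_wraps`, the "real" fix changes the signature and ripples into every `?` call site, while the `#[allow]` attribute used in this PR simply silences the lint.

```rust
// Hypothetical example of the clippy::unnecessary_wraps pattern.
type Result<T> = std::result::Result<T, String>;

// Clippy 1.50 flags this: the function always returns `Ok`, so the `Result`
// wrapper is unnecessary. The "proper" fix is `fn double(x: i32) -> i32`,
// but then every caller using `?` would also have to change.
#[allow(clippy::unnecessary_wraps)]
fn double(x: i32) -> Result<i32> {
    Ok(x * 2)
}

fn main() -> Result<()> {
    // Written against the Result-returning signature; removing the wrapper
    // above would ripple into this `?` and every similar caller.
    let doubled = double(21)?;
    println!("{}", doubled);
    Ok(())
}
```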

Closes #9476 from alamb/ARROW-11602-lints

Authored-by: Neville Dipale <nevilledips@gmail.com>
Signed-off-by: Jorge C. Leitao <jorgecarleitao@gmail.com>
nevi-me authored and jorgecarleitao committed Feb 11, 2021
1 parent 7c5cf92 commit 356c300
Showing 45 changed files with 80 additions and 106 deletions.
4 changes: 1 addition & 3 deletions rust/arrow/examples/read_csv.rs
@@ -22,11 +22,10 @@ use std::sync::Arc;

use arrow::csv;
use arrow::datatypes::{DataType, Field, Schema};
use arrow::error::Result;
#[cfg(feature = "prettyprint")]
use arrow::util::pretty::print_batches;

fn main() -> Result<()> {
fn main() {
let schema = Schema::new(vec![
Field::new("city", DataType::Utf8, false),
Field::new("lat", DataType::Float64, false),
@@ -41,5 +40,4 @@ fn main() -> Result<()> {
{
print_batches(&[_batch]).unwrap();
}
Ok(())
}
4 changes: 1 addition & 3 deletions rust/arrow/examples/read_csv_infer_schema.rs
@@ -18,12 +18,11 @@
extern crate arrow;

use arrow::csv;
use arrow::error::Result;
#[cfg(feature = "prettyprint")]
use arrow::util::pretty::print_batches;
use std::fs::File;

fn main() -> Result<()> {
fn main() {
let file = File::open("test/data/uk_cities_with_headers.csv").unwrap();
let builder = csv::ReaderBuilder::new()
.has_header(true)
@@ -34,5 +33,4 @@ fn main() -> Result<()> {
{
print_batches(&[_batch]).unwrap();
}
Ok(())
}
2 changes: 2 additions & 0 deletions rust/arrow/src/array/builder.rs
@@ -1499,6 +1499,7 @@ impl FieldData {
}

/// Appends a single value to this `FieldData`'s `values_buffer`.
#[allow(clippy::unnecessary_wraps)]
fn append_to_values_buffer<T: ArrowPrimitiveType>(
&mut self,
v: T::Native,
@@ -1521,6 +1522,7 @@ impl FieldData {
}

/// Appends a null to this `FieldData`.
#[allow(clippy::unnecessary_wraps)]
fn append_null<T: ArrowPrimitiveType>(&mut self) -> Result<()> {
if let Some(b) = &mut self.bitmap_builder {
let values_buffer = self
3 changes: 1 addition & 2 deletions rust/arrow/src/array/transform/mod.rs
@@ -1075,7 +1075,7 @@ mod tests {
}

#[test]
fn test_fixed_size_binary_append() -> Result<()> {
fn test_fixed_size_binary_append() {
let a = vec![Some(vec![1, 2]), Some(vec![3, 4]), Some(vec![5, 6])];
let a = FixedSizeBinaryArray::from(a).data();

@@ -1118,7 +1118,6 @@
];
let expected = FixedSizeBinaryArray::from(expected).data();
assert_eq!(&result, expected.as_ref());
Ok(())
}

/*
4 changes: 1 addition & 3 deletions rust/arrow/src/buffer.rs
@@ -1619,7 +1619,7 @@ mod tests {
}

#[test]
fn test_mutable_equal() -> Result<()> {
fn test_mutable_equal() {
let mut buf = MutableBuffer::new(1);
let mut buf2 = MutableBuffer::new(1);

@@ -1632,8 +1632,6 @@

buf2.reserve(65);
assert!(buf != buf2);

Ok(())
}

#[test]
5 changes: 5 additions & 0 deletions rust/arrow/src/compute/kernels/cast.rs
@@ -821,6 +821,7 @@ const EPOCH_DAYS_FROM_CE: i32 = 719_163;
/// Arrays should have the same primitive data type, otherwise this should fail.
/// We do not perform this check on primitive data types as we only use this
/// function internally, where it is guaranteed to be infallible.
#[allow(clippy::unnecessary_wraps)]
fn cast_array_data<TO>(array: &ArrayRef, to_type: DataType) -> Result<ArrayRef>
where
TO: ArrowNumericType,
@@ -838,6 +839,7 @@ where
}

/// Convert Array into a PrimitiveArray of type, and apply numeric cast
#[allow(clippy::unnecessary_wraps)]
fn cast_numeric_arrays<FROM, TO>(from: &ArrayRef) -> Result<ArrayRef>
where
FROM: ArrowNumericType,
@@ -869,6 +871,7 @@ where
}

/// Cast numeric types to Utf8
#[allow(clippy::unnecessary_wraps)]
fn cast_numeric_to_string<FROM>(array: &ArrayRef) -> Result<ArrayRef>
where
FROM: ArrowNumericType,
@@ -893,6 +896,7 @@ where
}

/// Cast numeric types to Utf8
#[allow(clippy::unnecessary_wraps)]
fn cast_string_to_numeric<T>(from: &ArrayRef) -> Result<ArrayRef>
where
T: ArrowNumericType,
@@ -959,6 +963,7 @@ where
/// Cast Boolean types to numeric
///
/// `false` returns 0 while `true` returns 1
#[allow(clippy::unnecessary_wraps)]
fn cast_bool_to_numeric<TO>(from: &ArrayRef) -> Result<ArrayRef>
where
TO: ArrowNumericType,
6 changes: 2 additions & 4 deletions rust/arrow/src/compute/kernels/concat.rs
@@ -77,20 +77,18 @@ mod tests {
use std::sync::Arc;

#[test]
fn test_concat_empty_vec() -> Result<()> {
fn test_concat_empty_vec() {
let re = concat(&[]);
assert!(re.is_err());
Ok(())
}

#[test]
fn test_concat_incompatible_datatypes() -> Result<()> {
fn test_concat_incompatible_datatypes() {
let re = concat(&[
&PrimitiveArray::<Int64Type>::from(vec![Some(-1), Some(2), None]),
&StringArray::from(vec![Some("hello"), Some("bar"), Some("world")]),
]);
assert!(re.is_err());
Ok(())
}

#[test]
4 changes: 2 additions & 2 deletions rust/arrow/src/compute/kernels/length.rs
@@ -24,6 +24,7 @@ use crate::{
};
use std::sync::Arc;

#[allow(clippy::unnecessary_wraps)]
fn length_string<OffsetSize>(array: &Array, data_type: DataType) -> Result<ArrayRef>
where
OffsetSize: OffsetSizeTrait,
@@ -178,11 +179,10 @@

/// Tests that length is not valid for u64.
#[test]
fn wrong_type() -> Result<()> {
fn wrong_type() {
let array: UInt64Array = vec![1u64].into();

assert!(length(&array).is_err());
Ok(())
}

/// Tests with an offset
4 changes: 4 additions & 0 deletions rust/arrow/src/compute/kernels/sort.rs
@@ -254,6 +254,7 @@ impl Default for SortOptions {
}

/// Sort primitive values
#[allow(clippy::unnecessary_wraps)]
fn sort_boolean(
values: &ArrayRef,
value_indices: Vec<u32>,
@@ -316,6 +317,7 @@ fn sort_boolean(
}

/// Sort primitive values
#[allow(clippy::unnecessary_wraps)]
fn sort_primitive<T, F>(
values: &ArrayRef,
value_indices: Vec<u32>,
@@ -446,6 +448,7 @@ fn sort_string_dictionary<T: ArrowDictionaryKeyType>(

/// shared implementation between dictionary encoded and plain string arrays
#[inline]
#[allow(clippy::unnecessary_wraps)]
fn sort_string_helper<'a, A: Array, F>(
values: &'a A,
value_indices: Vec<u32>,
@@ -481,6 +484,7 @@
Ok(UInt32Array::from(valid_indices))
}

#[allow(clippy::unnecessary_wraps)]
fn sort_list<S, T>(
values: &ArrayRef,
value_indices: Vec<u32>,
1 change: 1 addition & 0 deletions rust/arrow/src/compute/kernels/substring.rs
@@ -24,6 +24,7 @@ use crate::{
};
use std::sync::Arc;

#[allow(clippy::unnecessary_wraps)]
fn generic_substring<OffsetSize: StringOffsetSizeTrait>(
array: &GenericStringArray<OffsetSize>,
start: OffsetSize,
2 changes: 2 additions & 0 deletions rust/arrow/src/compute/util.rs
@@ -27,6 +27,7 @@ use std::ops::Add;
/// Combines the null bitmaps of two arrays using a bitwise `and` operation.
///
/// This function is useful when implementing operations on higher level arrays.
#[allow(clippy::unnecessary_wraps)]
pub(super) fn combine_option_bitmap(
left_data: &ArrayDataRef,
right_data: &ArrayDataRef,
@@ -60,6 +61,7 @@ pub(super) fn combine_option_bitmap(
/// Compares the null bitmaps of two arrays using a bitwise `or` operation.
///
/// This function is useful when implementing operations on higher level arrays.
#[allow(clippy::unnecessary_wraps)]
pub(super) fn compare_option_bitmap(
left_data: &ArrayDataRef,
right_data: &ArrayDataRef,
3 changes: 1 addition & 2 deletions rust/arrow/src/csv/reader.rs
@@ -1153,7 +1153,7 @@ mod tests {
}

#[test]
fn test_bounded() -> Result<()> {
fn test_bounded() {
let schema = Schema::new(vec![Field::new("int", DataType::UInt32, false)]);
let data = vec![
vec!["0"],
@@ -1196,7 +1196,6 @@
assert_eq!(a, &UInt32Array::from(vec![4, 5]));

assert!(csv.next().is_none());
Ok(())
}

#[test]
2 changes: 2 additions & 0 deletions rust/arrow/src/json/reader.rs
@@ -764,6 +764,7 @@ impl Decoder {
}

#[inline(always)]
#[allow(clippy::unnecessary_wraps)]
fn build_string_dictionary_builder<T>(
&self,
row_len: usize,
@@ -835,6 +836,7 @@ impl Decoder {
Ok(Arc::new(builder.finish()))
}

#[allow(clippy::unnecessary_wraps)]
fn build_primitive_array<T: ArrowPrimitiveType>(
&self,
rows: &[Value],
2 changes: 1 addition & 1 deletion rust/datafusion/src/execution/context.rs
@@ -172,7 +172,7 @@ impl ExecutionContext {
// create a query planner
let state = self.state.lock().unwrap().clone();
let query_planner = SqlToRel::new(&state);
Ok(query_planner.statement_to_plan(&statements[0])?)
query_planner.statement_to_plan(&statements[0])
}

/// Registers a variable provider within this context.
4 changes: 1 addition & 3 deletions rust/datafusion/src/logical_plan/builder.rs
@@ -469,7 +469,7 @@ mod tests {
}

#[test]
fn stringified_plan() -> Result<()> {
fn stringified_plan() {
let stringified_plan =
StringifiedPlan::new(PlanType::LogicalPlan, "...the plan...");
assert!(stringified_plan.should_display(true));
@@ -488,7 +488,5 @@ fn stringified_plan() -> Result<()> {
);
assert!(stringified_plan.should_display(true));
assert!(!stringified_plan.should_display(false));

Ok(())
}
}
2 changes: 2 additions & 0 deletions rust/datafusion/src/logical_plan/dfschema.rs
@@ -237,9 +237,11 @@ where
Self: Sized,
{
/// Attempt to create a DSSchema
#[allow(clippy::wrong_self_convention)]
fn to_dfschema(self) -> Result<DFSchema>;

/// Attempt to create a DSSchemaRef
#[allow(clippy::wrong_self_convention)]
fn to_dfschema_ref(self) -> Result<DFSchemaRef> {
Ok(Arc::new(self.to_dfschema()?))
}
3 changes: 1 addition & 2 deletions rust/datafusion/src/logical_plan/expr.rs
@@ -1137,11 +1137,10 @@ mod tests {
}

#[test]
fn case_when_different_literal_then_types() -> Result<()> {
fn case_when_different_literal_then_types() {
let maybe_expr = when(col("state").eq(lit("CO")), lit(303))
.when(col("state").eq(lit("NY")), lit("212"))
.end();
assert!(maybe_expr.is_err());
Ok(())
}
}
5 changes: 1 addition & 4 deletions rust/datafusion/src/logical_plan/operators.rs
@@ -111,11 +111,10 @@ impl ops::Div for Expr {

#[cfg(test)]
mod tests {
use crate::error::Result;
use crate::prelude::lit;

#[test]
fn test_operators() -> Result<()> {
fn test_operators() {
assert_eq!(
format!("{:?}", lit(1u32) + lit(2u32)),
"UInt32(1) Plus UInt32(2)"
@@ -132,7 +131,5 @@ mod tests {
format!("{:?}", lit(1u32) / lit(2u32)),
"UInt32(1) Divide UInt32(2)"
);

Ok(())
}
}
4 changes: 1 addition & 3 deletions rust/datafusion/src/optimizer/hash_build_probe_order.rs
@@ -226,7 +226,7 @@ mod tests {
}

#[test]
fn test_swap_order() -> Result<()> {
fn test_swap_order() {
let lp_left = LogicalPlan::TableScan {
table_name: "left".to_string(),
projection: None,
@@ -245,7 +245,5 @@

assert!(should_swap_join_order(&lp_left, &lp_right));
assert!(!should_swap_join_order(&lp_right, &lp_left));

Ok(())
}
}
6 changes: 2 additions & 4 deletions rust/datafusion/src/physical_plan/aggregates.rs
@@ -205,10 +205,9 @@ mod tests {
}

#[test]
fn test_sum_no_utf8() -> Result<()> {
fn test_sum_no_utf8() {
let observed = return_type(&AggregateFunction::Sum, &[DataType::Utf8]);
assert!(observed.is_err());
Ok(())
}

#[test]
@@ -239,9 +238,8 @@ mod tests {
}

#[test]
fn test_avg_no_utf8() -> Result<()> {
fn test_avg_no_utf8() {
let observed = return_type(&AggregateFunction::Avg, &[DataType::Utf8]);
assert!(observed.is_err());
Ok(())
}
}
