Clippy
Brent Gardner committed Aug 2, 2022
1 parent 50f4205 commit 9783099
Showing 14 changed files with 46 additions and 48 deletions.
10 changes: 4 additions & 6 deletions datafusion/core/src/avro_to_arrow/arrow_array_reader.rs
@@ -101,12 +101,10 @@ impl<'a, R: Read> AvroArrowArrayReader<'a, R> {
                     "Failed to parse avro value: {:?}",
                     e
                 ))),
-                other => {
-                    return Err(ArrowError::ParseError(format!(
-                        "Row needs to be of type object, got: {:?}",
-                        other
-                    )))
-                }
+                other => Err(ArrowError::ParseError(format!(
+                    "Row needs to be of type object, got: {:?}",
+                    other
+                ))),
             })
             .collect::<ArrowResult<Vec<Vec<(String, Value)>>>>()?;
         if rows.is_empty() {
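In the new arm the `Err` value is the match expression's result; Clippy's `needless_return` lint flags the old braces-plus-`return` form because the match is already in tail position. A minimal standalone sketch of the same pattern (not DataFusion code):

// A match in tail position already yields a value, so the braced
// `return Err(...)` arm can collapse to a plain expression arm.
fn parse_positive(n: i64) -> Result<u64, String> {
    match n {
        n if n >= 0 => Ok(n as u64),
        other => Err(format!("value must be non-negative, got: {:?}", other)),
    }
}

fn main() {
    assert_eq!(parse_positive(3), Ok(3));
    assert!(parse_positive(-1).is_err());
}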
6 changes: 3 additions & 3 deletions datafusion/core/src/datasource/file_format/parquet.rs
@@ -652,16 +652,16 @@ mod tests {

     async fn put_multipart(
         &self,
-        location: &Path,
+        _location: &Path,
     ) -> object_store::Result<(MultipartId, Box<dyn AsyncWrite + Unpin + Send>)>
     {
         Err(object_store::Error::NotImplemented)
     }

     async fn abort_multipart(
         &self,
-        location: &Path,
-        multipart_id: &MultipartId,
+        _location: &Path,
+        _multipart_id: &MultipartId,
     ) -> object_store::Result<()> {
         Err(object_store::Error::NotImplemented)
     }
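Prefixing `location` and `multipart_id` with `_` silences the `unused_variables` warning on stubs that must keep the trait's exact signature. A sketch of the same idea with a hypothetical `Store` trait:

trait Store {
    fn delete(&self, key: &str) -> Result<(), String>;
}

struct ReadOnlyStore;

impl Store for ReadOnlyStore {
    // The trait fixes the signature, but this impl never reads `key`;
    // the leading underscore marks the parameter as intentionally unused.
    fn delete(&self, _key: &str) -> Result<(), String> {
        Err("store is read-only".to_string())
    }
}

fn main() {
    assert!(ReadOnlyStore.delete("some/path").is_err());
}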
2 changes: 1 addition & 1 deletion datafusion/core/src/physical_plan/repartition.rs
@@ -933,7 +933,7 @@ mod tests {
         let items_set: HashSet<&str> = items_vec.iter().copied().collect();
         assert_eq!(items_vec.len(), items_set.len());
         let source_str_set: HashSet<&str> =
-            (&["foo", "bar", "frob", "baz", "goo", "gar", "grob", "gaz"])
+            ["foo", "bar", "frob", "baz", "goo", "gar", "grob", "gaz"]
                 .iter()
                 .copied()
                 .collect();
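The `(&[...])` wrapper borrowed the array only so `.iter()` could borrow it again; method-call syntax auto-borrows the receiver, so Clippy's `needless_borrow` suggests dropping the `&`. In isolation:

use std::collections::HashSet;

fn main() {
    // `.iter()` takes `&self`, so the array literal is borrowed
    // automatically; the surrounding `(&...)` was redundant.
    let set: HashSet<&str> = ["foo", "bar", "baz"].iter().copied().collect();
    assert_eq!(set.len(), 3);
}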
2 changes: 1 addition & 1 deletion datafusion/core/src/scheduler/plan.rs
@@ -29,7 +29,7 @@ use crate::scheduler::pipeline::{
 };

 /// Identifies the [`Pipeline`] within the [`PipelinePlan`] to route output to
-#[derive(Debug, Clone, Copy, PartialEq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub struct OutputLink {
     /// The index of the [`Pipeline`] in [`PipelinePlan`] to route output to
     pub pipeline: usize,
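Adding `Eq` beside `PartialEq` asserts that the equality is total, which Clippy suggests when every field is itself `Eq` (likely the `derive_partial_eq_without_eq` lint, though the commit message doesn't say). One practical payoff, sketched with a hypothetical `Link` type, is eligibility as a hash-map key:

use std::collections::HashMap;

// Every field is `Eq`, so the struct can promise total equality too;
// `Eq` (plus `Hash`) is what makes it usable as a `HashMap` key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct Link {
    pipeline: usize,
    child: usize,
}

fn main() {
    let mut routes = HashMap::new();
    routes.insert(Link { pipeline: 0, child: 1 }, "output");
    assert_eq!(routes[&Link { pipeline: 0, child: 1 }], "output");
}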
2 changes: 1 addition & 1 deletion datafusion/core/src/scheduler/task.rs
@@ -137,7 +137,7 @@ impl Task {
         let partition = self.waker.partition;

         let waker = futures::task::waker_ref(&self.waker);
-        let mut cx = Context::from_waker(&*waker);
+        let mut cx = Context::from_waker(&waker);

         let pipelines = &self.context.pipelines;
         let routable = &pipelines[node];
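`futures::task::waker_ref` returns a `WakerRef` that derefs to `Waker`, so the manual `&*waker` re-borrow is redundant; plain `&waker` reaches `&Waker` by deref coercion. The same coercion shown with `String`/`&str`:

fn print_len(s: &str) {
    println!("{} has {} bytes", s, s.len());
}

fn main() {
    let owned = String::from("hello");
    // `&*owned` manually derefs `String` to `str` and re-borrows;
    // plain `&owned` gets there by deref coercion, which Clippy prefers.
    print_len(&owned);
}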
20 changes: 19 additions & 1 deletion datafusion/core/tests/path_partition.rs
@@ -40,7 +40,10 @@ use datafusion::{
 use datafusion_common::ScalarValue;
 use futures::stream::BoxStream;
 use futures::{stream, StreamExt};
-use object_store::{path::Path, GetResult, ListResult, ObjectMeta, ObjectStore};
+use object_store::{
+    path::Path, GetResult, ListResult, MultipartId, ObjectMeta, ObjectStore,
+};
+use tokio::io::AsyncWrite;

 #[tokio::test]
 async fn parquet_distinct_partition_col() -> Result<()> {
@@ -516,6 +519,21 @@ impl ObjectStore for MirroringObjectStore {
         unimplemented!()
     }

+    async fn put_multipart(
+        &self,
+        _location: &Path,
+    ) -> object_store::Result<(MultipartId, Box<dyn AsyncWrite + Unpin + Send>)> {
+        unimplemented!()
+    }
+
+    async fn abort_multipart(
+        &self,
+        _location: &Path,
+        _multipart_id: &MultipartId,
+    ) -> object_store::Result<()> {
+        unimplemented!()
+    }
+
     async fn get(&self, location: &Path) -> object_store::Result<GetResult> {
         self.files.iter().find(|x| *x == location.as_ref()).unwrap();
         let path = std::path::PathBuf::from(&self.mirrored_file);
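The mirroring test store picks up stubs for the multipart methods that this version of the `object_store` trait requires; `unimplemented!()` keeps the mock compiling while panicking if a test ever exercises them. The pattern in miniature, with a hypothetical `Uploader` trait:

trait Uploader {
    fn put(&self, key: &str, data: &[u8]) -> Result<(), String>;
    // Imagine this method was added in a newer release of the trait's crate.
    fn put_multipart(&self, key: &str) -> Result<u64, String>;
}

struct MockUploader;

impl Uploader for MockUploader {
    fn put(&self, _key: &str, _data: &[u8]) -> Result<(), String> {
        Ok(())
    }
    // No test exercises multipart uploads; fail loudly if one ever does.
    fn put_multipart(&self, _key: &str) -> Result<u64, String> {
        unimplemented!()
    }
}

fn main() {
    assert!(MockUploader.put("k", b"v").is_ok());
}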
10 changes: 4 additions & 6 deletions datafusion/expr/src/binary_rule.rs
@@ -399,12 +399,10 @@ pub fn is_signed_numeric(dt: &DataType) -> bool {
 /// Determine if a DataType is numeric or not
 pub fn is_numeric(dt: &DataType) -> bool {
     is_signed_numeric(dt)
-        || match dt {
-            DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64 => {
-                true
-            }
-            _ => false,
-        }
+        || matches!(
+            dt,
+            DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64
+        )
 }

 /// Determine if at least one of lhs and rhs is numeric, and the other must be NULL or numeric
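A `match` whose arms only yield `true` or `false` is exactly what `clippy::match_like_matches_macro` targets; `matches!` states it in one expression. Standalone, with a toy enum:

#[derive(Debug)]
enum Ty {
    UInt8,
    UInt16,
    Utf8,
}

// `matches!` expands to exactly the match it replaces: `true` on a
// pattern hit, `false` otherwise.
fn is_unsigned(t: &Ty) -> bool {
    matches!(t, Ty::UInt8 | Ty::UInt16)
}

fn main() {
    assert!(is_unsigned(&Ty::UInt8));
    assert!(is_unsigned(&Ty::UInt16));
    assert!(!is_unsigned(&Ty::Utf8));
}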
6 changes: 3 additions & 3 deletions datafusion/optimizer/src/decorrelate_scalar_subquery.rs
@@ -69,7 +69,7 @@ impl DecorrelateScalarSubquery {
             _ => return Ok(()),
         };
         let subquery =
-            self.optimize(&*subquery.subquery, optimizer_config)?;
+            self.optimize(&subquery.subquery, optimizer_config)?;
         let subquery = Arc::new(subquery);
         let subquery = Subquery { subquery };
         let res = SubqueryInfo::new(subquery, expr, *op, lhs);
@@ -163,7 +163,7 @@ fn optimize_scalar(
         "optimizing:\n{}",
         query_info.query.subquery.display_indent()
     );
-    let proj = Projection::try_from_plan(&*query_info.query.subquery)
+    let proj = Projection::try_from_plan(&query_info.query.subquery)
         .map_err(|e| context!("scalar subqueries must have a projection", e))?;
     let proj = only_or_err(proj.expr.as_slice())
         .map_err(|e| context!("exactly one expression should be projected", e))?;
@@ -173,7 +173,7 @@
         .map_err(|e| context!("Exactly one input is expected. Is this a join?", e))?;
     let aggr = Aggregate::try_from_plan(sub_input)
         .map_err(|e| context!("scalar subqueries must aggregate a value", e))?;
-    let filter = Filter::try_from_plan(&*aggr.input).map_err(|e| {
+    let filter = Filter::try_from_plan(&aggr.input).map_err(|e| {
         context!("scalar subqueries must have a filter to be correlated", e)
     })?;

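`subquery.subquery` is an `Arc<LogicalPlan>` here; `&*` manually dereferenced it before re-borrowing, but deref coercion derives the same `&LogicalPlan` from a plain `&`. The same idea with `Arc<String>`:

use std::sync::Arc;

fn describe(plan: &str) -> usize {
    plan.len()
}

fn main() {
    let plan = Arc::new(String::from("projection"));
    // Deref coercion chains `&Arc<String>` -> `&String` -> `&str`,
    // so the explicit `&*plan` deref-and-reborrow is redundant.
    assert_eq!(describe(&plan), 10);
}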
3 changes: 1 addition & 2 deletions datafusion/optimizer/src/decorrelate_where_exists.rs
@@ -56,8 +56,7 @@ impl DecorrelateWhereExists {
         for it in filters.iter() {
             match it {
                 Expr::Exists { subquery, negated } => {
-                    let subquery =
-                        self.optimize(&*subquery.subquery, optimizer_config)?;
+                    let subquery = self.optimize(&subquery.subquery, optimizer_config)?;
                     let subquery = Arc::new(subquery);
                     let subquery = Subquery { subquery };
                     let subquery = SubqueryInfo::new(subquery.clone(), *negated);
5 changes: 2 additions & 3 deletions datafusion/optimizer/src/decorrelate_where_in.rs
@@ -60,8 +60,7 @@ impl DecorrelateWhereIn {
                     subquery,
                     negated,
                 } => {
-                    let subquery =
-                        self.optimize(&*subquery.subquery, optimizer_config)?;
+                    let subquery = self.optimize(&subquery.subquery, optimizer_config)?;
                     let subquery = Arc::new(subquery);
                     let subquery = Subquery { subquery };
                     let subquery =
@@ -132,7 +131,7 @@ fn optimize_where_in(
     outer_other_exprs: &[Expr],
     optimizer_config: &mut OptimizerConfig,
 ) -> datafusion_common::Result<LogicalPlan> {
-    let proj = Projection::try_from_plan(&*query_info.query.subquery)
+    let proj = Projection::try_from_plan(&query_info.query.subquery)
         .map_err(|e| context!("a projection is required", e))?;
     let mut subqry_input = proj.input.clone();
     let proj = only_or_err(proj.expr.as_slice())
12 changes: 2 additions & 10 deletions datafusion/optimizer/src/simplify_expressions.rs
@@ -159,15 +159,7 @@ fn is_false(expr: &Expr) -> bool {

 /// returns true if `haystack` looks like (needle OP X) or (X OP needle)
 fn is_op_with(target_op: Operator, haystack: &Expr, needle: &Expr) -> bool {
-    match haystack {
-        Expr::BinaryExpr { left, op, right }
-            if op == &target_op
-                && (needle == left.as_ref() || needle == right.as_ref()) =>
-        {
-            true
-        }
-        _ => false,
-    }
+    matches!(haystack, Expr::BinaryExpr { left, op, right } if op == &target_op && (needle == left.as_ref() || needle == right.as_ref()))
 }

 /// returns the contained boolean value in `expr` as
@@ -1903,7 +1895,7 @@ mod tests {
     let optimized_plan = rule
         .optimize(plan, &mut config)
         .expect("failed to optimize plan");
-    return format!("{:?}", optimized_plan);
+    format!("{:?}", optimized_plan)
 }

 #[test]
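Two fixes in this file: the match collapses to `matches!` (the macro accepts pattern guards, so the `if` condition moves inside), and the test helper drops a trailing `return` in tail position. A sketch of the guard form:

// `matches!` accepts the same `if` guards as a match arm, so even a
// guarded single-arm match collapses to one expression.
fn is_small_pair(pair: &(i32, i32), limit: i32) -> bool {
    matches!(pair, (a, b) if *a <= limit && *b <= limit)
}

fn main() {
    assert!(is_small_pair(&(1, 2), 5));
    assert!(!is_small_pair(&(9, 2), 5));
}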
2 changes: 1 addition & 1 deletion datafusion/physical-expr/src/type_coercion.rs
@@ -78,7 +78,7 @@ mod tests {
         Schema::new(
             t.iter()
                 .enumerate()
-                .map(|(i, t)| Field::new(&*format!("c{}", i), t.clone(), true))
+                .map(|(i, t)| Field::new(&format!("c{}", i), t.clone(), true))
                 .collect(),
         )
     };
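Same deref-coercion cleanup on a temporary: `format!` returns a `String`, and `&format!(...)` already coerces to the `&str` this arrow version's `Field::new` expects, so the `*` is noise. In isolation:

fn shout(name: &str) -> String {
    name.to_uppercase()
}

fn main() {
    // `format!` yields a `String`; `&format!(..)` is `&String`, which
    // coerces to `&str` without the explicit `&*` deref.
    assert_eq!(shout(&format!("c{}", 7)), "C7");
}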
2 changes: 1 addition & 1 deletion datafusion/proto/src/lib.rs
@@ -153,7 +153,7 @@ mod roundtrip_tests {
         pub expr: ::core::option::Option<crate::protobuf::LogicalExprNode>,
     }

-    #[derive(Clone, PartialEq, ::prost::Message)]
+    #[derive(Clone, PartialEq, Eq, ::prost::Message)]
     pub struct TopKExecProto {
         #[prost(uint64, tag = "1")]
         pub k: u64,
12 changes: 3 additions & 9 deletions datafusion/sql/src/planner.rs
@@ -368,9 +368,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
             let table_ref: TableReference = table_name.as_str().into();

             // check if table_name exists
-            if let Err(e) = self.schema_provider.get_table_provider(table_ref) {
-                return Err(e);
-            }
+            let _ = self.schema_provider.get_table_provider(table_ref)?;

             if self.has_table("information_schema", "tables") {
                 let sql = format!("SELECT column_name, data_type, is_nullable \
@@ -2270,9 +2268,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
         let table_name = normalize_sql_object_name(sql_table_name);
         let table_ref: TableReference = table_name.as_str().into();

-        if let Err(e) = self.schema_provider.get_table_provider(table_ref) {
-            return Err(e);
-        }
+        let _ = self.schema_provider.get_table_provider(table_ref)?;

         // Figure out the where clause
         let columns = vec!["table_name", "table_schema", "table_catalog"].into_iter();
@@ -2317,9 +2313,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
         let table_name = normalize_sql_object_name(sql_table_name);
         let table_ref: TableReference = table_name.as_str().into();

-        if let Err(e) = self.schema_provider.get_table_provider(table_ref) {
-            return Err(e);
-        }
+        let _ = self.schema_provider.get_table_provider(table_ref)?;

         // Figure out the where clause
         let columns = vec!["table_name", "table_schema", "table_catalog"].into_iter();
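All three planner hunks replace hand-rolled `if let Err(e) ... return Err(e)` propagation with the `?` operator, keeping the call only for validation and discarding its `Ok` value via `let _ =`. Equivalent in miniature:

fn lookup(name: &str) -> Result<u32, String> {
    if name == "known" {
        Ok(1)
    } else {
        Err(format!("table '{}' not found", name))
    }
}

// `?` propagates the error exactly like the old
// `if let Err(e) = ... { return Err(e); }`; `let _ =` discards the
// success value we only needed for validation.
fn validate(name: &str) -> Result<(), String> {
    let _ = lookup(name)?;
    Ok(())
}

fn main() {
    assert!(validate("known").is_ok());
    assert!(validate("missing").is_err());
}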
