From 2b5c63a098802a21967b47147f1f37d239208671 Mon Sep 17 00:00:00 2001
From: LJ
Date: Thu, 20 Mar 2025 10:04:34 -0700
Subject: [PATCH] Fix more clippy warnings.

---
 src/base/value.rs            | 14 +++++++-------
 src/builder/analyzer.rs      |  2 +-
 src/builder/flow_builder.rs  | 15 ++++++++++++---
 src/execution/evaluator.rs   |  4 ++--
 src/execution/indexer.rs     |  4 ++--
 src/execution/memoization.rs |  6 +++---
 src/ops/factory_bases.rs     | 12 ++++++------
 src/ops/storages/postgres.rs | 14 ++++++--------
 src/py/convert.rs            |  2 +-
 src/setup/states.rs          | 27 +++++++++++++--------------
 src/utils/fingerprint.rs     |  4 ++--
 11 files changed, 55 insertions(+), 49 deletions(-)

diff --git a/src/base/value.rs b/src/base/value.rs
index 922fd6c95..1addfc384 100644
--- a/src/base/value.rs
+++ b/src/base/value.rs
@@ -334,7 +334,7 @@ impl> From> for BasicValue {
 }
 
 impl BasicValue {
-    pub fn to_key(self) -> Result<KeyValue> {
+    pub fn into_key(self) -> Result<KeyValue> {
         let result = match self {
             BasicValue::Bytes(v) => KeyValue::Bytes(v),
             BasicValue::Str(v) => KeyValue::Str(v),
@@ -473,13 +473,13 @@ impl Value {
         matches!(self, Value::Null)
     }
 
-    pub fn to_key(self) -> Result<KeyValue> {
+    pub fn into_key(self) -> Result<KeyValue> {
         let result = match self {
-            Value::Basic(v) => v.to_key()?,
+            Value::Basic(v) => v.into_key()?,
             Value::Struct(v) => KeyValue::Struct(
                 v.fields
                     .into_iter()
-                    .map(|v| v.to_key())
+                    .map(|v| v.into_key())
                     .collect::<Result<Vec<_>>>()?,
             ),
             Value::Null | Value::Collection(_) | Value::Table(_) | Value::List(_) => {
@@ -661,7 +661,7 @@ where
         })
     }
 
-    pub fn from_json<'a>(value: serde_json::Value, fields_schema: &[FieldSchema]) -> Result<Self> {
+    pub fn from_json(value: serde_json::Value, fields_schema: &[FieldSchema]) -> Result<Self> {
         match value {
             serde_json::Value::Array(v) => {
                 if v.len() != fields_schema.len() {
@@ -821,7 +821,7 @@ where
                             })?,
                             &key_field.value_type.typ,
                         )?
-                        .to_key()?;
+                        .into_key()?;
                         let values = FieldValues::from_json_values(
                             fields_iter.zip(field_vals_iter),
                         )?;
@@ -839,7 +839,7 @@ where
                         )?),
                         &key_field.value_type.typ,
                     )?
-                    .to_key()?;
+                    .into_key()?;
                     let values = FieldValues::from_json_object(v, fields_iter)?;
                     Ok((key, values.into()))
                 }
diff --git a/src/builder/analyzer.rs b/src/builder/analyzer.rs
index f7eb02c8b..ab244a3b2 100644
--- a/src/builder/analyzer.rs
+++ b/src/builder/analyzer.rs
@@ -1,6 +1,6 @@
 use std::collections::{BTreeMap, HashSet};
 use std::sync::Mutex;
-use std::{collections::HashMap, future::Future, pin::Pin, sync::Arc, u32};
+use std::{collections::HashMap, future::Future, pin::Pin, sync::Arc};
 
 use super::plan::*;
 use crate::execution::db_tracking_setup;
diff --git a/src/builder/flow_builder.rs b/src/builder/flow_builder.rs
index e1261a2d1..054b58b13 100644
--- a/src/builder/flow_builder.rs
+++ b/src/builder/flow_builder.rs
@@ -408,9 +408,11 @@ impl FlowBuilder {
             flow_ctx: &self.flow_inst_context,
         };
         let mut root_data_scope = self.root_data_scope.lock().unwrap();
-        let _ = analyzer_ctx
+
+        let analyzed = analyzer_ctx
             .analyze_source_op(&mut root_data_scope, source_op.clone(), None, None)
             .into_py_result()?;
+        std::mem::drop(analyzed);
 
         let result =
             Self::last_field_to_data_slice(&root_data_scope, self.root_data_scope_ref.clone())
@@ -498,7 +500,10 @@ impl FlowBuilder {
                 op: spec,
             }),
         };
-            let _ = analyzer_ctx.analyze_reactive_op(scope, &reactive_op, parent_scopes)?;
+
+            let analyzed =
+                analyzer_ctx.analyze_reactive_op(scope, &reactive_op, parent_scopes)?;
+            std::mem::drop(analyzed);
             reactive_ops.push(reactive_op);
 
             let result = Self::last_field_to_data_slice(scope.data, common_scope.clone())
@@ -537,7 +542,11 @@ impl FlowBuilder {
                 collector_name: collector.name.clone(),
             }),
         };
-            let _ = analyzer_ctx.analyze_reactive_op(scope, &reactive_op, parent_scopes)?;
+
+            let analyzed =
+                analyzer_ctx.analyze_reactive_op(scope, &reactive_op, parent_scopes)?;
+            std::mem::drop(analyzed);
+
             reactive_ops.push(reactive_op);
             Ok(())
         },
diff --git a/src/execution/evaluator.rs b/src/execution/evaluator.rs
index 7d02535cb..a26d67bb1 100644
--- a/src/execution/evaluator.rs
+++ b/src/execution/evaluator.rs
@@ -325,7 +325,7 @@ async fn evaluate_op_scope(
                         .fingerprinter
                         .clone()
                         .with(&input_values)?
-                        .to_fingerprint();
+                        .into_fingerprint();
                     Some(cache.get(
                         key,
                         &op.function_exec_info.output_type,
@@ -426,7 +426,7 @@ async fn evaluate_op_scope(
     Ok(())
 }
 
-pub async fn evaluate_source_entry<'a>(
+pub async fn evaluate_source_entry(
     plan: &ExecutionPlan,
     source_op_idx: usize,
     schema: &schema::DataSchema,
diff --git a/src/execution/indexer.rs b/src/execution/indexer.rs
index 73968dddf..199a1c215 100644
--- a/src/execution/indexer.rs
+++ b/src/execution/indexer.rs
@@ -222,7 +222,7 @@ async fn precommit_source_tracking_info(
         let curr_fp = Some(
             Fingerprinter::default()
                 .with(&field_values)?
-                .to_fingerprint(),
+                .into_fingerprint(),
         );
         let existing_target_keys = target_info.existing_keys_info.remove(&primary_key_json);
 
@@ -437,7 +437,7 @@ pub async fn evaluation_cache_on_existing_data(
     ))
 }
 
-pub async fn update_source_entry<'a>(
+pub async fn update_source_entry(
     plan: &ExecutionPlan,
     source_op_idx: usize,
     schema: &schema::DataSchema,
diff --git a/src/execution/memoization.rs b/src/execution/memoization.rs
index 58bfb2fb4..b32b6f593 100644
--- a/src/execution/memoization.rs
+++ b/src/execution/memoization.rs
@@ -133,10 +133,10 @@ impl EvaluationCache {
     }
 }
 
-pub async fn evaluate_with_cell<'a, Fut>(
-    cell: Option<&'a CacheEntryCell>,
+pub async fn evaluate_with_cell<Fut>(
+    cell: Option<&CacheEntryCell>,
     compute: impl FnOnce() -> Fut,
-) -> Result>
+) -> Result>
 where
     Fut: Future>,
 {
diff --git a/src/ops/factory_bases.rs b/src/ops/factory_bases.rs
index f44d78f1d..28cbea6e7 100644
--- a/src/ops/factory_bases.rs
+++ b/src/ops/factory_bases.rs
@@ -26,8 +26,8 @@ pub struct ResolvedOpArg {
 
 pub trait ResolvedOpArgExt: Sized {
     fn expect_type(self, expected_type: &ValueType) -> Result<Self>;
-    fn value<'a>(&self, args: &'a Vec<value::Value>) -> Result<&'a value::Value>;
-    fn take_value(&self, args: &mut Vec<value::Value>) -> Result<value::Value>;
+    fn value<'a>(&self, args: &'a [value::Value]) -> Result<&'a value::Value>;
+    fn take_value(&self, args: &mut [value::Value]) -> Result<value::Value>;
 }
 
 impl ResolvedOpArgExt for ResolvedOpArg {
@@ -43,7 +43,7 @@ impl ResolvedOpArgExt for ResolvedOpArg {
         Ok(self)
     }
 
-    fn value<'a>(&self, args: &'a Vec<value::Value>) -> Result<&'a value::Value> {
+    fn value<'a>(&self, args: &'a [value::Value]) -> Result<&'a value::Value> {
         if self.idx >= args.len() {
             api_bail!(
                 "Two few arguments, {} provided, expected at least {} for `{}`",
@@ -55,7 +55,7 @@ impl ResolvedOpArgExt for ResolvedOpArg {
         Ok(&args[self.idx])
     }
 
-    fn take_value(&self, args: &mut Vec<value::Value>) -> Result<value::Value> {
+    fn take_value(&self, args: &mut [value::Value]) -> Result<value::Value> {
         if self.idx >= args.len() {
             api_bail!(
                 "Two few arguments, {} provided, expected at least {} for `{}`",
@@ -73,7 +73,7 @@ impl ResolvedOpArgExt for Option<ResolvedOpArg> {
         self.map(|arg| arg.expect_type(expected_type)).transpose()
    }
 
-    fn value<'a>(&self, args: &'a Vec<value::Value>) -> Result<&'a value::Value> {
+    fn value<'a>(&self, args: &'a [value::Value]) -> Result<&'a value::Value> {
         Ok(self
             .as_ref()
             .map(|arg| arg.value(args))
@@ -81,7 +81,7 @@ impl ResolvedOpArgExt for Option<ResolvedOpArg> {
             .unwrap_or(&value::Value::Null))
     }
 
-    fn take_value(&self, args: &mut Vec<value::Value>) -> Result<value::Value> {
+    fn take_value(&self, args: &mut [value::Value]) -> Result<value::Value> {
         Ok(self
             .as_ref()
             .map(|arg| arg.take_value(args))
diff --git a/src/ops/storages/postgres.rs b/src/ops/storages/postgres.rs
index 7570adea5..dcedb5cc1 100644
--- a/src/ops/storages/postgres.rs
+++ b/src/ops/storages/postgres.rs
@@ -46,12 +46,10 @@ fn key_value_fields_iter<'a>(
 fn convertible_to_pgvector(vec_schema: &VectorTypeSchema) -> bool {
     if vec_schema.dimension.is_some() {
-        match &*vec_schema.element_type {
-            BasicValueType::Float32 => true,
-            BasicValueType::Float64 => true,
-            BasicValueType::Int64 => true,
-            _ => false,
-        }
+        matches!(
+            *vec_schema.element_type,
+            BasicValueType::Float32 | BasicValueType::Float64 | BasicValueType::Int64
+        )
     } else {
         false
     }
 }
@@ -468,8 +466,8 @@ pub struct SetupState {
 impl SetupState {
     fn new(
         table_id: &TableId,
-        key_fields_schema: &Vec<FieldSchema>,
-        value_fields_schema: &Vec<FieldSchema>,
+        key_fields_schema: &[FieldSchema],
+        value_fields_schema: &[FieldSchema],
         index_options: &IndexOptions,
     ) -> Self {
         Self {
diff --git a/src/py/convert.rs b/src/py/convert.rs
index c1a5174d8..84d009695 100644
--- a/src/py/convert.rs
+++ b/src/py/convert.rs
@@ -199,7 +199,7 @@ pub fn value_from_py_object<'py>(
                 .into_iter()
                 .map(|v| {
                     let mut iter = v.fields.into_iter();
-                    let key = iter.next().unwrap().to_key().into_py_result()?;
+                    let key = iter.next().unwrap().into_key().into_py_result()?;
                     Ok((
                         key,
                         value::ScopeValue(value::FieldValues {
diff --git a/src/setup/states.rs b/src/setup/states.rs
index 173637230..7dce70f04 100644
--- a/src/setup/states.rs
+++ b/src/setup/states.rs
@@ -1,3 +1,16 @@
+/// Concepts:
+/// - Resource: some setup that needs to be tracked and maintained.
+/// - Setup State: current state of a resource.
+/// - Staging Change: states changes that may not be really applied yet.
+/// - Combined Setup State: Setup State + Staging Change.
+/// - Status Check: information about changes that are being applied / need to be applied.
+///
+/// Resource hierarchy:
+/// - [resource: setup metadata table] /// - Flow
+/// - [resource: metadata]
+/// - [resource: tracking table]
+/// - Target
+/// - [resource: target-specific stuff]
 use anyhow::Result;
 use axum::async_trait;
 use indenter::indented;
@@ -15,20 +28,6 @@ use crate::execution::db_tracking_setup;
 
 const INDENT: &str = " ";
 
-/// Concepts:
-/// - Resource: some setup that needs to be tracked and maintained.
-/// - Setup State: current state of a resource.
-/// - Staging Change: states changes that may not be really applied yet.
-/// - Combined Setup State: Setup State + Staging Change.
-/// - Status Check: information about changes that are being applied / need to be applied.
-///
-/// Resource hierarchy:
-/// - [resource: setup metadata table] /// - Flow
-/// - [resource: metadata]
-/// - [resource: tracking table]
-/// - Target
-/// - [resource: target-specific stuff]
-
 pub trait StateMode: Clone + Copy {
     type State: Debug + Clone;
     type DefaultState: Debug + Clone + Default;
diff --git a/src/utils/fingerprint.rs b/src/utils/fingerprint.rs
index 5efd6e4fe..ba5b42d7c 100644
--- a/src/utils/fingerprint.rs
+++ b/src/utils/fingerprint.rs
@@ -34,7 +34,7 @@ impl serde::ser::Error for FingerprinterError {
 pub struct Fingerprint([u8; 16]);
 
 impl Fingerprint {
-    pub fn to_base64(&self) -> String {
+    pub fn to_base64(self) -> String {
         BASE64_STANDARD.encode(self.0)
     }
 
@@ -77,7 +77,7 @@ pub struct Fingerprinter {
 }
 
 impl Fingerprinter {
-    pub fn to_fingerprint(self) -> Fingerprint {
+    pub fn into_fingerprint(self) -> Fingerprint {
         Fingerprint(self.hasher.finalize().into())
     }
 
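
Note on the recurring rename in this patch (to_key -> into_key, to_fingerprint -> into_fingerprint): clippy's wrong_self_convention lint follows the Rust API naming guidelines, under which to_* methods borrow (or take a cheap Copy value by value) while into_* methods consume self. The sketch below is illustrative only; the Document and DocumentKey types are hypothetical and not part of this codebase.

    // Illustrative sketch of the naming convention behind this patch;
    // the types here are made up, not taken from the repository.

    #[derive(Clone, Copy)]
    struct Fingerprint([u8; 16]);

    struct Document {
        id: String,
    }

    struct DocumentKey(String);

    impl Document {
        // Consumes `self`, so the guideline (and the lint) asks for the
        // `into_*` prefix, mirroring the `to_key` -> `into_key` rename.
        fn into_key(self) -> DocumentKey {
            DocumentKey(self.id)
        }
    }

    impl Fingerprint {
        // `Copy` types may take `self` by value even in `to_*` methods,
        // which is why `to_base64(&self)` becomes `to_base64(self)` in
        // fingerprint.rs. Hex is used here as a dependency-free stand-in
        // for the base64 encoding done in the real code.
        fn to_base64(self) -> String {
            self.0.iter().map(|b| format!("{b:02x}")).collect()
        }
    }

    fn main() {
        let key = Document { id: "doc-1".into() }.into_key();
        let fp = Fingerprint([0u8; 16]);
        println!("{} {}", key.0, fp.to_base64());
    }

The remaining changes are the standard fixes for similar lints: matches! replaces a match that only maps arms to booleans, &[FieldSchema] and &[value::Value] replace &Vec<...> parameters (clippy's ptr_arg), and the unused <'a> lifetime parameters and the stray u32 import are simply dropped.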