Commit

chore: clippy fix
Solomon committed Jan 2, 2024
1 parent 8053fb2 commit 2d49b02
Showing 23 changed files with 132 additions and 123 deletions.
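
Nearly all of the changes below replace slice.get(0) with slice.first(), the substitution suggested by clippy's get_first lint. A minimal sketch of the pattern, written for illustration and not part of this commit's code:

    fn main() {
        let items = vec![10, 20, 30];
        // Both calls return Option<&i32>; clippy flags the index form
        // because .first() states the intent directly.
        let via_get = items.get(0);
        let via_first = items.first();
        assert_eq!(via_get, via_first);
    }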
2 changes: 1 addition & 1 deletion dozer-cli/src/cli/helper.rs
@@ -103,7 +103,7 @@ async fn load_config(
     ignore_pipe: bool,
 ) -> Result<(Config, Vec<String>), CliError> {
     let read_stdin = atty::isnt(Stream::Stdin) && !ignore_pipe;
-    let first_config_path = config_url_or_paths.get(0);
+    let first_config_path = config_url_or_paths.first();
     match first_config_path {
         None => Err(ConfigurationFilePathNotProvided),
         Some(path) => {
9 changes: 8 additions & 1 deletion dozer-cli/src/simple/orchestrator.rs
@@ -327,7 +327,14 @@ impl SimpleOrchestrator {
         match sink {
             SinkConfig::Snowflake(config) => {
                 #[cfg(not(feature = "snowflake"))]
-                panic!("Dozer must be compiled with the \"snowflake\" feature to run the Snowflake sink");
+                {
+                    let _ = shutdown;
+                    let _ = config;
+                    let _ = app_server_url;
+                    futures.push(futures::future::ready(Ok::<(), OrchestrationError>(())));
+
+                    panic!("Dozer must be compiled with the \"snowflake\" feature to run the Snowflake sink");
+                }
                 #[cfg(feature = "snowflake")]
                 {
                     let mut sink = dozer_sinks::snowflake::SnowflakeSink::new(
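
Without the snowflake feature, the new block consumes the otherwise-unused bindings with let _ and pushes a ready future before panicking, presumably so that build still type-checks without unused-variable warnings. A standalone sketch of the same cfg-gating idiom (run_sink and its config parameter are illustrative, not Dozer's API):

    fn run_sink(config: String) {
        #[cfg(not(feature = "snowflake"))]
        {
            // Consume the binding so builds without the feature do not
            // warn about an unused variable.
            let _ = config;
            panic!("compile with the \"snowflake\" feature to run this sink");
        }
        #[cfg(feature = "snowflake")]
        println!("starting snowflake sink for {config}");
    }

    fn main() {
        run_sink("analytics".to_string());
    }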
2 changes: 1 addition & 1 deletion dozer-ingestion/ethereum/src/log/connector.rs
@@ -71,7 +71,7 @@ impl EthLogConnector {
             .map(|t| vec![H256::from_str(t).unwrap()])
             .collect();
         builder.topics(
-            topics.get(0).cloned(),
+            topics.first().cloned(),
             topics.get(1).cloned(),
             topics.get(2).cloned(),
             topics.get(3).cloned(),
2 changes: 1 addition & 1 deletion dozer-ingestion/ethereum/src/log/helper.rs
@@ -75,7 +75,7 @@ pub fn decode_event(
     // Topics 0, 1, 2 should be name, buyer, seller in most cases
     let name = log
         .topics
-        .get(0)
+        .first()
         .expect("name is expected")
         .to_owned()
         .to_string();
4 changes: 2 additions & 2 deletions dozer-ingestion/kafka/src/debezium/mapper.rs
@@ -383,7 +383,7 @@ mod tests {
         fields_map.insert("weight".to_string(), weight_struct);

         let fields = convert_value_to_schema(value, &schema, &fields_map).unwrap();
-        assert_eq!(*fields.get(0).unwrap(), Field::from(1));
+        assert_eq!(*fields.first().unwrap(), Field::from(1));
         assert_eq!(*fields.get(1).unwrap(), Field::from("Product".to_string()));
         assert_eq!(
             *fields.get(2).unwrap(),
@@ -440,7 +440,7 @@ mod tests {
         fields_map.insert("name".to_string(), name_struct);

         let fields = convert_value_to_schema(value, &schema, &fields_map).unwrap();
-        assert_eq!(*fields.get(0).unwrap(), Field::from(1));
+        assert_eq!(*fields.first().unwrap(), Field::from(1));
         assert_eq!(*fields.get(1).unwrap(), Field::Null);
     }
 }
2 changes: 1 addition & 1 deletion dozer-ingestion/kafka/src/debezium/schema_registry.rs
@@ -97,7 +97,7 @@ impl SchemaRegistry {
         let sr_settings = SrSettings::new(schema_registry_url);
         match table_names {
             None => Ok(vec![]),
-            Some(tables) => match tables.get(0) {
+            Some(tables) => match tables.first() {
                 None => Ok(vec![]),
                 Some(table) => {
                     let key_result =
66 changes: 33 additions & 33 deletions dozer-ingestion/mysql/src/binlog.rs
@@ -1016,6 +1016,39 @@ impl<'a> BinlogRowsEvent<'a> {
     }
 }

+trait ByteSliceExt {
+    fn trim_start(&self) -> &[u8];
+    fn starts_with_case_insensitive(&self, prefix: &[u8]) -> bool;
+}
+
+impl ByteSliceExt for [u8] {
+    fn trim_start(&self) -> &[u8] {
+        for i in 0..self.len() {
+            if !self[i].is_ascii_whitespace() {
+                return &self[i..];
+            }
+        }
+        &[]
+    }
+
+    fn starts_with_case_insensitive(&self, prefix: &[u8]) -> bool {
+        if self.len() < prefix.len() {
+            false
+        } else {
+            self[..prefix.len()].eq_ignore_ascii_case(prefix)
+        }
+    }
+}
+
+fn object_name_to_string(object_name: &sqlparser::ast::ObjectName) -> String {
+    object_name
+        .0
+        .iter()
+        .map(|ident| ident.value.as_str())
+        .collect::<Vec<_>>()
+        .join(".")
+}
+
 #[cfg(test)]
 mod tests {

@@ -1186,36 +1219,3 @@ mod tests {
         );
     }
 }
-
-trait ByteSliceExt {
-    fn trim_start(&self) -> &[u8];
-    fn starts_with_case_insensitive(&self, prefix: &[u8]) -> bool;
-}
-
-impl ByteSliceExt for [u8] {
-    fn trim_start(&self) -> &[u8] {
-        for i in 0..self.len() {
-            if !self[i].is_ascii_whitespace() {
-                return &self[i..];
-            }
-        }
-        &[]
-    }
-
-    fn starts_with_case_insensitive(&self, prefix: &[u8]) -> bool {
-        if self.len() < prefix.len() {
-            false
-        } else {
-            self[..prefix.len()].eq_ignore_ascii_case(prefix)
-        }
-    }
-}
-
-fn object_name_to_string(object_name: &sqlparser::ast::ObjectName) -> String {
-    object_name
-        .0
-        .iter()
-        .map(|ident| ident.value.as_str())
-        .collect::<Vec<_>>()
-        .join(".")
-}
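
The binlog.rs hunk is a pure move: ByteSliceExt and object_name_to_string now precede the tests module instead of following it. For reference, a self-contained usage sketch of the trait, with the trait body copied from the hunk and a main function added for illustration:

    trait ByteSliceExt {
        fn trim_start(&self) -> &[u8];
        fn starts_with_case_insensitive(&self, prefix: &[u8]) -> bool;
    }

    impl ByteSliceExt for [u8] {
        fn trim_start(&self) -> &[u8] {
            // Skip leading ASCII whitespace and return the rest of the slice.
            for i in 0..self.len() {
                if !self[i].is_ascii_whitespace() {
                    return &self[i..];
                }
            }
            &[]
        }

        fn starts_with_case_insensitive(&self, prefix: &[u8]) -> bool {
            if self.len() < prefix.len() {
                false
            } else {
                self[..prefix.len()].eq_ignore_ascii_case(prefix)
            }
        }
    }

    fn main() {
        // Hypothetical SQL fragment; trim the leading whitespace, then
        // match the keyword case-insensitively.
        let stmt: &[u8] = b"  \tALTER TABLE users ADD COLUMN age INT";
        assert!(stmt.trim_start().starts_with_case_insensitive(b"alter table"));
    }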
14 changes: 7 additions & 7 deletions dozer-ingestion/object-store/src/tests/local_storage_tests.rs
@@ -23,10 +23,10 @@ async fn test_get_schema_of_parquet() {

     let connector = ObjectStoreConnector::new(local_storage);
     let (_, schemas) = connector.list_all_schemas().await.unwrap();
-    let schema = schemas.get(0).unwrap();
+    let schema = schemas.first().unwrap();

     let fields = schema.schema.fields.clone();
-    assert_eq!(fields.get(0).unwrap().typ, FieldType::Int);
+    assert_eq!(fields.first().unwrap().typ, FieldType::Int);
     assert_eq!(fields.get(1).unwrap().typ, FieldType::Boolean);
     assert_eq!(fields.get(2).unwrap().typ, FieldType::Int);
     assert_eq!(fields.get(3).unwrap().typ, FieldType::Int);
@@ -45,10 +45,10 @@ async fn test_get_schema_of_csv() {

     let connector = ObjectStoreConnector::new(local_storage);
     let (_, schemas) = connector.list_all_schemas().await.unwrap();
-    let schema = schemas.get(0).unwrap();
+    let schema = schemas.first().unwrap();

     let fields = schema.schema.fields.clone();
-    assert_eq!(fields.get(0).unwrap().typ, FieldType::Int);
+    assert_eq!(fields.first().unwrap().typ, FieldType::Int);
     assert_eq!(fields.get(1).unwrap().typ, FieldType::String);
     assert_eq!(fields.get(2).unwrap().typ, FieldType::String);
     assert_eq!(fields.get(3).unwrap().typ, FieldType::Int);
@@ -213,7 +213,7 @@ fn test_csv_read() {
     test_type_conversion!(values, 7, Field::String(_));
     test_type_conversion!(values, 8, Field::String(_));

-    if let Field::Int(id) = values.get(0).unwrap() {
+    if let Field::Int(id) = values.first().unwrap() {
         if *id == 2 || *id == 12 {
             test_type_conversion!(values, 9, Field::Float(_));
         } else {
@@ -269,7 +269,7 @@ fn test_csv_read_marker() {
     test_type_conversion!(values, 7, Field::String(_));
     test_type_conversion!(values, 8, Field::String(_));

-    if let Field::Int(id) = values.get(0).unwrap() {
+    if let Field::Int(id) = values.first().unwrap() {
         if *id == 2 || *id == 12 {
             test_type_conversion!(values, 9, Field::Float(_));
         } else {
@@ -325,7 +325,7 @@ fn test_csv_read_only_one_marker() {
     test_type_conversion!(values, 7, Field::String(_));
     test_type_conversion!(values, 8, Field::String(_));

-    if let Field::Int(id) = values.get(0).unwrap() {
+    if let Field::Int(id) = values.first().unwrap() {
         if *id == 2 || *id == 12 {
             test_type_conversion!(values, 9, Field::Float(_));
         } else {
4 changes: 2 additions & 2 deletions dozer-ingestion/postgres/src/replication_slot_helper.rs
@@ -57,7 +57,7 @@ impl ReplicationSlotHelper {
             .map_err(PostgresConnectorError::FetchReplicationSlotError)?;

         Ok(matches!(
-            slot_query_row.get(0),
+            slot_query_row.first(),
             Some(SimpleQueryMessage::Row(_))
         ))
     }
@@ -75,7 +75,7 @@ impl ReplicationSlotHelper {
             .await
             .map_err(PostgresConnectorError::FetchReplicationSlotError)?;

-        let column_index = if let Some(SimpleQueryMessage::Row(row)) = slots.get(0) {
+        let column_index = if let Some(SimpleQueryMessage::Row(row)) = slots.first() {
             row.columns().iter().position(|c| c.name() == "slot_name")
         } else {
             None
44 changes: 22 additions & 22 deletions dozer-ingestion/postgres/src/schema/sorter.rs
@@ -127,11 +127,11 @@ mod tests {
         ];

         let result = sort_fields(&postgres_table, &expected_order).unwrap();
-        assert_eq!(result.get(0).unwrap().0.name, "first field");
+        assert_eq!(result.first().unwrap().0.name, "first field");
         assert_eq!(result.get(1).unwrap().0.name, "second field");
         assert_eq!(result.get(2).unwrap().0.name, "third field");

-        assert!(result.get(0).unwrap().1);
+        assert!(result.first().unwrap().1);
         assert!(!result.get(1).unwrap().1);
         assert!(!result.get(2).unwrap().1);
     }
@@ -153,28 +153,28 @@

         let result = sort_schemas(expected_table_order, &mapped_tables).unwrap();
         assert_eq!(
-            result.get(0).unwrap().1.fields().get(0).unwrap().name,
+            result.first().unwrap().1.fields().first().unwrap().name,
             postgres_table.get_field(0).unwrap().name
         );
         assert_eq!(
-            result.get(0).unwrap().1.fields().get(1).unwrap().name,
+            result.first().unwrap().1.fields().get(1).unwrap().name,
             postgres_table.get_field(1).unwrap().name
         );
         assert_eq!(
-            result.get(0).unwrap().1.fields().get(2).unwrap().name,
+            result.first().unwrap().1.fields().get(2).unwrap().name,
             postgres_table.get_field(2).unwrap().name
         );

         assert_eq!(
-            result.get(0).unwrap().1.is_index_field(0),
+            result.first().unwrap().1.is_index_field(0),
             postgres_table.is_index_field(0)
         );
         assert_eq!(
-            result.get(0).unwrap().1.is_index_field(1),
+            result.first().unwrap().1.is_index_field(1),
             postgres_table.is_index_field(1)
         );
         assert_eq!(
-            result.get(0).unwrap().1.is_index_field(2),
+            result.first().unwrap().1.is_index_field(2),
             postgres_table.is_index_field(2)
         );
     }
@@ -197,10 +197,10 @@

         let result = sort_schemas(expected_table_order, &mapped_tables).unwrap();
         assert_eq!(
-            &result.get(0).unwrap().1.fields().get(0).unwrap().name,
-            columns_order.get(0).unwrap()
+            &result.first().unwrap().1.fields().first().unwrap().name,
+            columns_order.first().unwrap()
         );
-        assert_eq!(result.get(0).unwrap().1.fields().len(), 1);
+        assert_eq!(result.first().unwrap().1.fields().len(), 1);
     }

     #[test]
@@ -225,18 +225,18 @@

         let result = sort_schemas(expected_table_order, &mapped_tables).unwrap();
         assert_eq!(
-            &result.get(0).unwrap().1.fields().get(0).unwrap().name,
-            columns_order.get(0).unwrap()
+            &result.first().unwrap().1.fields().first().unwrap().name,
+            columns_order.first().unwrap()
         );
         assert_eq!(
-            &result.get(0).unwrap().1.fields().get(1).unwrap().name,
+            &result.first().unwrap().1.fields().get(1).unwrap().name,
             columns_order.get(1).unwrap()
         );
         assert_eq!(
-            &result.get(0).unwrap().1.fields().get(2).unwrap().name,
+            &result.first().unwrap().1.fields().get(2).unwrap().name,
             columns_order.get(2).unwrap()
         );
-        assert_eq!(result.get(0).unwrap().1.fields().len(), 3);
+        assert_eq!(result.first().unwrap().1.fields().len(), 3);
     }

     #[test]
@@ -277,20 +277,20 @@
         ];

         let result = sort_schemas(expected_table_order, &mapped_tables).unwrap();
-        let first_table_after_sort = result.get(0).unwrap();
+        let first_table_after_sort = result.first().unwrap();
         let second_table_after_sort = result.get(1).unwrap();

         assert_eq!(
             first_table_after_sort.0 .1,
-            expected_table_order.get(0).unwrap().name
+            expected_table_order.first().unwrap().name
         );
         assert_eq!(
             second_table_after_sort.0 .1,
             expected_table_order.get(1).unwrap().name
         );
         assert_eq!(
-            &first_table_after_sort.1.fields().get(0).unwrap().name,
-            columns_order_1.get(0).unwrap()
+            &first_table_after_sort.1.fields().first().unwrap().name,
+            columns_order_1.first().unwrap()
         );
         assert_eq!(
             &first_table_after_sort.1.fields().get(1).unwrap().name,
@@ -301,8 +301,8 @@
             columns_order_1.get(2).unwrap()
         );
         assert_eq!(
-            &second_table_after_sort.1.fields().get(0).unwrap().name,
-            columns_order_2.get(0).unwrap()
+            &second_table_after_sort.1.fields().first().unwrap().name,
+            columns_order_2.first().unwrap()
         );
         assert_eq!(
             &second_table_after_sort.1.fields().get(1).unwrap().name,
6 changes: 3 additions & 3 deletions dozer-ingestion/postgres/src/schema/tests.rs
@@ -38,7 +38,7 @@ async fn test_connector_get_tables() {
     let schema_helper = SchemaHelper::new(client.postgres_config.clone(), None);
     let result = schema_helper.get_tables(None).await.unwrap();

-    let table = result.get(0).unwrap();
+    let table = result.first().unwrap();
     assert_eq!(table_name, table.name);
     assert!(assert_vec_eq(
         &[
@@ -76,7 +76,7 @@ async fn test_connector_get_schema_with_selected_columns() {
     };
     let result = schema_helper.get_tables(Some(&[table_info])).await.unwrap();

-    let table = result.get(0).unwrap();
+    let table = result.first().unwrap();
     assert_eq!(table_name, table.name);
     assert!(assert_vec_eq(
         &["name".to_string(), "id".to_string()],
@@ -109,7 +109,7 @@ async fn test_connector_get_schema_without_selected_columns() {
     };
     let result = schema_helper.get_tables(Some(&[table_info])).await.unwrap();

-    let table = result.get(0).unwrap();
+    let table = result.first().unwrap();
     assert_eq!(table_name, table.name.clone());
     assert!(assert_vec_eq(
         &[
2 changes: 2 additions & 0 deletions dozer-sinks/snowflake/src/lib.rs
@@ -336,6 +336,7 @@ impl SnowflakeSink {
         let mut delete = Vec::new();
         let mut insert = Vec::new();

+        #[derive(Debug, Clone, Copy)]
         enum OpKind {
             Delete,
             Insert,
@@ -370,6 +371,7 @@ impl SnowflakeSink {
             OpKind::Update | OpKind::None => (),
         }
         previous_op_kind = OpKind::None;
+        let _ = previous_op_kind;
     }};
 }
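
Both additions here look lint-driven: the macro's final assignment to previous_op_kind is never read on some expansion paths, so the compiler's unused_assignments warning would fire unless the value is explicitly consumed, and the Copy derive makes that consumption a trivial copy. A minimal standalone reproduction of the idiom (illustrative, not Dozer's code):

    #[derive(Debug, Clone, Copy)]
    enum OpKind {
        Insert,
        None,
    }

    fn main() {
        let mut previous_op_kind = OpKind::Insert;
        println!("{previous_op_kind:?}"); // read the initial value
        previous_op_kind = OpKind::None; // final assignment, never read again...
        let _ = previous_op_kind; // ...unless explicitly consumed like this
    }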
2 changes: 1 addition & 1 deletion dozer-sql/expression/src/execution.rs
@@ -451,7 +451,7 @@ impl Expression {
             results,
             else_result: _,
         } => {
-            let typ = results.get(0).unwrap().get_type(schema)?;
+            let typ = results.first().unwrap().get_type(schema)?;
             Ok(ExpressionType::new(
                 typ.return_type,
                 true,
