clippy (#2349)
* fix clippy

* fix clippy

* fix duplicate imports
PSeitz authored Apr 9, 2024
1 parent 1e9fc51 commit 74940e9
Showing 44 changed files with 27 additions and 74 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -11,7 +11,7 @@ repository = "https://github.com/quickwit-oss/tantivy"
 readme = "README.md"
 keywords = ["search", "information", "retrieval"]
 edition = "2021"
-rust-version = "1.62"
+rust-version = "1.63"
 exclude = ["benches/*.json", "benches/*.txt"]

 [dependencies]
2 changes: 1 addition & 1 deletion columnar/src/column_index/merge/shuffled.rs
@@ -140,7 +140,7 @@ mod tests {
     #[test]
     fn test_merge_column_index_optional_shuffle() {
         let optional_index: ColumnIndex = OptionalIndex::for_test(2, &[0]).into();
-        let column_indexes = vec![optional_index, ColumnIndex::Full];
+        let column_indexes = [optional_index, ColumnIndex::Full];
         let row_addrs = vec![
             RowAddr {
                 segment_ord: 0u32,
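The `vec![…]` → `[…]` change is what clippy's `useless_vec` lint flags: when a collection is only borrowed as a slice and never resized, a plain stack array works and skips the heap allocation. A minimal sketch with made-up values, not tantivy code:

```rust
fn sum(values: &[u64]) -> u64 {
    values.iter().sum()
}

fn main() {
    // clippy::useless_vec: this Vec is only ever borrowed as &[u64],
    // so the heap allocation buys nothing...
    let heap_backed = vec![1u64, 2, 3];
    // ...while an array lives on the stack and coerces to &[u64] just as well.
    let stack_backed = [1u64, 2, 3];
    assert_eq!(sum(&heap_backed), sum(&stack_backed));
}
```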
4 changes: 2 additions & 2 deletions columnar/src/column_values/mod.rs
@@ -75,7 +75,7 @@ pub trait ColumnValues<T: PartialOrd = u64>: Send + Sync + DowncastSync {
         let out_and_idx_chunks = output
             .chunks_exact_mut(4)
             .into_remainder()
-            .into_iter()
+            .iter_mut()
             .zip(indexes.chunks_exact(4).remainder());
         for (out, idx) in out_and_idx_chunks {
             *out = self.get_val(*idx);
@@ -102,7 +102,7 @@ pub trait ColumnValues<T: PartialOrd = u64>: Send + Sync + DowncastSync {
         let out_and_idx_chunks = output
             .chunks_exact_mut(4)
             .into_remainder()
-            .into_iter()
+            .iter_mut()
             .zip(indexes.chunks_exact(4).remainder());
         for (out, idx) in out_and_idx_chunks {
             *out = Some(self.get_val(*idx));
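Both hunks touch the same pattern: the trait's default method handles lookups in blocks of four, then walks the tail. `into_remainder()` returns the leftover `&mut [T]`, and calling `.into_iter()` on it yields the same iterator as `.iter_mut()` — which is what clippy's `into_iter_on_ref` lint points out. A self-contained sketch of the block-plus-remainder shape, with a stand-in operation instead of tantivy's `get_val`:

```rust
fn double_all(values: &mut [u64]) {
    let mut chunks = values.chunks_exact_mut(4);
    // Hot path: full blocks of 4, friendly to unrolling/auto-vectorization.
    for chunk in &mut chunks {
        for v in chunk {
            *v *= 2;
        }
    }
    // `into_remainder()` yields the trailing `&mut [u64]` of length 0..=3;
    // `.into_iter()` on that mutable slice is just `.iter_mut()` in disguise.
    for v in chunks.into_remainder().iter_mut() {
        *v *= 2;
    }
}

fn main() {
    let mut data = [1u64, 2, 3, 4, 5, 6];
    double_all(&mut data);
    assert_eq!(data, [2, 4, 6, 8, 10, 12]);
}
```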
2 changes: 1 addition & 1 deletion columnar/src/column_values/u128_based/compact_space/mod.rs
@@ -148,7 +148,7 @@ impl CompactSpace {
             .binary_search_by_key(&compact, |range_mapping| range_mapping.compact_start)
             // Correctness: Overflow. The first range starts at compact space 0, the error from
             // binary search can never be 0
-            .map_or_else(|e| e - 1, |v| v);
+            .unwrap_or_else(|e| e - 1);

         let range_mapping = &self.ranges_mapping[pos];
         let diff = compact - range_mapping.compact_start;
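The comment above carries the invariant that makes this safe: `binary_search_by_key` returns `Err(i)` with the insertion point when the key is absent, so `i - 1` is the range containing `compact`, and `Err(0)` cannot occur because the first range starts at compact space 0. Since the old code mapped the `Ok` value through the identity, `map_or_else(|e| e - 1, |v| v)` is just `unwrap_or_else(|e| e - 1)`. A toy illustration with plain numbers rather than tantivy's types:

```rust
fn main() {
    // compact_start of each range; the first is always 0 by construction.
    let starts = [0u64, 10, 25, 100];
    let compact = 30u64;

    let pos = starts
        .binary_search(&compact)
        // Ok(i): `compact` is exactly a range start.
        // Err(i): insertion point, so the containing range is i - 1.
        // Err(0) is impossible because starts[0] == 0 <= compact.
        .unwrap_or_else(|i| i - 1);

    assert_eq!(pos, 2); // 25 <= 30 < 100
}
```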
3 changes: 1 addition & 2 deletions common/src/serialize.rs
@@ -290,8 +290,7 @@ impl<'a> BinarySerializable for Cow<'a, [u8]> {
 #[cfg(test)]
 pub mod test {

-    use super::{VInt, *};
-    use crate::serialize::BinarySerializable;
+    use super::*;
     pub fn fixed_size_test<O: BinarySerializable + FixedSize + Default>() {
         let mut buffer = Vec::new();
         O::default().serialize(&mut buffer).unwrap();
4 changes: 1 addition & 3 deletions src/aggregation/bucket/histogram/histogram.rs
@@ -1,7 +1,5 @@
 use std::cmp::Ordering;

-use columnar::ColumnType;
-use itertools::Itertools;
 use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use tantivy_bitpacker::minmax;
@@ -17,7 +15,7 @@ use crate::aggregation::intermediate_agg_result::{
     IntermediateHistogramBucketEntry,
 };
 use crate::aggregation::segment_agg_result::{
-    build_segment_agg_collector, AggregationLimits, SegmentAggregationCollector,
+    build_segment_agg_collector, SegmentAggregationCollector,
 };
 use crate::aggregation::*;
 use crate::TantivyError;
3 changes: 0 additions & 3 deletions src/aggregation/bucket/range.rs
@@ -1,7 +1,6 @@
 use std::fmt::Debug;
 use std::ops::Range;

-use columnar::{ColumnType, MonotonicallyMappableToU64};
 use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};

@@ -450,7 +449,6 @@ pub(crate) fn range_to_key(range: &Range<u64>, field_type: &ColumnType) -> crate
 #[cfg(test)]
 mod tests {

-    use columnar::MonotonicallyMappableToU64;
     use serde_json::Value;

     use super::*;
@@ -459,7 +457,6 @@ mod tests {
         exec_request, exec_request_with_query, get_test_index_2_segments,
         get_test_index_with_num_docs,
     };
-    use crate::aggregation::AggregationLimits;

     pub fn get_collector_from_ranges(
         ranges: Vec<RangeAggregationRange>,
1 change: 0 additions & 1 deletion src/aggregation/metric/percentiles.rs
@@ -1,6 +1,5 @@
 use std::fmt::Debug;

-use columnar::ColumnType;
 use serde::{Deserialize, Serialize};

 use super::*;
1 change: 0 additions & 1 deletion src/aggregation/metric/stats.rs
@@ -1,4 +1,3 @@
-use columnar::ColumnType;
 use serde::{Deserialize, Serialize};

 use super::*;
1 change: 0 additions & 1 deletion src/aggregation/mod.rs
@@ -417,7 +417,6 @@ mod tests {
     use time::OffsetDateTime;

     use super::agg_req::Aggregations;
-    use super::segment_agg_result::AggregationLimits;
     use super::*;
     use crate::indexer::NoMergePolicy;
     use crate::query::{AllQuery, TermQuery};
2 changes: 1 addition & 1 deletion src/collector/histogram_collector.rs
@@ -160,7 +160,7 @@ mod tests {
     use super::{add_vecs, HistogramCollector, HistogramComputer};
     use crate::schema::{Schema, FAST};
     use crate::time::{Date, Month};
-    use crate::{doc, query, DateTime, Index};
+    use crate::{query, DateTime, Index};

     #[test]
     fn test_add_histograms_simple() {
6 changes: 1 addition & 5 deletions src/collector/tests.rs
@@ -1,15 +1,11 @@
-use columnar::{BytesColumn, Column};
-
 use super::*;
 use crate::collector::{Count, FilterCollector, TopDocs};
 use crate::index::SegmentReader;
 use crate::query::{AllQuery, QueryParser};
 use crate::schema::{Schema, FAST, TEXT};
 use crate::time::format_description::well_known::Rfc3339;
 use crate::time::OffsetDateTime;
-use crate::{
-    doc, DateTime, DocAddress, DocId, Index, Score, Searcher, SegmentOrdinal, TantivyDocument,
-};
+use crate::{DateTime, DocAddress, Index, Searcher, TantivyDocument};

 pub const TEST_COLLECTOR_WITH_SCORE: TestCollector = TestCollector {
     compute_score: true,
1 change: 0 additions & 1 deletion src/directory/composite_file.rs
@@ -1,6 +1,5 @@
 use std::collections::HashMap;
 use std::io::{self, Read, Write};
-use std::iter::ExactSizeIterator;
 use std::ops::Range;

 use common::{BinarySerializable, CountingWriter, HasLen, VInt};
2 changes: 1 addition & 1 deletion src/directory/directory.rs
@@ -1,5 +1,4 @@
 use std::io::Write;
-use std::marker::{Send, Sync};
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use std::time::Duration;
@@ -40,6 +39,7 @@ impl RetryPolicy {
 /// The `DirectoryLock` is an object that represents a file lock.
 ///
 /// It is associated with a lock file, that gets deleted on `Drop.`
+#[allow(dead_code)]
 pub struct DirectoryLock(Box<dyn Send + Sync + 'static>);

 struct DirectoryLockGuard {
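`DirectoryLock` holds its boxed value purely so that dropping it releases the lock; the field is never read, so the compiler's `dead_code` analysis flags it even though the `Drop` side effect is the whole point. The same reasoning applies to the `#[allow(dead_code)]` added to `WatchHandle` and `FlatMapWithBufferIter` further down. A minimal sketch of the guard pattern, with hypothetical names:

```rust
// A guard whose only job is to run cleanup when it goes out of scope.
// The field is write-only from the compiler's point of view, hence the allow.
#[allow(dead_code)]
struct LockGuard(Box<dyn Send + Sync + 'static>);

struct ReleaseOnDrop;

impl Drop for ReleaseOnDrop {
    fn drop(&mut self) {
        // The real type would e.g. delete the lock file here.
        println!("lock released");
    }
}

fn main() {
    let _guard = LockGuard(Box::new(ReleaseOnDrop));
    println!("holding lock");
} // `_guard` dropped here -> "lock released"
```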
2 changes: 1 addition & 1 deletion src/directory/tests.rs
@@ -1,6 +1,6 @@
 use std::io::Write;
 use std::mem;
-use std::path::{Path, PathBuf};
+use std::path::Path;
 use std::sync::atomic::Ordering::SeqCst;
 use std::sync::atomic::{AtomicBool, AtomicUsize};
 use std::sync::Arc;
1 change: 1 addition & 0 deletions src/directory/watch_event_router.rs
@@ -32,6 +32,7 @@ pub struct WatchCallbackList {
 /// file change is detected.
 #[must_use = "This `WatchHandle` controls the lifetime of the watch and should therefore be used."]
 #[derive(Clone)]
+#[allow(dead_code)]
 pub struct WatchHandle(Arc<WatchCallback>);

 impl WatchHandle {
2 changes: 1 addition & 1 deletion src/fastfield/mod.rs
@@ -79,7 +79,7 @@ mod tests {
     use std::ops::{Range, RangeInclusive};
     use std::path::Path;

-    use columnar::{Column, MonotonicallyMappableToU64, StrColumn};
+    use columnar::StrColumn;
     use common::{ByteCount, HasLen, TerminatingWrite};
     use once_cell::sync::Lazy;
     use rand::prelude::SliceRandom;
2 changes: 1 addition & 1 deletion src/index/segment_id.rs
@@ -1,4 +1,4 @@
-use std::cmp::{Ord, Ordering};
+use std::cmp::Ordering;
 use std::error::Error;
 use std::fmt;
 use std::str::FromStr;
4 changes: 2 additions & 2 deletions src/index/segment_reader.rs
@@ -516,8 +516,8 @@ impl fmt::Debug for SegmentReader {
 mod test {
     use super::*;
     use crate::index::Index;
-    use crate::schema::{Schema, SchemaBuilder, Term, STORED, TEXT};
-    use crate::{DocId, IndexWriter};
+    use crate::schema::{SchemaBuilder, Term, STORED, TEXT};
+    use crate::IndexWriter;

     #[test]
     fn test_merge_field_meta_data_same() {
1 change: 1 addition & 0 deletions src/indexer/flat_map_with_buffer.rs
@@ -22,6 +22,7 @@
     }
 }

+#[allow(dead_code)]
 pub trait FlatMapWithBufferIter: Iterator {
     /// Function similar to `flat_map`, but allows reusing a shared `Vec`.
     fn flat_map_with_buffer<F, T>(self, fill_buffer: F) -> FlatMapWithBuffer<T, F, Self>
1 change: 0 additions & 1 deletion src/indexer/log_merge_policy.rs
@@ -145,7 +145,6 @@ mod tests {

     use super::*;
     use crate::index::SegmentMetaInventory;
-    use crate::indexer::merge_policy::MergePolicy;
     use crate::schema::INDEXED;
     use crate::{schema, SegmentId};

1 change: 0 additions & 1 deletion src/indexer/merge_policy.rs
@@ -39,7 +39,6 @@ impl MergePolicy for NoMergePolicy {
 pub mod tests {

     use super::*;
-    use crate::index::{SegmentId, SegmentMeta};

     /// `MergePolicy` useful for test purposes.
     ///
2 changes: 1 addition & 1 deletion src/indexer/merger.rs
@@ -576,7 +576,7 @@ impl IndexMerger {
                 //
                 // Overall the reliable way to know if we have actual frequencies loaded or not
                 // is to check whether the actual decoded array is empty or not.
-                if has_term_freq != !postings.block_cursor.freqs().is_empty() {
+                if has_term_freq == postings.block_cursor.freqs().is_empty() {
                     return Err(DataCorruption::comment_only(
                         "Term freqs are inconsistent across segments",
                     )
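This is the one behavioral-looking change in the commit, but it is a pure boolean simplification (clippy's `nonminimal_bool`): for booleans, `a != !b` is exactly `a == b`, so the merge still errors precisely when one segment claims term frequencies and the decoded array disagrees. A quick exhaustive check:

```rust
fn main() {
    // All four cases: `a != !b` and `a == b` always agree.
    for a in [false, true] {
        for b in [false, true] {
            assert_eq!(a != !b, a == b);
        }
    }
}
```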
2 changes: 0 additions & 2 deletions src/postings/compression/mod.rs
@@ -14,7 +14,6 @@ pub fn compressed_block_size(num_bits: u8) -> usize {
 pub struct BlockEncoder {
     bitpacker: BitPacker4x,
     pub output: [u8; COMPRESSED_BLOCK_MAX_SIZE],
-    pub output_len: usize,
 }

 impl Default for BlockEncoder {
@@ -28,7 +27,6 @@ impl BlockEncoder {
         BlockEncoder {
             bitpacker: BitPacker4x::new(),
             output: [0u8; COMPRESSED_BLOCK_MAX_SIZE],
-            output_len: 0,
         }
     }

2 changes: 0 additions & 2 deletions src/postings/skip.rs
@@ -1,5 +1,3 @@
-use std::convert::TryInto;
-
 use crate::directory::OwnedBytes;
 use crate::postings::compression::{compressed_block_size, COMPRESSION_BLOCK_SIZE};
 use crate::query::Bm25Weight;
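This deletion and several sibling ones (`std::convert::TryInto` in term.rs and compression_lz4_block.rs, `std::iter::ExactSizeIterator`, `std::cmp::Ord`, `std::marker::{Send, Sync}` elsewhere) are the same fix: `TryInto` entered the prelude with the 2021 edition, and the others have always been in the standard prelude, so with `edition = "2021"` in Cargo.toml each explicit `use` is redundant. For instance, this compiles with no imports at all:

```rust
fn main() {
    // TryInto is in the Rust 2021 prelude; no `use std::convert::TryInto;` needed.
    let clamped: u8 = 300u32.try_into().unwrap_or(u8::MAX);
    assert_eq!(clamped, 255);
}
```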
1 change: 0 additions & 1 deletion src/postings/term_info.rs
@@ -1,5 +1,4 @@
 use std::io;
-use std::iter::ExactSizeIterator;
 use std::ops::Range;

 use common::{BinarySerializable, FixedSize};
2 changes: 1 addition & 1 deletion src/query/exist_query.rs
@@ -149,7 +149,7 @@ mod tests {
     use crate::query::exist_query::ExistsQuery;
     use crate::query::{BooleanQuery, RangeQuery};
     use crate::schema::{Facet, FacetOptions, Schema, FAST, INDEXED, STRING, TEXT};
-    use crate::{doc, Index, Searcher};
+    use crate::{Index, Searcher};

     #[test]
     fn test_exists_query_simple() -> crate::Result<()> {
2 changes: 1 addition & 1 deletion src/query/fuzzy_query.rs
@@ -84,7 +84,7 @@ pub struct FuzzyTermQuery {
     distance: u8,
     /// Should a transposition cost 1 or 2?
     transposition_cost_one: bool,
-    ///
+    /// is a starts with query
     prefix: bool,
 }

2 changes: 1 addition & 1 deletion src/query/range_query/range_query.rs
@@ -477,7 +477,7 @@ mod tests {
     use crate::schema::{
         Field, IntoIpv6Addr, Schema, TantivyDocument, FAST, INDEXED, STORED, TEXT,
     };
-    use crate::{doc, Index, IndexWriter};
+    use crate::{Index, IndexWriter};

     #[test]
     fn test_range_query_simple() -> crate::Result<()> {
2 changes: 1 addition & 1 deletion src/query/term_query/term_query.rs
@@ -139,7 +139,7 @@ mod tests {
     use crate::collector::{Count, TopDocs};
     use crate::query::{Query, QueryParser, TermQuery};
     use crate::schema::{IndexRecordOption, IntoIpv6Addr, Schema, INDEXED, STORED};
-    use crate::{doc, Index, IndexWriter, Term};
+    use crate::{Index, IndexWriter, Term};

     #[test]
     fn search_ip_test() {
3 changes: 1 addition & 2 deletions src/query/vec_docset.rs
@@ -53,8 +53,7 @@ impl HasLen for VecDocSet {
 pub mod tests {

     use super::*;
-    use crate::docset::{DocSet, COLLECT_BLOCK_BUFFER_LEN};
-    use crate::DocId;
+    use crate::docset::COLLECT_BLOCK_BUFFER_LEN;

     #[test]
     pub fn test_vec_postings() {
1 change: 0 additions & 1 deletion src/schema/document/de.rs
@@ -819,7 +819,6 @@ mod tests {
     use crate::schema::document::existing_type_impls::JsonObjectIter;
     use crate::schema::document::se::BinaryValueSerializer;
     use crate::schema::document::{ReferenceValue, ReferenceValueLeaf};
-    use crate::schema::OwnedValue;

     fn serialize_value<'a>(value: ReferenceValue<'a, &'a serde_json::Value>) -> Vec<u8> {
         let mut writer = Vec::new();
1 change: 0 additions & 1 deletion src/schema/document/default_document.rs
@@ -256,7 +256,6 @@ impl DocParsingError {

 #[cfg(test)]
 mod tests {
-    use crate::schema::document::default_document::TantivyDocument;
     use crate::schema::*;

     #[test]
4 changes: 1 addition & 3 deletions src/schema/document/owned_value.rs
@@ -443,9 +443,7 @@ impl<'a> Iterator for ObjectMapIter<'a> {
 mod tests {
     use super::*;
     use crate::schema::{BytesOptions, Schema};
-    use crate::time::format_description::well_known::Rfc3339;
-    use crate::time::OffsetDateTime;
-    use crate::{DateTime, Document, TantivyDocument};
+    use crate::{Document, TantivyDocument};

     #[test]
     fn test_parse_bytes_doc() {
1 change: 0 additions & 1 deletion src/schema/field_entry.rs
@@ -136,7 +136,6 @@ impl FieldEntry {

 #[cfg(test)]
 mod tests {
-    use serde_json;

     use super::*;
     use crate::schema::{Schema, TextFieldIndexing, TEXT};
4 changes: 0 additions & 4 deletions src/schema/schema.rs
@@ -6,10 +6,8 @@ use serde::de::{SeqAccess, Visitor};
 use serde::ser::SerializeSeq;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};

-use super::ip_options::IpAddrOptions;
 use super::*;
 use crate::json_utils::split_json_path;
-use crate::schema::bytes_options::BytesOptions;
 use crate::TantivyError;

 /// Tantivy has a very strict schema.
@@ -421,9 +419,7 @@ mod tests {

     use matches::{assert_matches, matches};
     use pretty_assertions::assert_eq;
-    use serde_json;

-    use crate::schema::document::Value;
     use crate::schema::field_type::ValueParsingError;
     use crate::schema::schema::DocParsingError::InvalidJson;
     use crate::schema::*;
1 change: 0 additions & 1 deletion src/schema/term.rs
@@ -1,4 +1,3 @@
-use std::convert::TryInto;
 use std::hash::{Hash, Hasher};
 use std::net::Ipv6Addr;
 use std::{fmt, str};
1 change: 0 additions & 1 deletion src/store/compression_lz4_block.rs
@@ -1,4 +1,3 @@
-use core::convert::TryInto;
 use std::io::{self};
 use std::mem;

6 changes: 0 additions & 6 deletions src/store/compressors.rs
@@ -2,12 +2,6 @@ use std::io;

 use serde::{Deserialize, Deserializer, Serialize};

-pub trait StoreCompressor {
-    fn compress(&self, uncompressed: &[u8], compressed: &mut Vec<u8>) -> io::Result<()>;
-    fn decompress(&self, compressed: &[u8], decompressed: &mut Vec<u8>) -> io::Result<()>;
-    fn get_compressor_id() -> u8;
-}
-
 /// Compressor can be used on `IndexSettings` to choose
 /// the compressor used to compress the doc store.
 ///
(Diff truncated — the remaining changed files are not shown.)
