Turn global unused into local
tgross35 committed Oct 17, 2023
1 parent ba9632b commit e46bced
Showing 16 changed files with 51 additions and 69 deletions.
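The pattern throughout this commit: instead of one crate-wide #![allow(unused)] in lib.rs, each site now handles its own unused item or binding, either by prefixing the binding with an underscore or by adding an item-level #[allow(unused)]. A minimal standalone sketch of both techniques (hypothetical names, not code from this repository):

// An item kept only for tests or later use gets a local allow instead of a crate-wide one.
#[allow(unused)]
fn debug_helper() {}

fn first_words(lines: &[&str]) -> Vec<String> {
    lines
        .iter()
        .enumerate()
        // The index is not needed, so it is bound as `_idx`: the name stays readable
        // and the unused-variable warning is silenced only at this one site.
        .filter_map(|(_idx, line)| line.split_whitespace().next())
        .map(str::to_owned)
        .collect()
}

fn main() {
    println!("{:?}", first_words(&["turn global unused", "into local"]));
}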
14 changes: 6 additions & 8 deletions zspell/src/affix.rs
@@ -9,10 +9,9 @@ pub use self::types::{
CompoundPattern, CompoundSyllable, Conversion, Encoding, FlagType, PartOfSpeech, Phonetic,
RuleType,
};
use crate::dict::{AfxRule, DictEntry, FlagValue};
use crate::error::{BuildError, Error, ParseError, ParseErrorKind};
use crate::morph::MorphInfo;
use crate::parser_affix::{parse_affix, AffixNode, ParsedRule, ParsedRuleGroup};
use crate::dict::{AfxRule, FlagValue};
use crate::error::{BuildError, Error, ParseError};
use crate::parser_affix::{parse_affix, AffixNode, ParsedRuleGroup};

/// A representation of an affix file
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -275,7 +274,7 @@ impl ParsedCfg {
let name_str = node.name_str();
match node {
AffixNode::Encoding(v) => res.encoding = v,
AffixNode::FlagType(v) => (),
AffixNode::FlagType(_) => (),
AffixNode::ComplexPrefixes => res.complex_prefixes = true,
AffixNode::Language(v) => res.lang = v,
AffixNode::IgnoreChars(v) => res.ignore_chars = v,
@@ -349,11 +348,11 @@
}
AffixNode::AfxInputConversion(v) => res.input_conversions = v,
AffixNode::AfxOutputConversion(v) => res.output_conversions = v,
AffixNode::AfxLemmaPresentFlag(v) => {
AffixNode::AfxLemmaPresentFlag(_) => {
warnings.push(format!("flag {name_str} is deprecated"));
}
AffixNode::AfxNeededFlag(v) => res.afx_needed_flag = Some(res.convert_flag(&v)?),
AffixNode::AfxPseudoRootFlag(v) => {
AffixNode::AfxPseudoRootFlag(_) => {
warnings.push(format!("flag {name_str} is deprecated"));
}
AffixNode::AfxSubstandardFlag(v) => {
@@ -408,7 +407,6 @@ impl ParsedCfg {
];

let mut map: BTreeMap<u32, FlagValue> = BTreeMap::new();
let mut morphs: Vec<MorphInfo> = Vec::new();

for (key, value) in keysets
.iter()
9 changes: 5 additions & 4 deletions zspell/src/affix/types.rs
@@ -5,7 +5,7 @@ use std::fmt::Display;
use lazy_static::lazy_static;
use regex::Regex;

use crate::error::{BuildError, ParseErrorKind};
use crate::error::ParseErrorKind;

lazy_static! {
static ref RE_COMPOUND_PATTERN: Regex = Regex::new(
@@ -142,7 +142,6 @@ impl FlagType {

fn parse_chars_long(chars: [char; 2]) -> Result<u32, ParseErrorKind> {
if chars.iter().any(|ch| !ch.is_ascii()) {
let char_str: String = chars.iter().collect();
Err(ParseErrorKind::FlagParse(Self::Long))
} else {
Ok(u32::from(u16::from_ne_bytes([
@@ -223,6 +222,7 @@ pub struct CompoundPattern {
/* Method implementations */

impl Phonetic {
#[allow(unused)]
pub(crate) fn new(pattern: &str, replace: &str) -> Self {
Self {
pattern: pattern.to_owned(),
@@ -232,6 +232,7 @@ }
}

impl Conversion {
#[allow(unused)]
pub(crate) fn new(input: &str, output: &str, bidirectional: bool) -> Self {
Self {
input: input.to_owned(),
@@ -331,7 +332,7 @@ impl TryFrom<&str> for Phonetic {
type Error = ParseErrorKind;

fn try_from(value: &str) -> Result<Self, Self::Error> {
let mut split: Vec<_> = value.split_whitespace().collect();
let split: Vec<_> = value.split_whitespace().collect();
if split.len() != 2 {
return Err(ParseErrorKind::Phonetic(split.len()));
}
@@ -364,7 +365,7 @@ impl TryFrom<&str> for CompoundSyllable {

/// Format: `COMPOUNDSYLLABLE count vowels`
fn try_from(value: &str) -> Result<Self, ParseErrorKind> {
let mut split: Vec<_> = value.split_whitespace().collect();
let split: Vec<_> = value.split_whitespace().collect();
if split.len() != 2 {
return Err(ParseErrorKind::CompoundSyllableCount(split.len()));
}
15 changes: 7 additions & 8 deletions zspell/src/dict.rs
@@ -6,7 +6,6 @@ mod parser;
mod rule;
mod types;

use std::borrow::Borrow;
use std::collections::BTreeMap;
use std::sync::Arc;

@@ -24,7 +23,7 @@ use crate::affix::FlagType;
use crate::error::{BuildError, Error, WordNotFoundError};
use crate::helpers::StrWrapper;
use crate::morph::MorphInfo;
use crate::{suggestions, ParsedCfg};
use crate::ParsedCfg;

/// Main dictionary object used for spellchecking and suggestions
///
@@ -182,7 +181,7 @@ impl Dictionary {
&'d self,
input: &'a str,
) -> impl Iterator<Item = (usize, &'a str)> + 'd {
word_splitter(input).filter(|(idx, w)| !self.check_word(w))
word_splitter(input).filter(|(_idx, w)| !self.check_word(w))
}

/// **UNSTABLE** Suggest a word at given indices. Feature gated behind
@@ -221,8 +220,8 @@ impl Dictionary {
.keys()
.filter_map(|key| try_levenshtein(key, word, 1).map(|lim| (lim, key.as_ref())))
.collect();
suggestions.sort_unstable_by_key(|(k, v)| *k);
Err(suggestions.iter().take(10).map(|(k, v)| *v).collect())
suggestions.sort_unstable_by_key(|(k, _v)| *k);
Err(suggestions.iter().take(10).map(|(_k, v)| *v).collect())
}

/// **UNSTABLE** Generate the stems for a single word. Feature gated behind
@@ -270,7 +269,7 @@ impl Dictionary {
/// Returns a dummy error if the word is not found
#[inline]
#[cfg(feature = "unstable-analysis")]
pub fn analyze_word(&self, word: &str) -> Result<Vec<MorphInfo>, WordNotFoundError> {
pub fn analyze_word(&self, _word: &str) -> Result<Vec<MorphInfo>, WordNotFoundError> {
todo!()
}

@@ -394,12 +393,12 @@ impl Dictionary {
fn update_personal(
&mut self,
entries: &[PersonalEntry],
dict: &[DictEntry],
_dict: &[DictEntry],
) -> Result<(), Error> {
// FIXME: don't take `dict` as an argument, use our existing hashmaps
self.wordlist.0.reserve(entries.len() * 2);
for entry in entries {
if let Some(friend) = &entry.friend {
if let Some(_friend) = &entry.friend {
// Find the friend in our dictionary, find its source affixes
// let flags = dict.iter().find(|d| &d.stem() == friend).map(|d| &d.flags);
todo!()
19 changes: 7 additions & 12 deletions zspell/src/dict/helpers.rs
@@ -1,16 +1,10 @@
use std::borrow::Borrow;
use std::fmt::Debug;
use std::sync::Arc;

use hashbrown::HashSet;
use unicode_segmentation::UnicodeSegmentation;

use super::rule::AfxRule;
use super::{FlagValue, WordList};
use crate::affix::{FlagType, RuleType};
use super::WordList;
use crate::dict::types::{Meta, Source};
use crate::error::BuildError;
use crate::Error;

// pub(super) fn analyze_flags

@@ -43,7 +37,7 @@ pub(super) fn create_affixed_word_map(
}

for &rule in suffix_rules {
for (idx, result) in rule.apply_patterns(stem) {
for (_idx, result) in rule.apply_patterns(stem) {
let meta = Meta::new(stem_rc.clone(), Source::Affix(rule.clone()));
let meta_vec = dest.0.entry_ref(result.as_str()).or_insert_with(Vec::new);
meta_vec.push(meta);
@@ -83,7 +77,7 @@ mod tests {
use pretty_assertions::assert_eq;

use super::*;
use crate::dict::rule::AfxRulePattern;
use crate::affix::RuleType;

#[test]
fn test_create_words() {
@@ -151,7 +145,7 @@
let stem_rc = Arc::from(*word);
create_affixed_word_map(pfxs, sfxs, &stem_rc, &stem_rc, &mut dest);

let mut tmp: Vec<(Box<str>, _)> = dest.0.into_iter().collect();
let tmp: Vec<(Box<str>, _)> = dest.0.into_iter().collect();
let mut result: Vec<_> = tmp.iter().map(|(s, _)| s.as_ref()).collect();
let mut expected: Vec<_> = (*expected_slice).to_owned();
result.sort_unstable();
@@ -167,8 +161,9 @@
#[test]
fn test_word_splitter() {
let s = "the quick brown. Fox Jum-ped --\t where? 'over' (the) very--lazy dog";
let v: Vec<_> = dbg!(word_splitter(s).collect());
let v: Vec<_> = dbg!(s.split_word_bound_indices().collect());
let _: Vec<_> = dbg!(word_splitter(s).collect());
let _: Vec<_> = dbg!(s.split_word_bound_indices().collect());
// FIXME: do something with these
}
}

3 changes: 3 additions & 0 deletions zspell/src/dict/parser.rs
@@ -99,6 +99,7 @@ impl DictEntry {
Ok(Self { stem, flags, morph })
}

#[allow(unused)]
pub fn stem(&self) -> &str {
&self.stem
}
@@ -126,6 +127,7 @@ pub struct PersonalEntry {
}

impl PersonalEntry {
#[allow(unused)]
pub(crate) fn new(
stem: &str,
friend: Option<&str>,
@@ -174,6 +176,7 @@ pub struct ParsedPersonalMeta {
}

impl ParsedPersonalMeta {
#[allow(unused)]
pub(crate) fn new<S: AsRef<str>>(friend: Option<S>, morph: Vec<MorphInfo>) -> Self {
Self {
friend: friend.map(|s| s.as_ref().to_owned()),
19 changes: 7 additions & 12 deletions zspell/src/dict/rule.rs
@@ -1,17 +1,12 @@
//! Implementation for a stored rule

use std::hash::Hash;
use std::ops::Deref;
use std::sync::Arc;

use regex::Regex;

use crate::affix::{ParsedCfg, RuleType};
use crate::error::BuildError;
use crate::helpers::{compile_re_pattern, ReWrapper};
use crate::helpers::ReWrapper;
use crate::morph::MorphInfo;
use crate::parser_affix::ParsedRuleGroup;
use crate::Error;

/// A single rule group
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
@@ -29,8 +24,8 @@ impl AfxRule {
affixes: &[&str],
patterns: &[&str],
can_combine: bool,
strip: Option<&str>,
condition: Option<&str>,
_strip: Option<&str>,
_condition: Option<&str>,
) -> Self {
let mut ret = Self {
kind,
@@ -41,7 +36,7 @@ impl AfxRule {
.collect(),
};
for (idx, pat) in patterns.iter().enumerate() {
ret.patterns[idx].set_pattern(pat, kind);
ret.patterns[idx].set_pattern(pat, kind).unwrap();
}
ret
}
@@ -51,15 +46,15 @@ impl AfxRule {
/// NOTE: returns a vec reference and `Self`'s morph vec will be empty!
/// Needs construction wherever the Arc target is
// PERF: bench with & without vec reference instead of output
pub fn from_parsed_group(cfg: &ParsedCfg, group: &ParsedRuleGroup) -> Self {
pub fn from_parsed_group(_cfg: &ParsedCfg, group: &ParsedRuleGroup) -> Self {
let mut ret = Self {
kind: group.kind,
can_combine: group.can_combine,
patterns: Vec::with_capacity(group.rules.len()),
};

for rule in &group.rules {
let mut morph_info: Vec<Arc<MorphInfo>> = rule
let morph_info: Vec<Arc<MorphInfo>> = rule
.morph_info
.iter()
.map(|m| Arc::new(m.clone()))
@@ -128,7 +123,7 @@ impl AfxRulePattern {
/// Helper for testing, sets the condition based on a kind
#[cfg(test)]
pub fn set_pattern(&mut self, condition: &str, kind: RuleType) -> Result<(), regex::Error> {
self.condition = compile_re_pattern(condition, kind)?;
self.condition = crate::helpers::compile_re_pattern(condition, kind)?;
Ok(())
}

3 changes: 1 addition & 2 deletions zspell/src/dict/tests.rs
@@ -1,12 +1,11 @@
//! Tests for a dict file

use std::fs;
use std::path::PathBuf;

use pretty_assertions::assert_eq;
use util::workspace_root;

use super::parser::{parse_personal_dict, DictEntry};
use super::parser::DictEntry;
use super::*;
use crate::morph::MorphInfo;

5 changes: 1 addition & 4 deletions zspell/src/dict/types.rs
@@ -1,12 +1,8 @@
use std::borrow::Borrow;
use std::sync::Arc;

use hashbrown::Equivalent;

use super::parser::ParsedPersonalMeta;
use super::rule::AfxRule;
use crate::morph::MorphInfo;
use crate::parser_affix::ParsedRule;

/// Additional information attached to an entry in a dictionary
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
@@ -91,6 +87,7 @@ impl PersonalMeta {
}

#[cfg(test)]
#[allow(unused)]
mod tests {
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
3 changes: 1 addition & 2 deletions zspell/src/error.rs
@@ -3,7 +3,6 @@
//! [`Error`] is the main error type for this crate, all other types of errors
//! will fall under it.

use core::prelude::v1;
use std::fmt::Display;
use std::num::ParseIntError;

@@ -212,6 +211,7 @@ impl ParseError {
}

impl ParseErrorKind {
#[allow(unused)]
fn help_msg(&self) -> Option<&'static str> {
match self {
ParseErrorKind::Boolean => {
@@ -321,7 +321,6 @@ impl Display for ParseErrorKind {
ParseErrorKind::CompoundSyllableCount(n) => write!(f, "expected 2 items but got {n}"),
ParseErrorKind::CompoundSyllableParse(e) => write!(f, "unable to parse integer: {e}"),
ParseErrorKind::Regex(e) => e.fmt(f),
ParseErrorKind::AffixHeader => todo!(),
ParseErrorKind::Personal => write!(f, "error parsing entry in personal dictionary"),
ParseErrorKind::InvalidFlag => {
write!(f, "expected a single alphanumeric flag (4 bytes maximum)")
1 change: 0 additions & 1 deletion zspell/src/helpers.rs
@@ -1,7 +1,6 @@
//! Various functions that are helpful throughout the crate

use core::fmt::Display;
use std::borrow::Borrow;
use std::hash::Hash;
use std::ops::Deref;
use std::rc::Rc;
1 change: 0 additions & 1 deletion zspell/src/lib.rs
@@ -82,7 +82,6 @@
//! [Hunspell]: http://hunspell.github.io/
//! [CLI docs]: https://pluots.github.io/zspell/
#![forbid(unsafe_code)]
#![allow(unused)]
#![warn(clippy::pedantic)]
// #![warn(clippy::cargo)]
#![warn(clippy::nursery)]
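The lib.rs change above is the one that drives the rest of the commit: with the crate-wide #![allow(unused)] removed, unused-item and unused-variable warnings surface everywhere, and each file shown earlier resolves its own occurrences with an underscore-prefixed binding or a local #[allow(unused)]. For reference, a sketch of the crate-root attributes after the change, limited to the lines visible in this hunk (the real lib.rs may carry more):

#![forbid(unsafe_code)]
#![warn(clippy::pedantic)]
// #![warn(clippy::cargo)]
#![warn(clippy::nursery)]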
3 changes: 1 addition & 2 deletions zspell/src/parser_affix.rs
@@ -17,7 +17,6 @@ use crate::affix::{
CompoundPattern, CompoundSyllable, Conversion, Encoding, FlagType, Phonetic, RuleType,
};
use crate::error::{ParseError, ParseErrorKind};
use crate::helpers::convertu32;
use crate::morph::MorphInfo;

/// Characters considered line enders
@@ -719,7 +718,7 @@ pub fn parse_affix(s: &str) -> Result<Vec<AffixNode>, ParseError> {
let mut nlines: u32 = 1;

'outer: while !working.is_empty() {
'inner: for (ix, parse_fn) in ALL_PARSERS.iter().enumerate() {
for (_idx, parse_fn) in ALL_PARSERS.iter().enumerate() {
let tmp = parse_fn(working).map_err(|e| e.add_offset_ret(nlines, 0))?;
if let Some((node, residual, nl)) = tmp {
nlines += nl;