From 0b2232ad88ffe38a665b90d714366b93b0ad7905 Mon Sep 17 00:00:00 2001
From: David Peter
Date: Wed, 15 May 2024 18:56:50 +0200
Subject: [PATCH] Remove #[allow(…)]
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 numbat/src/tokenizer.rs   | 1 -
 numbat/src/typechecker.rs | 1 -
 2 files changed, 2 deletions(-)

diff --git a/numbat/src/tokenizer.rs b/numbat/src/tokenizer.rs
index 5f122cfc..906042a4 100644
--- a/numbat/src/tokenizer.rs
+++ b/numbat/src/tokenizer.rs
@@ -705,7 +705,6 @@ pub fn tokenize(input: &str, code_source_id: usize) -> Result<Vec<Token>> {
 }
 
 #[cfg(test)]
-#[allow(clippy::type_complexity)]
 fn tokenize_reduced(input: &str) -> Result<Vec<(String, TokenKind, (u32, u32))>, String> {
     Ok(tokenize(input, 0)
         .map_err(|e| {
diff --git a/numbat/src/typechecker.rs b/numbat/src/typechecker.rs
index 1ebc1a08..6d9a30ae 100644
--- a/numbat/src/typechecker.rs
+++ b/numbat/src/typechecker.rs
@@ -1,4 +1,3 @@
-#![allow(clippy::result_large_err)]
 use std::{
     collections::{HashMap, HashSet},
     error::Error,
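
Background note (illustrative only; none of the names or types below are taken
from numbat): the two lints whose `allow` attributes this patch removes can
usually be silenced structurally rather than suppressed. `clippy::type_complexity`
flags deeply nested types and is commonly resolved with a type alias;
`clippy::result_large_err` flags functions whose `Result` has a large `Err`
variant (more than 128 bytes by default) and is commonly resolved by boxing the
error. A minimal self-contained sketch of both remedies:

// Hypothetical sketch; names and types are NOT from numbat's code.

// `clippy::type_complexity` fires on deeply nested types. A type alias keeps
// the signature readable and avoids the lint without an `allow`.
type ReducedTokens = Vec<(String, Option<(usize, usize)>)>;

fn tokenize_reduced_sketch(input: &str) -> Result<ReducedTokens, String> {
    // Stand-in body: one "token" per whitespace-separated word, no position.
    Ok(input
        .split_whitespace()
        .map(|word| (word.to_string(), None))
        .collect())
}

// `clippy::result_large_err` fires when the `Err` variant of a returned
// `Result` is large (default threshold: 128 bytes). Boxing the error keeps
// the `Result` itself small, so the lint no longer triggers.
#[derive(Debug)]
struct LargeError {
    _payload: [u64; 32], // 256 bytes: would trip the lint if returned unboxed
}

fn might_fail(fail: bool) -> Result<(), Box<LargeError>> {
    if fail {
        Err(Box::new(LargeError { _payload: [0; 32] }))
    } else {
        Ok(())
    }
}

fn main() {
    println!("{:?}", tokenize_reduced_sketch("1 + 2"));
    println!("{}", might_fail(true).is_err());
}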