Skip to content

Commit

Permalink
Generate ImportMap from module path to imported dependencies (#3243)
Browse files Browse the repository at this point in the history
  • Loading branch information
chanman3388 committed Apr 4, 2023
1 parent 76e111c commit 10504eb
Show file tree
Hide file tree
Showing 13 changed files with 269 additions and 89 deletions.
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

74 changes: 70 additions & 4 deletions crates/ruff/src/checkers/imports.rs
@@ -1,10 +1,12 @@
//! Lint rules based on import analysis.

use std::borrow::Cow;
use std::path::Path;

use rustpython_parser::ast::Suite;
use rustpython_parser::ast::{StmtKind, Suite};

use ruff_diagnostics::Diagnostic;
use ruff_python_ast::helpers::to_module_path;
use ruff_python_ast::imports::{ImportMap, ModuleImport};
use ruff_python_ast::source_code::{Indexer, Locator, Stylist};
use ruff_python_ast::visitor::Visitor;

Expand All @@ -14,6 +16,66 @@ use crate::rules::isort;
use crate::rules::isort::track::{Block, ImportTracker};
use crate::settings::{flags, Settings};

/// Generate an [`ImportMap`] from the module at `path` to the modules it imports.
///
/// Returns `None` if the file has no enclosing `package`, or if its dotted
/// module path cannot be derived from `package` and `path`. Relative imports
/// (`from .. import x`) are resolved against the module path; any relative
/// import whose `level` reaches above the package root is skipped.
fn extract_import_map(path: &Path, package: Option<&Path>, blocks: &[&Block]) -> Option<ImportMap> {
    let package = package?;
    let module_path = to_module_path(package, path)?;

    // Pre-size the vector: one entry per imported name across all blocks.
    let num_imports = blocks.iter().map(|block| block.imports.len()).sum();
    let mut module_imports = Vec::with_capacity(num_imports);
    for stmt in blocks.iter().flat_map(|block| &block.imports) {
        match &stmt.node {
            StmtKind::Import { names } => {
                // `import a.b, c` — record each imported module name verbatim.
                module_imports.extend(names.iter().map(|name| {
                    ModuleImport::new(
                        name.node.name.clone(),
                        stmt.location,
                        stmt.end_location.unwrap(),
                    )
                }));
            }
            StmtKind::ImportFrom {
                module,
                names,
                level,
            } => {
                let level = level.unwrap_or(0);
                let module = if let Some(module) = module {
                    if level == 0 {
                        // Absolute `from a.b import c`: use the module as-is.
                        Cow::Borrowed(module)
                    } else {
                        // Relative `from ..a import c`: strip `level` trailing
                        // components from the current module path, then append
                        // the named module.
                        if module_path.len() <= level {
                            // Import escapes the package root; skip it.
                            continue;
                        }
                        let prefix = module_path[..module_path.len() - level].join(".");
                        Cow::Owned(format!("{prefix}.{module}"))
                    }
                } else {
                    // Bare relative `from .. import c`: the target module is
                    // the current module path with `level` components removed.
                    if module_path.len() <= level {
                        continue;
                    }
                    Cow::Owned(module_path[..module_path.len() - level].join("."))
                };
                module_imports.extend(names.iter().map(|name| {
                    ModuleImport::new(
                        format!("{}.{}", module, name.node.name),
                        name.location,
                        name.end_location.unwrap(),
                    )
                }));
            }
            // `ImportTracker` only collects import statements, so any other
            // statement kind here is a bug upstream.
            _ => panic!("Expected StmtKind::Import | StmtKind::ImportFrom"),
        }
    }

    let mut import_map = ImportMap::default();
    import_map.insert(module_path.join("."), module_imports);
    Some(import_map)
}

#[allow(clippy::too_many_arguments)]
pub fn check_imports(
python_ast: &Suite,
Expand All @@ -25,7 +87,7 @@ pub fn check_imports(
autofix: flags::Autofix,
path: &Path,
package: Option<&Path>,
) -> Vec<Diagnostic> {
) -> (Vec<Diagnostic>, Option<ImportMap>) {
// Extract all imports from the AST.
let tracker = {
let mut tracker = ImportTracker::new(locator, directives, path);
Expand All @@ -52,5 +114,9 @@ pub fn check_imports(
&blocks, python_ast, locator, stylist, settings, autofix,
));
}
diagnostics

// Extract import map.
let imports = extract_import_map(path, package, &blocks);

(diagnostics, imports)
}
106 changes: 63 additions & 43 deletions crates/ruff/src/linter.rs
Expand Up @@ -9,6 +9,7 @@ use rustpython_parser::lexer::LexResult;
use rustpython_parser::ParseError;

use ruff_diagnostics::Diagnostic;
use ruff_python_ast::imports::ImportMap;
use ruff_python_ast::source_code::{Indexer, Locator, Stylist};
use ruff_python_stdlib::path::is_python_stub_file;

Expand Down Expand Up @@ -51,6 +52,15 @@ impl<T> LinterResult<T> {

pub type FixTable = FxHashMap<Rule, usize>;

/// The outcome of a lint run that also applies autofixes (`lint_fix`):
/// the post-fix lint result, the fixed source text, and a per-rule tally
/// of how many fixes were applied.
pub struct FixerResult<'a> {
    /// The result returned by the linter, after applying any fixes.
    pub result: LinterResult<(Vec<Message>, Option<ImportMap>)>,
    /// The resulting source code, after applying any fixes.
    pub transformed: Cow<'a, str>,
    /// The number of fixes applied for each [`Rule`].
    pub fixed: FixTable,
}

/// Generate `Diagnostic`s from the source code contents at the
/// given `Path`.
#[allow(clippy::too_many_arguments)]
Expand All @@ -66,9 +76,10 @@ pub fn check_path(
settings: &Settings,
noqa: flags::Noqa,
autofix: flags::Autofix,
) -> LinterResult<Vec<Diagnostic>> {
) -> LinterResult<(Vec<Diagnostic>, Option<ImportMap>)> {
// Aggregate all diagnostics.
let mut diagnostics = vec![];
let mut imports = None;
let mut error = None;

// Collect doc lines. This requires a rare mix of tokens (for comments) and AST
Expand Down Expand Up @@ -142,7 +153,7 @@ pub fn check_path(
));
}
if use_imports {
diagnostics.extend(check_imports(
let (import_diagnostics, module_imports) = check_imports(
&python_ast,
locator,
indexer,
Expand All @@ -152,7 +163,9 @@ pub fn check_path(
autofix,
path,
package,
));
);
imports = module_imports;
diagnostics.extend(import_diagnostics);
}
if use_doc_lines {
doc_lines.extend(doc_lines_from_ast(&python_ast));
Expand Down Expand Up @@ -240,7 +253,7 @@ pub fn check_path(
}
}

LinterResult::new(diagnostics, error)
LinterResult::new((diagnostics, imports), error)
}

const MAX_ITERATIONS: usize = 100;
Expand Down Expand Up @@ -297,7 +310,7 @@ pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings
// Add any missing `# noqa` pragmas.
add_noqa(
path,
&diagnostics,
&diagnostics.0,
&contents,
indexer.commented_lines(),
&directives.noqa_line_for,
Expand All @@ -314,7 +327,7 @@ pub fn lint_only(
settings: &Settings,
noqa: flags::Noqa,
autofix: flags::Autofix,
) -> LinterResult<Vec<Message>> {
) -> LinterResult<(Vec<Message>, Option<ImportMap>)> {
// Tokenize once.
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);

Expand Down Expand Up @@ -348,20 +361,23 @@ pub fn lint_only(

// Convert from diagnostics to messages.
let path_lossy = path.to_string_lossy();
result.map(|diagnostics| {
diagnostics
.into_iter()
.map(|diagnostic| {
let source = if settings.show_source {
Some(Source::from_diagnostic(&diagnostic, &locator))
} else {
None
};
let lineno = diagnostic.location.row();
let noqa_row = *directives.noqa_line_for.get(&lineno).unwrap_or(&lineno);
Message::from_diagnostic(diagnostic, path_lossy.to_string(), source, noqa_row)
})
.collect()
result.map(|(messages, imports)| {
(
messages
.into_iter()
.map(|diagnostic| {
let source = if settings.show_source {
Some(Source::from_diagnostic(&diagnostic, &locator))
} else {
None
};
let lineno = diagnostic.location.row();
let noqa_row = *directives.noqa_line_for.get(&lineno).unwrap_or(&lineno);
Message::from_diagnostic(diagnostic, path_lossy.to_string(), source, noqa_row)
})
.collect(),
imports,
)
})
}

Expand All @@ -373,7 +389,7 @@ pub fn lint_fix<'a>(
package: Option<&Path>,
noqa: flags::Noqa,
settings: &Settings,
) -> Result<(LinterResult<Vec<Message>>, Cow<'a, str>, FixTable)> {
) -> Result<FixerResult<'a>> {
let mut transformed = Cow::Borrowed(contents);

// Track the number of fixed errors across iterations.
Expand Down Expand Up @@ -448,7 +464,7 @@ This indicates a bug in `{}`. If you could open an issue at:
}

// Apply autofix.
if let Some((fixed_contents, applied)) = fix_file(&result.data, &locator) {
if let Some((fixed_contents, applied)) = fix_file(&result.data.0, &locator) {
if iterations < MAX_ITERATIONS {
// Count the number of fixed errors.
for (rule, count) in applied {
Expand Down Expand Up @@ -488,29 +504,33 @@ This indicates a bug in `{}`. If you could open an issue at:

// Convert to messages.
let path_lossy = path.to_string_lossy();
return Ok((
result.map(|diagnostics| {
diagnostics
.into_iter()
.map(|diagnostic| {
let source = if settings.show_source {
Some(Source::from_diagnostic(&diagnostic, &locator))
} else {
None
};
let lineno = diagnostic.location.row();
let noqa_row = *directives.noqa_line_for.get(&lineno).unwrap_or(&lineno);
Message::from_diagnostic(
diagnostic,
path_lossy.to_string(),
source,
noqa_row,
)
})
.collect()
return Ok(FixerResult {
result: result.map(|(messages, imports)| {
(
messages
.into_iter()
.map(|diagnostic| {
let source = if settings.show_source {
Some(Source::from_diagnostic(&diagnostic, &locator))
} else {
None
};
let lineno = diagnostic.location.row();
let noqa_row =
*directives.noqa_line_for.get(&lineno).unwrap_or(&lineno);
Message::from_diagnostic(
diagnostic,
path_lossy.to_string(),
source,
noqa_row,
)
})
.collect(),
imports,
)
}),
transformed,
fixed,
));
});
}
}
3 changes: 2 additions & 1 deletion crates/ruff/src/rules/pandas_vet/mod.rs
Expand Up @@ -31,7 +31,8 @@ mod tests {
let directives =
directives::extract_directives(&tokens, directives::Flags::from_settings(&settings));
let LinterResult {
data: diagnostics, ..
data: (diagnostics, _imports),
..
} = check_path(
Path::new("<filename>"),
None,
Expand Down
2 changes: 1 addition & 1 deletion crates/ruff/src/rules/pyflakes/mod.rs
Expand Up @@ -258,7 +258,7 @@ mod tests {
let directives =
directives::extract_directives(&tokens, directives::Flags::from_settings(&settings));
let LinterResult {
data: mut diagnostics,
data: (mut diagnostics, _imports),
..
} = check_path(
Path::new("<filename>"),
Expand Down
5 changes: 3 additions & 2 deletions crates/ruff/src/test.rs
Expand Up @@ -31,7 +31,7 @@ pub fn test_path(path: impl AsRef<Path>, settings: &Settings) -> Result<Vec<Diag
let directives =
directives::extract_directives(&tokens, directives::Flags::from_settings(settings));
let LinterResult {
data: mut diagnostics,
data: (mut diagnostics, _imports),
..
} = check_path(
&path,
Expand Down Expand Up @@ -66,7 +66,8 @@ pub fn test_path(path: impl AsRef<Path>, settings: &Settings) -> Result<Vec<Diag
let directives =
directives::extract_directives(&tokens, directives::Flags::from_settings(settings));
let LinterResult {
data: diagnostics, ..
data: (diagnostics, _imports),
..
} = check_path(
&path,
None,
Expand Down
1 change: 1 addition & 0 deletions crates/ruff_cli/Cargo.toml
Expand Up @@ -25,6 +25,7 @@ doc = false
ruff = { path = "../ruff", features = ["clap"] }
ruff_cache = { path = "../ruff_cache" }
ruff_diagnostics = { path = "../ruff_diagnostics" }
ruff_python_ast = { path = "../ruff_python_ast" }

annotate-snippets = { version = "0.9.1", features = ["color"] }
anyhow = { workspace = true }
Expand Down

0 comments on commit 10504eb

Please sign in to comment.