Skip to content

Commit

Permalink
Tweaks
Browse files Browse the repository at this point in the history
  • Loading branch information
charliermarsh committed Jan 10, 2023
1 parent c1e15be commit d1558fc
Show file tree
Hide file tree
Showing 6 changed files with 130 additions and 88 deletions.
1 change: 1 addition & 0 deletions foo.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
"%s" % "Hello, world!"
6 changes: 6 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -42,3 +42,9 @@ license-files = [
"LICENSE",
"licenses/*",
]

[tool.isort]
add_imports = "from __future__ import annotations"

[tool.ruff.isort]
required-imports = ["from __future__ import annotations"]
48 changes: 13 additions & 35 deletions src/checkers/imports.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,37 +13,6 @@ use crate::settings::{flags, Settings};
use crate::source_code_locator::SourceCodeLocator;
use crate::source_code_style::SourceCodeStyleDetector;

/// Run the isort "organize imports" rule (I001) over every import block
/// discovered by the tracker, returning any diagnostics produced.
///
/// Empty blocks are skipped: there is nothing to organize, and the rule
/// is only invoked for blocks that actually contain imports.
fn organize_imports(
    tracker: &ImportTracker,
    locator: &SourceCodeLocator,
    settings: &Settings,
    stylist: &SourceCodeStyleDetector,
    autofix: flags::Autofix,
    package: Option<&Path>,
) -> Vec<Diagnostic> {
    tracker
        .iter()
        .filter(|block| !block.imports.is_empty())
        .filter_map(|block| {
            isort::rules::organize_imports(block, locator, settings, stylist, autofix, package)
        })
        .collect()
}

/// Run the isort "required imports" rule (I002) over the full set of
/// tracked import blocks, returning any diagnostics produced.
fn add_required_imports(
    contents: &str,
    tracker: &ImportTracker,
    settings: &Settings,
    autofix: flags::Autofix,
) -> Vec<Diagnostic> {
    // The rule expects a slice of blocks, so materialize the iterator once.
    isort::rules::add_required_imports(
        contents,
        &tracker.iter().collect::<Vec<&Block>>(),
        settings,
        autofix,
    )
}

#[allow(clippy::too_many_arguments)]
pub fn check_imports(
contents: &str,
Expand All @@ -69,16 +38,25 @@ pub fn check_imports(
}
tracker
};
let blocks: Vec<&Block> = tracker.iter().collect();

// Enforce import rules.
let mut diagnostics = vec![];
if settings.enabled.contains(&RuleCode::I001) {
diagnostics.extend(organize_imports(
&tracker, locator, settings, stylist, autofix, package,
));
for block in &blocks {
if !block.imports.is_empty() {
if let Some(diagnostic) = isort::rules::organize_imports(
block, locator, settings, stylist, autofix, package,
) {
diagnostics.push(diagnostic);
}
}
}
}
if settings.enabled.contains(&RuleCode::I002) {
diagnostics.extend(add_required_imports(contents, &tracker, settings, autofix));
diagnostics.extend(isort::rules::add_required_imports(
contents, &blocks, settings, autofix,
));
}
diagnostics
}
23 changes: 7 additions & 16 deletions src/directives.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,17 +90,11 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> IntMap<usize, usize> {
pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
let mut exclusions: IntSet<usize> = IntSet::default();
let mut splits: Vec<usize> = Vec::default();
let mut skip_file: bool = false;
let mut off: Option<Location> = None;
let mut last: Option<Location> = None;
for &(start, ref tok, end) in lxr.iter().flatten() {
last = Some(end);

// No need to keep processing, but we do need to determine the last token.
if skip_file {
continue;
}

let Tok::Comment(comment_text) = tok else {
continue;
};
Expand All @@ -112,7 +106,10 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
if comment_text == "# isort: split" {
splits.push(start.row());
} else if comment_text == "# isort: skip_file" || comment_text == "# isort:skip_file" {
skip_file = true;
return IsortDirectives {
skip_file: true,
..IsortDirectives::default()
};
} else if off.is_some() {
if comment_text == "# isort: on" {
if let Some(start) = off {
Expand Down Expand Up @@ -142,7 +139,7 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
IsortDirectives {
exclusions,
splits,
skip_file,
..IsortDirectives::default()
}
}

Expand Down Expand Up @@ -281,10 +278,7 @@ x = 1
y = 2
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
assert_eq!(
extract_isort_directives(&lxr).exclusions,
IntSet::from_iter([1, 2, 3, 4])
);
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());

let contents = "# isort: off
x = 1
Expand All @@ -293,10 +287,7 @@ y = 2
# isort: skip_file
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
assert_eq!(
extract_isort_directives(&lxr).exclusions,
IntSet::from_iter([1, 2, 3, 4, 5, 6])
);
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());
}

#[test]
Expand Down
137 changes: 100 additions & 37 deletions src/isort/rules/add_required_imports.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,15 @@ struct ImportFrom<'a> {
level: Option<&'a usize>,
}

/// A required `import` statement (e.g. `import os` or `import os as system`),
/// borrowed from the parsed AST of the user-configured required-import string.
struct Import<'a> {
    // The imported module name, plus any `as` alias.
    name: Alias<'a>,
}

/// Either style of required import: a plain `import x` or a
/// `from x import y` statement. Used so both forms can be matched
/// against existing import blocks and rendered for insertion.
enum AnyImport<'a> {
    Import(Import<'a>),
    ImportFrom(ImportFrom<'a>),
}

impl fmt::Display for ImportFrom<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "from ")?;
Expand All @@ -32,30 +41,63 @@ impl fmt::Display for ImportFrom<'_> {
if let Some(module) = self.module {
write!(f, "{module}")?;
}
write!(f, " import {}", self.name.name)
write!(f, " import {}", self.name.name)?;
Ok(())
}
}

impl fmt::Display for Import<'_> {
    /// Render the import as Python source: `import name`, or
    /// `import name as alias` when an alias is present.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.name.as_name {
            Some(as_name) => write!(f, "import {} as {as_name}", self.name.name),
            None => write!(f, "import {}", self.name.name),
        }
    }
}

fn has_required_import(block: &Block, required_import: &ImportFrom) -> bool {
block.imports.iter().any(|import| {
let StmtKind::ImportFrom {
module,
names,
level,
} = &import.node else {
return false;
};

module.as_deref() == required_import.module
&& level.as_ref() == required_import.level
&& names.iter().any(|alias| {
impl fmt::Display for AnyImport<'_> {
    /// Delegate rendering to whichever variant is wrapped.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            AnyImport::Import(inner) => inner.fmt(f),
            AnyImport::ImportFrom(inner) => inner.fmt(f),
        }
    }
}

/// Return `true` if the given import block already contains the required
/// import — matching on statement kind (`import` vs. `from … import`),
/// module, relative-import level, name, and `as` alias.
fn contains(block: &Block, required_import: &AnyImport) -> bool {
    block.imports.iter().any(|import| match required_import {
        AnyImport::Import(required) => {
            // Only plain `import` statements can satisfy a plain required import.
            if let StmtKind::Import { names } = &import.node {
                names.iter().any(|alias| {
                    alias.node.name == required.name.name
                        && alias.node.asname.as_deref() == required.name.as_name
                })
            } else {
                false
            }
        }
        AnyImport::ImportFrom(required) => {
            // Only `from … import` statements can satisfy a from-style
            // required import; module and level must match exactly.
            if let StmtKind::ImportFrom {
                module,
                names,
                level,
            } = &import.node
            {
                module.as_deref() == required.module
                    && level.as_ref() == required.level
                    && names.iter().any(|alias| {
                        alias.node.name == required.name.name
                            && alias.node.asname.as_deref() == required.name.as_name
                    })
            } else {
                false
            }
        }
    })
}

/// Find the first token that isn't a docstring, comment, or whitespace.
/// Find the end of the first token that isn't a docstring, comment, or
/// whitespace.
fn find_splice_location(contents: &str) -> Location {
let mut splice = Location::default();
for (.., tok, end) in lexer::make_tokenizer(contents).flatten() {
Expand All @@ -69,7 +111,7 @@ fn find_splice_location(contents: &str) -> Location {
}

fn add_required_import(
required_import: &ImportFrom,
required_import: &AnyImport,
contents: &str,
blocks: &[&Block],
settings: &Settings,
Expand All @@ -79,7 +121,7 @@ fn add_required_import(
if blocks
.iter()
.filter(|block| !block.nested)
.any(|block| has_required_import(block, required_import))
.any(|block| contains(block, required_import))
{
return None;
}
Expand Down Expand Up @@ -131,26 +173,47 @@ pub fn add_required_imports(
error!("Expected require import to contain a single statement: `{}`", required_import);
return vec![];
}
let StmtKind::ImportFrom { module, names, level } = &body[0].node else {
error!("Expected required import to be in import-from style: `{}`", required_import);
return vec![];
};
names.iter().filter_map(|name| {
add_required_import(
&ImportFrom {
module: module.as_ref().map(String::as_str),
name: Alias {
name: name.node.name.as_str(),
as_name: name.node.asname.as_deref(),
},
level: level.as_ref(),
},
contents,
blocks,
settings,
autofix,
)
}).collect()

match &body[0].node {
StmtKind::ImportFrom { module, names, level } => {
names.iter().filter_map(|name| {
add_required_import(
&AnyImport::ImportFrom(ImportFrom {
module: module.as_ref().map(String::as_str),
name: Alias {
name: name.node.name.as_str(),
as_name: name.node.asname.as_deref(),
},
level: level.as_ref(),
}),
contents,
blocks,
settings,
autofix,
)
}).collect()
}
StmtKind::Import { names } => {
names.iter().filter_map(|name| {
add_required_import(
&AnyImport::Import(Import {
name: Alias {
name: name.node.name.as_str(),
as_name: name.node.asname.as_deref(),
},
}),
contents,
blocks,
settings,
autofix,
)
}).collect()
}
_ => {
error!("Expected required import to be in import-from style: `{}`", required_import);
vec![]
}
}
})
.collect()
}
3 changes: 3 additions & 0 deletions tests/black_compatibility_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -191,8 +191,11 @@ fn test_ruff_black_compatibility() -> Result<()> {
&codes,
];

println!("ruff args: {:?}", ruff_args);

for entry in paths {
let path = entry.path();
println!("path: {}", path.display());
run_test(path, &blackd, &ruff_args).context(format!("Testing {}", path.display()))?;
}

Expand Down

0 comments on commit d1558fc

Please sign in to comment.