Skip to content

Commit

Permalink
Tweaks
Browse files Browse the repository at this point in the history
  • Loading branch information
charliermarsh committed Jan 10, 2023
1 parent 8168c0a commit 0762960
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 17 deletions.
6 changes: 6 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -42,3 +42,9 @@ license-files = [
"LICENSE",
"licenses/*",
]

[tool.isort]
add_imports = "from __future__ import annotations"

[tool.ruff.isort]
required-imports = ["from __future__ import annotations"]
23 changes: 7 additions & 16 deletions src/directives.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,17 +90,11 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> IntMap<usize, usize> {
pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
let mut exclusions: IntSet<usize> = IntSet::default();
let mut splits: Vec<usize> = Vec::default();
let mut skip_file: bool = false;
let mut off: Option<Location> = None;
let mut last: Option<Location> = None;
for &(start, ref tok, end) in lxr.iter().flatten() {
last = Some(end);

// No need to keep processing, but we do need to determine the last token.
if skip_file {
continue;
}

let Tok::Comment(comment_text) = tok else {
continue;
};
Expand All @@ -112,7 +106,10 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
if comment_text == "# isort: split" {
splits.push(start.row());
} else if comment_text == "# isort: skip_file" || comment_text == "# isort:skip_file" {
skip_file = true;
return IsortDirectives {
skip_file: true,
..IsortDirectives::default()
};
} else if off.is_some() {
if comment_text == "# isort: on" {
if let Some(start) = off {
Expand Down Expand Up @@ -142,7 +139,7 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
IsortDirectives {
exclusions,
splits,
skip_file,
..IsortDirectives::default()
}
}

Expand Down Expand Up @@ -281,10 +278,7 @@ x = 1
y = 2
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
assert_eq!(
extract_isort_directives(&lxr).exclusions,
IntSet::from_iter([1, 2, 3, 4])
);
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());

let contents = "# isort: off
x = 1
Expand All @@ -293,10 +287,7 @@ y = 2
# isort: skip_file
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
assert_eq!(
extract_isort_directives(&lxr).exclusions,
IntSet::from_iter([1, 2, 3, 4, 5, 6])
);
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());
}

#[test]
Expand Down
12 changes: 11 additions & 1 deletion src/isort/rules/add_required_imports.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,15 @@ struct ImportFrom<'a> {
level: Option<&'a usize>,
}

/// A plain `import` statement, carrying the single alias it binds.
/// Counterpart to `ImportFrom` so both forms can be wrapped in `AnyImport`.
/// NOTE(review): `Alias` is a project-declared AST type (presumably the
/// parser's import-alias node holding the module name and optional `as`
/// binding) — confirm against its definition.
struct Import<'a> {
    // The imported name/alias this statement introduces.
    name: Alias<'a>,
}

/// Either form of an import statement — a bare `Import` or an
/// `ImportFrom` — so callers can handle both uniformly when checking
/// for required imports.
enum AnyImport<'a> {
    // `import x` form.
    Import(Import<'a>),
    // `from x import y` form.
    ImportFrom(ImportFrom<'a>),
}

impl fmt::Display for ImportFrom<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "from ")?;
Expand Down Expand Up @@ -55,7 +64,8 @@ fn has_required_import(block: &Block, required_import: &ImportFrom) -> bool {
})
}

/// Find the first token that isn't a docstring, comment, or whitespace.
/// Find the end of the first token that isn't a docstring, comment, or
/// whitespace.
fn find_splice_location(contents: &str) -> Location {
let mut splice = Location::default();
for (.., tok, end) in lexer::make_tokenizer(contents).flatten() {
Expand Down

0 comments on commit 0762960

Please sign in to comment.