Update Q000-002 with the new f-string tokens
dhruvmanila committed Sep 22, 2023
1 parent 587aa71 commit 1d2fcf3
Showing 3 changed files with 83 additions and 13 deletions.
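For context on what the diff implements: with the new f-string tokenization, an f-string no longer reaches the checker as a single string token but as an FStringStart/FStringMiddle/FStringEnd sequence, with nested f-strings contributing their own pairs, so the flake8-quotes rules (Q000-002) must stitch those pieces back into one range. The standalone sketch below mirrors that range-tracking idea with simplified stand-in types (a plain `Range` of `u32` offsets instead of `ruff_text_size::TextRange`, and a cut-down `Tok` enum); the token stream and offsets in `main` are invented for illustration, not taken from the commit.

// Standalone sketch: simplified stand-ins for the real Tok / TextRange types.
#[allow(dead_code)]
#[derive(Debug)]
enum Tok {
    FStringStart,
    FStringMiddle,
    FStringEnd,
    Other,
}

#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
struct Range {
    start: u32,
    end: u32,
}

// Tracks the range of the outermost f-string by counting start/end pairs,
// mirroring the FStringRangeBuilder added in the diff below.
#[derive(Debug, Default)]
struct RangeBuilder {
    start: u32,
    end: u32,
    nesting: u32,
}

impl RangeBuilder {
    fn visit_token(&mut self, token: &Tok, range: Range) {
        match token {
            Tok::FStringStart => {
                if self.nesting == 0 {
                    self.start = range.start;
                }
                self.nesting += 1;
            }
            Tok::FStringEnd => {
                // Defensive here; the real code can assume balanced tokens
                // because the source parsed to a valid AST.
                self.nesting = self.nesting.saturating_sub(1);
                if self.nesting == 0 {
                    self.end = range.end;
                }
            }
            _ => {}
        }
    }

    fn in_fstring(&self) -> bool {
        self.nesting > 0
    }

    fn finish(&self) -> Range {
        Range { start: self.start, end: self.end }
    }
}

fn main() {
    // Invented token stream and offsets for something like: f"a {f'b'} c"
    let tokens = [
        (Tok::FStringStart, Range { start: 0, end: 2 }),   // outer f"
        (Tok::FStringMiddle, Range { start: 2, end: 5 }),  // a {
        (Tok::FStringStart, Range { start: 5, end: 7 }),   // inner f'
        (Tok::FStringMiddle, Range { start: 7, end: 8 }),  // b
        (Tok::FStringEnd, Range { start: 8, end: 9 }),     // inner '
        (Tok::FStringMiddle, Range { start: 9, end: 12 }), // } c
        (Tok::FStringEnd, Range { start: 12, end: 13 }),   // outer "
    ];

    let mut builder = RangeBuilder::default();
    for (tok, range) in &tokens {
        builder.visit_token(tok, *range);
        // A checker would `continue` while builder.in_fstring() is true,
        // exactly like the new loop in check_string_quotes.
    }

    // The inner FStringEnd does not close the outer range; only the
    // outermost pair does.
    assert!(!builder.in_fstring());
    assert_eq!(builder.finish(), Range { start: 0, end: 13 });
    println!("outermost f-string range: {:?}", builder.finish());
}
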
2 changes: 1 addition & 1 deletion crates/ruff_linter/src/checkers/tokens.rs
@@ -127,7 +127,7 @@ pub(crate) fn check_tokens(
        Rule::BadQuotesMultilineString,
        Rule::BadQuotesDocstring,
    ]) {
        flake8_quotes::rules::from_tokens(&mut diagnostics, tokens, locator, settings);
        flake8_quotes::rules::check_string_quotes(&mut diagnostics, tokens, locator, settings);
    }

    if settings.rules.any_enabled(&[
90 changes: 80 additions & 10 deletions crates/ruff_linter/src/rules/flake8_quotes/rules/{from_tokens.rs → check_string_quotes.rs}
@@ -1,6 +1,7 @@
use ruff_python_index::Indexer;
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::Tok;
use ruff_text_size::TextRange;
use ruff_text_size::{TextRange, TextSize};

use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
@@ -354,8 +355,62 @@ fn strings(
    diagnostics
}

/// A builder for the f-string range.
///
/// For now, this is limited to the outermost f-string and doesn't support
/// nested f-strings.
#[derive(Debug, Default)]
struct FStringRangeBuilder {
    start_location: TextSize,
    end_location: TextSize,
    nesting: u32,
}

impl FStringRangeBuilder {
    fn visit_token(&mut self, token: &Tok, range: TextRange) {
        match token {
            Tok::FStringStart => {
                if self.nesting == 0 {
                    self.start_location = range.start();
                }
                self.nesting += 1;
            }
            Tok::FStringEnd => {
                // We can assume that this will never overflow because we know
                // that the program once parsed to a valid AST, which means that
                // the start and end tokens for f-strings are balanced.
                self.nesting -= 1;
                if self.nesting == 0 {
                    self.end_location = range.end();
                }
            }
            _ => {}
        }
    }

    /// Returns `true` if the lexer is currently inside an f-string.
    ///
    /// It'll return `false` once the `FStringEnd` token for the outermost
    /// f-string is visited.
    const fn in_fstring(&self) -> bool {
        self.nesting > 0
    }

    /// Returns the complete range of the previously visited f-string.
    ///
    /// This method should only be called once the lexer is outside of any
    /// f-string; otherwise it might return an invalid range.
    ///
    /// It doesn't consume the builder because there can be multiple f-strings
    /// throughout the source code.
    fn finish(&self) -> TextRange {
        debug_assert!(!self.in_fstring());
        TextRange::new(self.start_location, self.end_location)
    }
}

/// Generate `flake8-quote` diagnostics from a token stream.
pub(crate) fn from_tokens(
pub(crate) fn check_string_quotes(
    diagnostics: &mut Vec<Diagnostic>,
    lxr: &[LexResult],
    locator: &Locator,
@@ -365,7 +420,13 @@ pub(crate) fn from_tokens(
    // concatenation, and should thus be handled as a single unit.
    let mut sequence = vec![];
    let mut state_machine = StateMachine::default();
    let mut fstring_range_builder = FStringRangeBuilder::default();
    for &(ref tok, range) in lxr.iter().flatten() {
        fstring_range_builder.visit_token(tok, range);
        if fstring_range_builder.in_fstring() {
            continue;
        }

        let is_docstring = state_machine.consume(tok);

        // If this is a docstring, consume the existing sequence, then consume the
@@ -379,14 +440,23 @@
                diagnostics.push(diagnostic);
            }
        } else {
            if tok.is_string() {
                // If this is a string, add it to the sequence.
                sequence.push(range);
            } else if !matches!(tok, Tok::Comment(..) | Tok::NonLogicalNewline) {
                // Otherwise, consume the sequence.
                if !sequence.is_empty() {
                    diagnostics.extend(strings(locator, &sequence, settings));
                    sequence.clear();
            match tok {
                Tok::String { .. } => {
                    // If this is a string, add it to the sequence.
                    sequence.push(range);
                }
                Tok::FStringEnd => {
                    // If this is the end of an f-string, add the entire f-string
                    // range to the sequence.
                    sequence.push(fstring_range_builder.finish());
                }
                Tok::Comment(..) | Tok::NonLogicalNewline => continue,
                _ => {
                    // Otherwise, consume the sequence.
                    if !sequence.is_empty() {
                        diagnostics.extend(strings(locator, &sequence, settings));
                        sequence.clear();
                    }
                }
            }
        }
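As a rough usage sketch of the new token stream itself (not code from this commit; it assumes the `ruff_python_parser` lexer entry point of this period, `lexer::lex` with `Mode::Module`, and it only builds inside the ruff workspace since these crates aren't published on crates.io), printing just the f-string delimiter tokens for a nested f-string shows why the loop above skips everything between the outermost FStringStart and FStringEnd:

use ruff_python_parser::{lexer::lex, Mode, Tok};

fn main() {
    // Each f-string, including the nested one, gets its own
    // FStringStart/FStringEnd pair instead of a single String token.
    let source = r#"x = f"outer {f'inner'} end""#;

    for (tok, range) in lex(source, Mode::Module).flatten() {
        // FStringStart and FStringEnd are unit variants at this commit,
        // as the diff above matches them without fields.
        if matches!(tok, Tok::FStringStart | Tok::FStringEnd) {
            println!("{tok:?} at {range:?}");
        }
    }
}

Every f-string contributes its own start/end pair, which is what the nesting counter in FStringRangeBuilder accounts for when it reports only the outermost range.
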
4 changes: 2 additions & 2 deletions crates/ruff_linter/src/rules/flake8_quotes/rules/mod.rs
@@ -1,5 +1,5 @@
pub(crate) use avoidable_escaped_quote::*;
pub(crate) use from_tokens::*;
pub(crate) use check_string_quotes::*;

mod avoidable_escaped_quote;
mod from_tokens;
mod check_string_quotes;
