Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Improve parser error messaging around mistakes in/around version specifiers #662

Merged
Merged 3 commits on Jan 16, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
25 changes: 22 additions & 3 deletions src/packaging/_parser.py
Expand Up @@ -163,7 +163,11 @@ def _parse_extras(tokenizer: Tokenizer) -> List[str]:
if not tokenizer.check("LEFT_BRACKET", peek=True):
return []

with tokenizer.enclosing_tokens("LEFT_BRACKET", "RIGHT_BRACKET"):
with tokenizer.enclosing_tokens(
"LEFT_BRACKET",
"RIGHT_BRACKET",
around="extras",
):
tokenizer.consume("WS")
extras = _parse_extras_list(tokenizer)
tokenizer.consume("WS")
Expand Down Expand Up @@ -203,7 +207,11 @@ def _parse_specifier(tokenizer: Tokenizer) -> str:
specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
| WS? version_many WS?
"""
with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"):
with tokenizer.enclosing_tokens(
"LEFT_PARENTHESIS",
"RIGHT_PARENTHESIS",
around="version specifier",
):
tokenizer.consume("WS")
parsed_specifiers = _parse_version_many(tokenizer)
tokenizer.consume("WS")
Expand All @@ -217,7 +225,14 @@ def _parse_version_many(tokenizer: Tokenizer) -> str:
"""
parsed_specifiers = ""
while tokenizer.check("SPECIFIER"):
span_start = tokenizer.position
parsed_specifiers += tokenizer.read().text
if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
tokenizer.raise_syntax_error(
".* suffix can only be used with `==` or `!=` operators",
span_start=span_start,
span_end=tokenizer.position + 1,
)
tokenizer.consume("WS")
if not tokenizer.check("COMMA"):
break
Expand Down Expand Up @@ -254,7 +269,11 @@ def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:

tokenizer.consume("WS")
if tokenizer.check("LEFT_PARENTHESIS", peek=True):
with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"):
with tokenizer.enclosing_tokens(
"LEFT_PARENTHESIS",
"RIGHT_PARENTHESIS",
around="marker expression",
):
tokenizer.consume("WS")
marker: MarkerAtom = _parse_marker(tokenizer)
tokenizer.consume("WS")
Expand Down
9 changes: 6 additions & 3 deletions src/packaging/_tokenizer.py
Expand Up @@ -78,6 +78,7 @@ def __str__(self) -> str:
"AT": r"\@",
"URL": r"[^ \t]+",
"IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
"VERSION_PREFIX_TRAIL": r"\.\*",
"WS": r"[ \t]+",
"END": r"$",
}
Expand Down Expand Up @@ -167,21 +168,23 @@ def raise_syntax_error(
)

@contextlib.contextmanager
def enclosing_tokens(self, open_token: str, close_token: str) -> Iterator[bool]:
def enclosing_tokens(
self, open_token: str, close_token: str, *, around: str
) -> Iterator[None]:
if self.check(open_token):
open_position = self.position
self.read()
else:
open_position = None

yield open_position is not None
yield

if open_position is None:
return

if not self.check(close_token):
self.raise_syntax_error(
f"Expected closing {close_token}",
f"Expected matching {close_token} for {open_token}, after {around}",
span_start=open_position,
)

Expand Down
42 changes: 39 additions & 3 deletions tests/test_requirements.py
Expand Up @@ -279,11 +279,28 @@ def test_error_when_parens_not_closed_correctly(self) -> None:
# THEN
assert ctx.exconly() == (
"packaging.requirements.InvalidRequirement: "
"Expected closing RIGHT_PARENTHESIS\n"
"Expected matching RIGHT_PARENTHESIS for LEFT_PARENTHESIS, "
"after version specifier\n"
" name (>= 1.0\n"
" ~~~~~~~^"
)

def test_error_when_prefix_match_is_used_incorrectly(self) -> None:
    """A ``.*`` prefix-match suffix after a non-equality operator (here ``>=``)
    must be rejected with a targeted error message pointing at the specifier."""
    requirement_string = "black (>=20.*) ; extra == 'format'"

    with pytest.raises(InvalidRequirement) as excinfo:
        Requirement(requirement_string)

    # The error names the offending suffix and underlines the bad specifier.
    expected = (
        "packaging.requirements.InvalidRequirement: "
        ".* suffix can only be used with `==` or `!=` operators\n"
        " black (>=20.*) ; extra == 'format'\n"
        " ~~~~~^"
    )
    assert excinfo.exconly() == expected

def test_error_when_bracket_not_closed_correctly(self) -> None:
# GIVEN
to_parse = "name[bar, baz >= 1.0"
Expand All @@ -295,7 +312,8 @@ def test_error_when_bracket_not_closed_correctly(self) -> None:
# THEN
assert ctx.exconly() == (
"packaging.requirements.InvalidRequirement: "
"Expected closing RIGHT_BRACKET\n"
"Expected matching RIGHT_BRACKET for LEFT_BRACKET, "
"after extras\n"
" name[bar, baz >= 1.0\n"
" ~~~~~~~~~~^"
)
Expand All @@ -311,7 +329,8 @@ def test_error_when_extras_bracket_left_unclosed(self) -> None:
# THEN
assert ctx.exconly() == (
"packaging.requirements.InvalidRequirement: "
"Expected closing RIGHT_BRACKET\n"
"Expected matching RIGHT_BRACKET for LEFT_BRACKET, "
"after extras\n"
" name[bar, baz\n"
" ~~~~~~~~~^"
)
Expand All @@ -332,6 +351,23 @@ def test_error_no_space_after_url(self) -> None:
" ~~~~~~~~~~~~~~~~~~~~~~^"
)

def test_error_marker_bracket_unclosed(self) -> None:
    """An unclosed parenthesis in an environment-marker expression must
    produce an error that names both delimiters and the enclosed construct."""
    requirement_string = "name; (extra == 'example'"

    with pytest.raises(InvalidRequirement) as excinfo:
        Requirement(requirement_string)

    # The message identifies the missing RIGHT_PARENTHESIS and its opener.
    expected = (
        "packaging.requirements.InvalidRequirement: "
        "Expected matching RIGHT_PARENTHESIS for LEFT_PARENTHESIS, "
        "after marker expression\n"
        " name; (extra == 'example'\n"
        " ~~~~~~~~~~~~~~~~~~~^"
    )
    assert excinfo.exconly() == expected

def test_error_no_url_after_at(self) -> None:
# GIVEN
to_parse = "name @ "
Expand Down