Use the unicode-ident crate (#7212)
MichaReiser committed Sep 7, 2023
1 parent 041cdb9 commit f1a4eb9
Showing 8 changed files with 10 additions and 44 deletions.
29 changes: 3 additions & 26 deletions Cargo.lock

(Generated lockfile; diff not rendered.)

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -49,7 +49,7 @@ toml = { version = "0.7.2" }
tracing = "0.1.37"
tracing-indicatif = "0.3.4"
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
unic-ucd-ident = "0.9.0"
unicode-ident = "1.0.11"
unicode-width = "0.1.10"
uuid = { version = "1.4.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
wsl = { version = "0.1.0" }
3 changes: 1 addition & 2 deletions crates/ruff_python_parser/Cargo.toml
@@ -23,8 +23,7 @@ itertools = { workspace = true }
lalrpop-util = { version = "0.20.0", default-features = false }
num-bigint = { workspace = true }
num-traits = { workspace = true }
unic-emoji-char = "0.9.0"
unic-ucd-ident = { workspace = true }
unicode-ident = { workspace = true }
unicode_names2 = { version = "0.6.0", git = "https://github.com/youknowone/unicode_names2.git", rev = "4ce16aa85cbcdd9cc830410f1a72ef9a235f2fde" }
rustc-hash = { workspace = true }
static_assertions = "1.1.0"
12 changes: 1 addition & 11 deletions crates/ruff_python_parser/src/lexer.rs
@@ -36,8 +36,7 @@ use num_bigint::BigInt;
use num_traits::{Num, Zero};
use ruff_python_ast::IpyEscapeKind;
use ruff_text_size::{TextLen, TextRange, TextSize};
-use unic_emoji_char::is_emoji_presentation;
-use unic_ucd_ident::{is_xid_continue, is_xid_start};
+use unicode_ident::{is_xid_continue, is_xid_start};

use crate::lexer::cursor::{Cursor, EOF_CHAR};
use crate::lexer::indentation::{Indentation, Indentations};
@@ -597,15 +596,6 @@ impl<'source> Lexer<'source> {
             self.state = State::Other;

             Ok((identifier, self.token_range()))
-        } else if is_emoji_presentation(c) {
-            self.state = State::Other;
-
-            Ok((
-                Tok::Name {
-                    name: c.to_string(),
-                },
-                self.token_range(),
-            ))
         } else {
             Err(LexicalError {
                 error: LexicalErrorType::UnrecognizedToken { tok: c },
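The call sites are unchanged because `unicode-ident` exposes the same `is_xid_start` / `is_xid_continue` functions as `unic-ucd-ident`; only the emoji-presentation branch is removed, so emoji now fall through to `UnrecognizedToken`. A minimal sketch of this kind of identifier classification (the helper names are illustrative, not ruff's actual functions):

```rust
// Sketch only: shows how a lexer might classify identifier characters with
// the unicode-ident crate. Helper names are illustrative assumptions.
use unicode_ident::{is_xid_continue, is_xid_start};

fn is_identifier_start(c: char) -> bool {
    c == '_' || c.is_ascii_alphabetic() || (!c.is_ascii() && is_xid_start(c))
}

fn is_identifier_continuation(c: char) -> bool {
    c == '_' || c.is_ascii_alphanumeric() || (!c.is_ascii() && is_xid_continue(c))
}

fn main() {
    assert!(is_identifier_start('α'));
    assert!(is_identifier_continuation('9'));
    // Emoji are neither XID_Start nor XID_Continue, so after this change they
    // are rejected as unrecognized tokens instead of being lexed as names.
    assert!(!is_identifier_start('🐍'));
}
```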
2 changes: 1 addition & 1 deletion crates/ruff_python_stdlib/Cargo.toml
@@ -13,4 +13,4 @@ license = { workspace = true }
[lib]

[dependencies]
-unic-ucd-ident = { workspace = true }
+unicode-ident = { workspace = true }
2 changes: 1 addition & 1 deletion crates/ruff_python_stdlib/src/identifiers.rs
@@ -1,4 +1,4 @@
-use unic_ucd_ident::{is_xid_continue, is_xid_start};
+use unicode_ident::{is_xid_continue, is_xid_start};

use crate::keyword::is_keyword;

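In `ruff_python_stdlib`, too, only the import changes; the XID-based checks behave identically. A hedged sketch of the kind of check built on these functions (the `is_identifier` signature and the truncated inline keyword list are illustrative assumptions, not the crate's exact code, which uses `is_keyword`):

```rust
use unicode_ident::{is_xid_continue, is_xid_start};

/// Sketch: returns `true` if `name` is a well-formed, non-keyword identifier.
/// The keyword list is truncated for illustration.
fn is_identifier(name: &str) -> bool {
    let mut chars = name.chars();
    let Some(first) = chars.next() else {
        return false;
    };
    (first == '_' || is_xid_start(first))
        && chars.all(is_xid_continue)
        && !["if", "else", "for", "while", "def", "class"].contains(&name)
}

fn main() {
    assert!(is_identifier("变量")); // non-ASCII identifiers are accepted
    assert!(!is_identifier("class")); // keywords are rejected
    assert!(!is_identifier("1abc")); // digits cannot start an identifier
}
```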
2 changes: 1 addition & 1 deletion crates/ruff_python_trivia/Cargo.toml
@@ -18,7 +18,7 @@ ruff_source_file = { path = "../ruff_source_file" }

memchr = { workspace = true }
smallvec = { workspace = true }
-unic-ucd-ident = { workspace = true }
+unicode-ident = { workspace = true }

[dev-dependencies]
insta = { workspace = true }
2 changes: 1 addition & 1 deletion crates/ruff_python_trivia/src/tokenizer.rs
@@ -1,5 +1,5 @@
use memchr::{memchr2, memchr3, memrchr3_iter};
-use unic_ucd_ident::{is_xid_continue, is_xid_start};
+use unicode_ident::{is_xid_continue, is_xid_start};

use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

