1
0
Fork 0

Merging upstream version 25.32.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 21:57:37 +01:00
parent 160ab5bf81
commit 02152e9ba6
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
74 changed files with 2284 additions and 1814 deletions

2
sqlglotrs/Cargo.lock generated
View file

@@ -136,7 +136,7 @@ dependencies = [
[[package]]
name = "sqlglotrs"
-version = "0.2.14"
+version = "0.3.0"
dependencies = [
"pyo3",
]

View file

@@ -1,6 +1,6 @@
[package]
name = "sqlglotrs"
-version = "0.2.14"
+version = "0.3.0"
edition = "2021"
license = "MIT"

View file

@@ -19,6 +19,7 @@ pub struct TokenTypeSettings {
pub string: TokenType,
pub var: TokenType,
pub heredoc_string_alternative: TokenType,
pub hint: TokenType,
}
#[pymethods]
@@ -38,6 +39,7 @@ impl TokenTypeSettings {
string: TokenType,
var: TokenType,
heredoc_string_alternative: TokenType,
hint: TokenType,
) -> Self {
TokenTypeSettings {
bit_string,
@@ -53,6 +55,7 @@ impl TokenTypeSettings {
string,
var,
heredoc_string_alternative,
hint,
}
}
}
@@ -75,9 +78,11 @@ pub struct TokenizerSettings {
pub var_single_tokens: HashSet<char>,
pub commands: HashSet<TokenType>,
pub command_prefix_tokens: HashSet<TokenType>,
pub tokens_preceding_hint: HashSet<TokenType>,
pub heredoc_tag_is_identifier: bool,
pub string_escapes_allowed_in_raw_strings: bool,
pub nested_comments: bool,
pub hint_start: String,
}
#[pymethods]
@@ -99,9 +104,11 @@ impl TokenizerSettings {
var_single_tokens: HashSet<String>,
commands: HashSet<TokenType>,
command_prefix_tokens: HashSet<TokenType>,
tokens_preceding_hint: HashSet<TokenType>,
heredoc_tag_is_identifier: bool,
string_escapes_allowed_in_raw_strings: bool,
nested_comments: bool,
hint_start: String,
) -> Self {
let to_char = |v: &String| {
if v.len() == 1 {
@@ -150,9 +157,11 @@ impl TokenizerSettings {
var_single_tokens: var_single_tokens_native,
commands,
command_prefix_tokens,
tokens_preceding_hint,
heredoc_tag_is_identifier,
string_escapes_allowed_in_raw_strings,
nested_comments,
hint_start,
}
}
}

View file

@@ -395,6 +395,12 @@ impl<'a> TokenizerState<'a> {
.push(self.text()[comment_start_size..].to_string());
}
if comment_start == self.settings.hint_start
&& self.tokens.last().is_some()
&& self.settings.tokens_preceding_hint.contains(&self.tokens.last().unwrap().token_type) {
self.add(self.token_types.hint, None)?;
}
// Leading comment is attached to the succeeding token, whilst trailing comment to the preceding.
// Multiple consecutive comments are preserved by appending them to the current comments list.
if Some(comment_start_line) == self.previous_token_line {