
Adding upstream version 26.25.3.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-06-07 04:46:28 +02:00
parent bc7749846c
commit d9e621c994
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
83 changed files with 67317 additions and 67680 deletions

sqlglotrs/Cargo.lock (generated)

@@ -502,7 +502,7 @@ dependencies = [
 [[package]]
 name = "sqlglotrs"
-version = "0.5.0"
+version = "0.6.1"
 dependencies = [
  "criterion",
  "pyo3",

sqlglotrs/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "sqlglotrs"
-version = "0.5.0"
+version = "0.6.1"
 edition = "2021"
 license = "MIT"

sqlglotrs/src/tokenizer.rs

@@ -1,7 +1,6 @@
 use crate::settings::TokenType;
 use crate::trie::{Trie, TrieResult};
 use crate::{Token, TokenTypeSettings, TokenizerDialectSettings, TokenizerSettings};
-use pyo3::exceptions::PyException;
 use pyo3::prelude::*;
 use std::cmp::{max, min};
@@ -45,7 +44,7 @@ impl Tokenizer {
         &self,
         sql: &str,
         dialect_settings: &TokenizerDialectSettings,
-    ) -> Result<Vec<Token>, PyErr> {
+    ) -> (Vec<Token>, Option<String>) {
         let mut state = TokenizerState::new(
             sql,
             &self.settings,
@@ -53,9 +52,14 @@ impl Tokenizer {
             dialect_settings,
             &self.keyword_trie,
         );
-        state.tokenize().map_err(|e| {
-            PyException::new_err(format!("Error tokenizing '{}': {}", e.context, e.message))
-        })
+        let tokenize_result = state.tokenize();
+        match tokenize_result {
+            Ok(tokens) => (tokens, None),
+            Err(e) => {
+                let msg = format!("Error tokenizing '{}': {}", e.context, e.message);
+                (state.tokens, Some(msg))
+            }
+        }
     }
 }
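
Note on the tokenizer change above: Tokenizer::tokenize no longer constructs a PyErr itself; it now returns whatever tokens were produced together with an optional error message, leaving the caller to decide how to surface the failure. Below is a minimal sketch, not taken from this commit, of how a caller might fold that tuple back into a Result; the generic parameter T stands in for the crate's Token type, and into_result is a hypothetical helper rather than part of the upstream API.

// Minimal sketch assuming only the (Vec<Token>, Option<String>) shape shown in
// the diff; `T` stands in for `Token`, and `into_result` is hypothetical.
fn into_result<T>(output: (Vec<T>, Option<String>)) -> Result<Vec<T>, String> {
    match output {
        // Success: the full token stream is returned unchanged.
        (tokens, None) => Ok(tokens),
        // Failure: a partial token stream is still available; this sketch
        // discards it and propagates only the error message.
        (_partial_tokens, Some(msg)) => Err(msg),
    }
}

fn main() {
    // Simulated outputs standing in for Tokenizer::tokenize results.
    let ok: (Vec<&str>, Option<String>) = (vec!["SELECT", "1"], None);
    let failed: (Vec<&str>, Option<String>) =
        (vec!["SELECT"], Some("Error tokenizing 'SELECT': unexpected end of input".to_string()));

    assert_eq!(into_result(ok), Ok(vec!["SELECT", "1"]));
    assert!(into_result(failed).is_err());
}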