1
0
Fork 0

Adding upstream version 26.1.3.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-02-13 21:59:41 +01:00
parent 09521056ff
commit d908bee480
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
119 changed files with 71635 additions and 68059 deletions

View file

@@ -0,0 +1 @@
{"unescaped_sequences":{},"identifiers_can_start_with_digit":false,"numbers_can_be_underscore_separated":false}

74
sqlglotrs/benches/long.rs Normal file
View file

@@ -0,0 +1,74 @@
use std::path::Path;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use sqlglotrs::settings::{TokenTypeSettings, TokenizerDialectSettings, TokenizerSettings};
use sqlglotrs::tokenizer::Tokenizer;
/// Benchmark input: a long, realistic SQL query.
///
/// Deliberately mixes double-quoted identifiers, `||` string concatenation,
/// `TO_CHAR` calls whose format arguments contain doubled-single-quote
/// escapes, a `CASE` expression with arithmetic, and a deep chain of JOINs,
/// so a single tokenize pass exercises many token kinds.
/// NOTE(review): this literal is the measured workload — it must not change,
/// or benchmark results stop being comparable across revisions.
pub const LONG: &str = r#"
SELECT
  "e"."employee_id" AS "Employee #",
  "e"."first_name" || ' ' || "e"."last_name" AS "Name",
  "e"."email" AS "Email",
  "e"."phone_number" AS "Phone",
  TO_CHAR("e"."hire_date", 'MM/DD/YYYY') AS "Hire Date",
  TO_CHAR("e"."salary", 'L99G999D99', 'NLS_NUMERIC_CHARACTERS = ''.,'' NLS_CURRENCY = ''$''') AS "Salary",
  "e"."commission_pct" AS "Commission %",
  'works as ' || "j"."job_title" || ' in ' || "d"."department_name" || ' department (manager: ' || "dm"."first_name" || ' ' || "dm"."last_name" || ') and immediate supervisor: ' || "m"."first_name" || ' ' || "m"."last_name" AS "Current Job",
  TO_CHAR("j"."min_salary", 'L99G999D99', 'NLS_NUMERIC_CHARACTERS = ''.,'' NLS_CURRENCY = ''$''') || ' - ' || TO_CHAR("j"."max_salary", 'L99G999D99', 'NLS_NUMERIC_CHARACTERS = ''.,'' NLS_CURRENCY = ''$''') AS "Current Salary",
  "l"."street_address" || ', ' || "l"."postal_code" || ', ' || "l"."city" || ', ' || "l"."state_province" || ', ' || "c"."country_name" || ' (' || "r"."region_name" || ')' AS "Location",
  "jh"."job_id" AS "History Job ID",
  'worked from ' || TO_CHAR("jh"."start_date", 'MM/DD/YYYY') || ' to ' || TO_CHAR("jh"."end_date", 'MM/DD/YYYY') || ' as ' || "jj"."job_title" || ' in ' || "dd"."department_name" || ' department' AS "History Job Title",
  case when 1 then 1 when 2 then 2 when 3 then 3 when 4 then 4 when 5 then 5 else a(b(c + 1 * 3 % 4)) end
FROM "employees" AS e
JOIN "jobs" AS j
  ON "e"."job_id" = "j"."job_id"
LEFT JOIN "employees" AS m
  ON "e"."manager_id" = "m"."employee_id"
LEFT JOIN "departments" AS d
  ON "d"."department_id" = "e"."department_id"
LEFT JOIN "employees" AS dm
  ON "d"."manager_id" = "dm"."employee_id"
LEFT JOIN "locations" AS l
  ON "d"."location_id" = "l"."location_id"
LEFT JOIN "countries" AS c
  ON "l"."country_id" = "c"."country_id"
LEFT JOIN "regions" AS r
  ON "c"."region_id" = "r"."region_id"
LEFT JOIN "job_history" AS jh
  ON "e"."employee_id" = "jh"."employee_id"
LEFT JOIN "jobs" AS jj
  ON "jj"."job_id" = "jh"."job_id"
LEFT JOIN "departments" AS dd
  ON "dd"."department_id" = "jh"."department_id"
ORDER BY
  "e"."employee_id"
"#;
/// Criterion benchmark: tokenize the `LONG` SQL query.
///
/// The tokenizer, token-type, and dialect settings are deserialized from
/// JSON fixture files stored next to this benchmark, so the Rust tokenizer
/// runs with a realistic configuration instead of hard-coded defaults.
///
/// Panics (with the offending file named) if any fixture is missing or
/// fails to parse — a broken fixture is a setup bug, not a runtime error.
fn long(c: &mut Criterion) {
    // All fixture files live alongside this source file in `benches/`.
    let path = Path::new(env!("CARGO_MANIFEST_DIR")).join("benches");

    let tokenizer_json = std::fs::read_to_string(path.join("tokenizer_settings.json"))
        .expect("failed to read benches/tokenizer_settings.json");
    let tokenizer_settings = serde_json::from_str::<TokenizerSettings>(&tokenizer_json)
        .expect("failed to parse benches/tokenizer_settings.json");

    // The original bound this to `settings_type_file`, which misleadingly
    // suggested raw file contents rather than the parsed settings struct.
    let token_type_json = std::fs::read_to_string(path.join("token_type_settings.json"))
        .expect("failed to read benches/token_type_settings.json");
    let token_type_settings = serde_json::from_str::<TokenTypeSettings>(&token_type_json)
        .expect("failed to parse benches/token_type_settings.json");

    let dialect_json = std::fs::read_to_string(path.join("dialect_settings.json"))
        .expect("failed to read benches/dialect_settings.json");
    let dialect_settings = serde_json::from_str::<TokenizerDialectSettings>(&dialect_json)
        .expect("failed to parse benches/dialect_settings.json");

    let tokenizer = Tokenizer::new(tokenizer_settings, token_type_settings);
    c.bench_function("long", |b| {
        // `black_box` keeps the optimizer from const-folding the work away.
        b.iter(|| black_box(tokenizer.tokenize(LONG, &dialect_settings)));
    });
}
// Register the `long` benchmark and generate the harness entry point.
// The short form of `criterion_group!` expands to the struct-syntax form
// with `config = Criterion::default()`, so this is equivalent to spelling
// out `name = benches; config = Criterion::default(); targets = long`.
criterion_group!(benches, long);
criterion_main!(benches);

View file

@@ -0,0 +1 @@
{"bit_string":67,"break_":55,"dcolon":11,"heredoc_string":72,"raw_string":71,"hex_string":68,"identifier":58,"number":57,"parameter":47,"semicolon":13,"string":56,"var":66,"heredoc_string_alternative":66,"hint":254}

View file

@@ -0,0 +1 @@
{"unescaped_sequences":{},"identifiers_can_start_with_digit":false,"numbers_can_be_underscore_separated":false}

View file

@@ -0,0 +1 @@
{"white_space":{"\n":55,"\t":54,"\r":55," ":54},"single_tokens":{"\"":320,",":6,".":7,"[":2,"*":14,":":10,"]":3,"'":320,"(":0,")":1,"?":311,"-":8,"@":47,"$":46},"keywords":{"..":7},"numeric_literals":{},"identifiers":{"\"":"\""},"identifier_escapes":["\\"],"string_escapes":["\\"],"quotes":{"'":"'"},"format_strings":{"N'":["'",70],"n'":["'",70]},"has_bit_strings":false,"has_hex_strings":false,"comments":{"{#":"#}","--":null,"/*":"*/"},"var_single_tokens":[],"commands":[237,341,205,234,324],"command_prefix_tokens":[13,197],"tokens_preceding_hint":[261,334,221,361],"heredoc_tag_is_identifier":false,"string_escapes_allowed_in_raw_strings":true,"nested_comments":true,"hint_start":"/*+"}