Adding upstream version 11.4.5.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent 36706608dc
commit 3e1362a879
131 changed files with 53004 additions and 37079 deletions

@@ -6,6 +6,8 @@ class TestBigQuery(Validator):
    dialect = "bigquery"

    def test_bigquery(self):
        self.validate_identity("SELECT AS STRUCT 1 AS a, 2 AS b")
        self.validate_identity("SELECT AS VALUE STRUCT(1 AS a, 2 AS b)")
        self.validate_identity("SELECT STRUCT<ARRAY<STRING>>(['2023-01-17'])")
        self.validate_identity("SELECT * FROM q UNPIVOT(values FOR quarter IN (b, c))")
        self.validate_identity(

@@ -13,6 +15,15 @@ class TestBigQuery(Validator):
        )

        self.validate_all("LEAST(x, y)", read={"sqlite": "MIN(x, y)"})
        self.validate_all("CAST(x AS CHAR)", write={"bigquery": "CAST(x AS STRING)"})
        self.validate_all("CAST(x AS NCHAR)", write={"bigquery": "CAST(x AS STRING)"})
        self.validate_all("CAST(x AS NVARCHAR)", write={"bigquery": "CAST(x AS STRING)"})
        self.validate_all(
            "SELECT ARRAY(SELECT AS STRUCT 1 a, 2 b)",
            write={
                "bigquery": "SELECT ARRAY(SELECT AS STRUCT 1 AS a, 2 AS b)",
            },
        )
        self.validate_all(
            "REGEXP_CONTAINS('foo', '.*')",
            read={"bigquery": "REGEXP_CONTAINS('foo', '.*')"},

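In these tests, validate_all parses the read-dialect SQL and checks that it renders as the target-dialect SQL. A minimal standalone sketch of the LEAST/MIN mapping above, assuming only sqlglot's public transpile function (which the Validator helpers wrap):

    import sqlglot

    # SQLite's two-argument MIN is a scalar function; BigQuery spells it LEAST.
    print(sqlglot.transpile("MIN(x, y)", read="sqlite", write="bigquery"))
    # expected, per the test above: ['LEAST(x, y)']
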
@@ -812,11 +812,13 @@ class TestDialect(Validator):
        self.validate_all(
            "JSON_EXTRACT(x, 'y')",
            read={
                "mysql": "JSON_EXTRACT(x, 'y')",
                "postgres": "x->'y'",
                "presto": "JSON_EXTRACT(x, 'y')",
                "starrocks": "x -> 'y'",
            },
            write={
                "mysql": "JSON_EXTRACT(x, 'y')",
                "oracle": "JSON_EXTRACT(x, 'y')",
                "postgres": "x -> 'y'",
                "presto": "JSON_EXTRACT(x, 'y')",

@@ -834,6 +836,17 @@ class TestDialect(Validator):
                "presto": "JSON_EXTRACT_SCALAR(x, 'y')",
            },
        )
        self.validate_all(
            "JSON_EXTRACT_SCALAR(stream_data, '$.data.results')",
            read={
                "hive": "GET_JSON_OBJECT(stream_data, '$.data.results')",
                "mysql": "stream_data ->> '$.data.results'",
            },
            write={
                "hive": "GET_JSON_OBJECT(stream_data, '$.data.results')",
                "mysql": "stream_data ->> '$.data.results'",
            },
        )
        self.validate_all(
            "JSONB_EXTRACT(x, 'y')",
            read={

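Because every read entry maps to the same canonical expression, any read dialect can be paired with any write dialect from the same test. A sketch, assuming the public transpile API:

    import sqlglot

    # MySQL's ->> operator and Hive's GET_JSON_OBJECT both map to JSON_EXTRACT_SCALAR.
    print(sqlglot.transpile("stream_data ->> '$.data.results'", read="mysql", write="hive"))
    # expected, per the test above: ["GET_JSON_OBJECT(stream_data, '$.data.results')"]
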
@@ -1000,6 +1013,7 @@ class TestDialect(Validator):
        self.validate_identity("some.column LIKE 'foo' || another.column || 'bar' || LOWER(x)")
        self.validate_identity("some.column LIKE 'foo' + another.column + 'bar'")

        self.validate_all("LIKE(x, 'z')", write={"": "'z' LIKE x"})
        self.validate_all(
            "x ILIKE '%y'",
            read={

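The empty-string key above targets sqlglot's default, dialect-neutral writer. A sketch of the functional LIKE rewrite, assuming transpile with no dialects reads and writes in that default:

    import sqlglot

    # LIKE(pattern, value) is normalized to the infix form: value LIKE pattern.
    print(sqlglot.transpile("LIKE(x, 'z')"))
    # expected, per the test above: ["'z' LIKE x"]
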
@@ -1196,9 +1210,13 @@ class TestDialect(Validator):
        )
        self.validate_all(
            "SELECT x FROM y LIMIT 10",
            read={
                "tsql": "SELECT TOP 10 x FROM y",
            },
            write={
                "sqlite": "SELECT x FROM y LIMIT 10",
                "oracle": "SELECT x FROM y FETCH FIRST 10 ROWS ONLY",
                "tsql": "SELECT x FROM y FETCH FIRST 10 ROWS ONLY",
            },
        )
        self.validate_all(

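Combining the read and write maps above end to end, a sketch with the public transpile function:

    import sqlglot

    # T-SQL's TOP is parsed into the canonical LIMIT and written back out per dialect.
    print(sqlglot.transpile("SELECT TOP 10 x FROM y", read="tsql", write="sqlite"))
    # expected, per the test above: ['SELECT x FROM y LIMIT 10']
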
@@ -1493,6 +1511,46 @@ SELECT
            },
        )

    def test_logarithm(self):
        self.validate_all(
            "LOG(x)",
            read={
                "duckdb": "LOG(x)",
                "postgres": "LOG(x)",
                "redshift": "LOG(x)",
                "sqlite": "LOG(x)",
                "teradata": "LOG(x)",
            },
        )
        self.validate_all(
            "LN(x)",
            read={
                "bigquery": "LOG(x)",
                "clickhouse": "LOG(x)",
                "databricks": "LOG(x)",
                "drill": "LOG(x)",
                "hive": "LOG(x)",
                "mysql": "LOG(x)",
                "tsql": "LOG(x)",
            },
        )
        self.validate_all(
            "LOG(b, n)",
            read={
                "bigquery": "LOG(n, b)",
                "databricks": "LOG(b, n)",
                "drill": "LOG(b, n)",
                "hive": "LOG(b, n)",
                "mysql": "LOG(b, n)",
                "oracle": "LOG(b, n)",
                "postgres": "LOG(b, n)",
                "snowflake": "LOG(b, n)",
                "spark": "LOG(b, n)",
                "sqlite": "LOG(b, n)",
                "tsql": "LOG(n, b)",
            },
        )

    def test_count_if(self):
        self.validate_identity("COUNT_IF(DISTINCT cond)")

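The LOG tests pin down argument order: BigQuery and T-SQL take (value, base), while the canonical form is (base, value). A sketch, assuming parse_one plus the default-dialect renderer that validate_all compares against:

    from sqlglot import parse_one

    # BigQuery's LOG(value, base) comes out in canonical (base, value) order.
    print(parse_one("LOG(n, b)", read="bigquery").sql())
    # expected, per the test above: LOG(b, n)
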
@@ -125,6 +125,7 @@ class TestDuckDB(Validator):
            "SELECT a['x space'] FROM (SELECT {'x space': 1, 'y': 2, 'z': 3} AS a)"
        )

        self.validate_all("x ~ y", write={"duckdb": "REGEXP_MATCHES(x, y)"})
        self.validate_all("SELECT * FROM 'x.y'", write={"duckdb": 'SELECT * FROM "x.y"'})
        self.validate_all(
            "WITH 'x' AS (SELECT 1) SELECT * FROM x",

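A sketch of the ~ rewrite, assuming validate_all parses its input with the test class's dialect (duckdb here):

    import sqlglot

    # DuckDB's ~ regex operator is written back as the explicit function form.
    print(sqlglot.transpile("x ~ y", read="duckdb", write="duckdb"))
    # expected, per the test above: ['REGEXP_MATCHES(x, y)']
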
@@ -246,6 +246,30 @@ class TestHive(Validator):
        )

    def test_time(self):
        self.validate_all(
            "(UNIX_TIMESTAMP(y) - UNIX_TIMESTAMP(x)) * 1000",
            read={
                "presto": "DATE_DIFF('millisecond', x, y)",
            },
        )
        self.validate_all(
            "UNIX_TIMESTAMP(y) - UNIX_TIMESTAMP(x)",
            read={
                "presto": "DATE_DIFF('second', x, y)",
            },
        )
        self.validate_all(
            "(UNIX_TIMESTAMP(y) - UNIX_TIMESTAMP(x)) / 60",
            read={
                "presto": "DATE_DIFF('minute', x, y)",
            },
        )
        self.validate_all(
            "(UNIX_TIMESTAMP(y) - UNIX_TIMESTAMP(x)) / 3600",
            read={
                "presto": "DATE_DIFF('hour', x, y)",
            },
        )
        self.validate_all(
            "DATEDIFF(a, b)",
            write={

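A sketch of the unit-based diff lowering, assuming the public transpile function:

    import sqlglot

    # Presto's DATE_DIFF with a unit is lowered to UNIX_TIMESTAMP arithmetic in Hive.
    print(sqlglot.transpile("DATE_DIFF('second', x, y)", read="presto", write="hive"))
    # expected, per the test above: ['UNIX_TIMESTAMP(y) - UNIX_TIMESTAMP(x)']
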
@@ -16,6 +16,7 @@ class TestMySQL(Validator):
        )

    def test_identity(self):
        self.validate_identity("x ->> '$.name'")
        self.validate_identity("SELECT CAST(`a`.`b` AS INT) FROM foo")
        self.validate_identity("SELECT TRIM(LEADING 'bla' FROM ' XXX ')")
        self.validate_identity("SELECT TRIM(TRAILING 'bla' FROM ' XXX ')")

@@ -424,6 +425,10 @@ COMMENT='客户账户表'"""
        show = self.validate_identity("SHOW INDEX FROM foo FROM bar")
        self.assertEqual(show.text("db"), "bar")

        self.validate_all(
            "SHOW INDEX FROM bar.foo", write={"mysql": "SHOW INDEX FROM foo FROM bar"}
        )

    def test_show_db_like_or_where_sql(self):
        for key in [
            "OPEN TABLES",

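A sketch of the SHOW INDEX normalization, assuming the public transpile function:

    import sqlglot

    # The dotted form bar.foo is rewritten to MySQL's two-clause FROM ... FROM form.
    print(sqlglot.transpile("SHOW INDEX FROM bar.foo", read="mysql", write="mysql"))
    # expected, per the test above: ['SHOW INDEX FROM foo FROM bar']
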
@@ -12,6 +12,24 @@ class TestOracle(Validator):
        self.validate_identity("SELECT e1.x, e2.x FROM e e1, e e2 WHERE e1.y = e2.y (+)")
        self.validate_identity("SELECT e1.x, e2.x FROM e e1, e e2 WHERE e1.y (+) = e2.y (+)")

    def test_hints(self):
        self.validate_identity("SELECT /*+ USE_NL(A B) */ A.COL_TEST FROM TABLE_A A, TABLE_B B")
        self.validate_identity(
            "SELECT /*+ INDEX(v.j jhist_employee_ix (employee_id start_date)) */ * FROM v"
        )
        self.validate_identity(
            "SELECT /*+ USE_NL(A B C) */ A.COL_TEST FROM TABLE_A A, TABLE_B B, TABLE_C C"
        )
        self.validate_identity(
            "SELECT /*+ NO_INDEX(employees emp_empid) */ employee_id FROM employees WHERE employee_id > 200"
        )
        self.validate_identity(
            "SELECT /*+ NO_INDEX_FFS(items item_order_ix) */ order_id FROM order_items items"
        )
        self.validate_identity(
            "SELECT /*+ LEADING(e j) */ * FROM employees e, departments d, job_history j WHERE e.department_id = d.department_id AND e.hire_date = j.start_date"
        )

    def test_xml_table(self):
        self.validate_identity("XMLTABLE('x')")
        self.validate_identity("XMLTABLE('x' RETURNING SEQUENCE BY REF)")

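validate_identity asserts that parsing and regenerating returns the input unchanged. A sketch, assuming parse_one and Expression.sql:

    from sqlglot import parse_one

    sql = "SELECT /*+ USE_NL(A B) */ A.COL_TEST FROM TABLE_A A, TABLE_B B"
    # Round trip: the optimizer hint must survive parse and regeneration.
    assert parse_one(sql, read="oracle").sql(dialect="oracle") == sql
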
@@ -194,8 +194,9 @@ class TestPostgres(Validator):
             write={
                 "postgres": "SELECT * FROM x FETCH FIRST 1 ROWS ONLY",
                 "presto": "SELECT * FROM x FETCH FIRST 1 ROWS ONLY",
-                "hive": "SELECT * FROM x FETCH FIRST 1 ROWS ONLY",
-                "spark": "SELECT * FROM x FETCH FIRST 1 ROWS ONLY",
+                "hive": "SELECT * FROM x LIMIT 1",
+                "spark": "SELECT * FROM x LIMIT 1",
+                "sqlite": "SELECT * FROM x LIMIT 1",
             },
         )
         self.validate_all(

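This hunk fixes the hive and spark expectations (neither has a FETCH FIRST clause) and adds sqlite. A sketch, assuming the elided source SQL is the postgres FETCH FIRST form shown in the write map:

    import sqlglot

    # FETCH FIRST has no SQLite equivalent and is rewritten to LIMIT.
    print(sqlglot.transpile("SELECT * FROM x FETCH FIRST 1 ROWS ONLY", read="postgres", write="sqlite"))
    # expected, per the test above: ['SELECT * FROM x LIMIT 1']
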
@@ -369,6 +369,12 @@ class TestPresto(Validator):
        self.validate_identity("START TRANSACTION ISOLATION LEVEL REPEATABLE READ")
        self.validate_identity("APPROX_PERCENTILE(a, b, c, d)")

        self.validate_all(
            "SELECT JSON_OBJECT(KEY 'key1' VALUE 1, KEY 'key2' VALUE TRUE)",
            write={
                "presto": "SELECT JSON_OBJECT('key1': 1, 'key2': TRUE)",
            },
        )
        self.validate_all(
            "ARRAY_AGG(x ORDER BY y DESC)",
            write={

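A sketch of the JSON_OBJECT rendering, assuming TestPresto's validate_all parses its input as presto:

    import sqlglot

    # ANSI KEY ... VALUE pairs are written with Presto's 'key': value syntax.
    sql = "SELECT JSON_OBJECT(KEY 'key1' VALUE 1, KEY 'key2' VALUE TRUE)"
    print(sqlglot.transpile(sql, read="presto", write="presto"))
    # expected, per the test above: ["SELECT JSON_OBJECT('key1': 1, 'key2': TRUE)"]
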
@@ -530,6 +530,7 @@ class TestSnowflake(Validator):
                "snowflake": "DATEADD(DAY, 5, CAST('2008-12-25' AS DATE))",
            },
        )
        self.validate_identity("DATEDIFF(DAY, 5, CAST('2008-12-25' AS DATE))")

    def test_semi_structured_types(self):
        self.validate_identity("SELECT CAST(a AS VARIANT)")

@@ -814,6 +815,7 @@ FROM persons AS p, LATERAL FLATTEN(input => p.c, path => 'contact') AS f, LATERA

        self.assertIsInstance(like, exp.LikeAny)
        self.assertIsInstance(ilike, exp.ILikeAny)
        like.sql()  # check that this doesn't raise

    def test_match_recognize(self):
        for row in (

@@ -212,6 +212,7 @@ TBLPROPERTIES (
        self.validate_identity("TRIM(BOTH 'SL' FROM 'SSparkSQLS')")
        self.validate_identity("TRIM(LEADING 'SL' FROM 'SSparkSQLS')")
        self.validate_identity("TRIM(TRAILING 'SL' FROM 'SSparkSQLS')")
        self.validate_identity("SPLIT(str, pattern, lim)")

        self.validate_all(
            "CAST(x AS TIMESTAMP)", read={"trino": "CAST(x AS TIMESTAMP(6) WITH TIME ZONE)"}

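A sketch of the timestamp cast above, assuming the public transpile function:

    import sqlglot

    # Spark has no parameterized TIMESTAMP(p) WITH TIME ZONE; precision and zone are dropped.
    print(sqlglot.transpile("CAST(x AS TIMESTAMP(6) WITH TIME ZONE)", read="trino", write="spark"))
    # expected, per the test above: ['CAST(x AS TIMESTAMP)']
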
@@ -56,6 +56,11 @@ class TestSQLite(Validator):
        )

    def test_sqlite(self):
        self.validate_all("SELECT LIKE(y, x)", write={"sqlite": "SELECT x LIKE y"})
        self.validate_all("SELECT GLOB('*y*', 'xyz')", write={"sqlite": "SELECT 'xyz' GLOB '*y*'"})
        self.validate_all(
            "SELECT LIKE('%y%', 'xyz', '')", write={"sqlite": "SELECT 'xyz' LIKE '%y%' ESCAPE ''"}
        )
        self.validate_all(
            "CURRENT_DATE",
            read={

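A sketch of the GLOB normalization, assuming TestSQLite's validate_all parses its input as sqlite:

    import sqlglot

    # The functional GLOB(pattern, value) form becomes the infix value GLOB pattern.
    print(sqlglot.transpile("SELECT GLOB('*y*', 'xyz')", read="sqlite", write="sqlite"))
    # expected, per the test above: ["SELECT 'xyz' GLOB '*y*'"]
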
@@ -7,6 +7,7 @@ class TestTSQL(Validator):

    def test_tsql(self):
        self.validate_identity("SELECT CASE WHEN a > 1 THEN b END")
        self.validate_identity("SELECT * FROM taxi ORDER BY 1 OFFSET 0 ROWS FETCH NEXT 3 ROWS ONLY")
        self.validate_identity("END")
        self.validate_identity("@x")
        self.validate_identity("#x")

@@ -567,15 +568,21 @@ WHERE
            write={"spark": "LAST_DAY(ADD_MONTHS(CURRENT_TIMESTAMP(), -1))"},
        )

    def test_variables(self):
        # In TSQL @, # can be used as a prefix for variables/identifiers
        expr = parse_one("@x", read="tsql")
        self.assertIsInstance(expr, exp.Column)
        self.assertIsInstance(expr.this, exp.Identifier)

    def test_identifier_prefixes(self):
        expr = parse_one("#x", read="tsql")
        self.assertIsInstance(expr, exp.Column)
        self.assertIsInstance(expr.this, exp.Identifier)
        self.assertEqual(expr.sql("tsql"), "#x")

        expr = parse_one("@x", read="tsql")
        self.assertIsInstance(expr, exp.Parameter)
        self.assertIsInstance(expr.this, exp.Var)
        self.assertEqual(expr.sql("tsql"), "@x")

        table = parse_one("select * from @x", read="tsql").args["from"].expressions[0]
        self.assertIsInstance(table, exp.Table)
        self.assertIsInstance(table.this, exp.Parameter)
        self.assertIsInstance(table.this.this, exp.Var)

    def test_system_time(self):
        self.validate_all(

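This hunk appears to replace the old test_variables expectations (where @x parsed as a column) with test_identifier_prefixes, so @x and #x now parse differently. A sketch, assuming the test_identifier_prefixes assertions reflect the new behavior:

    from sqlglot import exp, parse_one

    # Per test_identifier_prefixes above: @ introduces a parameter, # a plain column identifier.
    assert isinstance(parse_one("@x", read="tsql"), exp.Parameter)
    assert isinstance(parse_one("#x", read="tsql"), exp.Column)
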