Adding upstream version 18.13.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent e833f2baa5
commit fc6bad5705
76 changed files with 21248 additions and 19605 deletions
tests/dialects/test_duckdb.py
@@ -790,3 +790,11 @@ class TestDuckDB(Validator):
                 "duckdb": "ALTER TABLE db.t1 RENAME TO t2",
             },
         )
+
+    def test_timestamps_with_units(self):
+        self.validate_all(
+            "SELECT w::TIMESTAMP_S, x::TIMESTAMP_MS, y::TIMESTAMP_US, z::TIMESTAMP_NS",
+            write={
+                "duckdb": "SELECT CAST(w AS TIMESTAMP_S), CAST(x AS TIMESTAMP_MS), CAST(y AS TIMESTAMP), CAST(z AS TIMESTAMP_NS)",
+            },
+        )
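For context, a minimal sketch (not part of the diff) of the behaviour the new DuckDB test pins down, using sqlglot's public transpile API; the expected output is the one asserted by the test above:

import sqlglot

# Round-trip the DuckDB timestamp-unit casts exercised by the new test.
# Per the test, the "::" shorthand is rendered as CAST(...) and TIMESTAMP_US
# collapses to DuckDB's plain TIMESTAMP.
print(
    sqlglot.transpile(
        "SELECT w::TIMESTAMP_S, x::TIMESTAMP_MS, y::TIMESTAMP_US, z::TIMESTAMP_NS",
        read="duckdb",
        write="duckdb",
    )[0]
)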
tests/dialects/test_mysql.py
@@ -65,6 +65,9 @@ class TestMySQL(Validator):
         self.validate_identity(
             "INSERT INTO x VALUES (1, 'a', 2.0) ON DUPLICATE KEY UPDATE x.id = 1"
         )
+        self.validate_identity(
+            "CREATE OR REPLACE VIEW my_view AS SELECT column1 AS `boo`, column2 AS `foo` FROM my_table WHERE column3 = 'some_value' UNION SELECT q.* FROM fruits_table, JSON_TABLE(Fruits, '$[*]' COLUMNS(id VARCHAR(255) PATH '$.$id', value VARCHAR(255) PATH '$.value')) AS q",
+        )
 
         self.validate_all(
             "CREATE TABLE z (a INT) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARACTER SET=utf8 COLLATE=utf8_bin COMMENT='x'",
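As an illustrative aside (not part of the diff), the new MySQL case is an identity check: parsing and regenerating the JSON_TABLE view definition should return the same string. A sketch with sqlglot's transpile API:

import sqlglot

# The JSON_TABLE view definition from the new identity test above.
sql = (
    "CREATE OR REPLACE VIEW my_view AS SELECT column1 AS `boo`, column2 AS `foo` "
    "FROM my_table WHERE column3 = 'some_value' UNION SELECT q.* FROM fruits_table, "
    "JSON_TABLE(Fruits, '$[*]' COLUMNS(id VARCHAR(255) PATH '$.$id', "
    "value VARCHAR(255) PATH '$.value')) AS q"
)

# validate_identity asserts that a MySQL -> MySQL round trip reproduces the input.
assert sqlglot.transpile(sql, read="mysql", write="mysql")[0] == sql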
tests/dialects/test_oracle.py
@@ -234,21 +234,30 @@ MATCH_RECOGNIZE (
 
     def test_json_table(self):
         self.validate_identity(
-            "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS (foo PATH 'bar'))"
+            "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS(foo PATH 'bar'))"
         )
         self.validate_identity(
             "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS foo PATH 'bar')",
-            "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS (foo PATH 'bar'))",
+            "SELECT * FROM JSON_TABLE(foo FORMAT JSON, 'bla' ERROR ON ERROR NULL ON EMPTY COLUMNS(foo PATH 'bar'))",
         )
         self.validate_identity(
             """SELECT
   CASE WHEN DBMS_LOB.GETLENGTH(info) < 32000 THEN DBMS_LOB.SUBSTR(info) END AS info_txt,
   info AS info_clob
 FROM schemaname.tablename ar
-INNER JOIN JSON_TABLE(:emps, '$[*]' COLUMNS (empno NUMBER PATH '$')) jt
+INNER JOIN JSON_TABLE(:emps, '$[*]' COLUMNS(empno NUMBER PATH '$')) jt
   ON ar.empno = jt.empno""",
             pretty=True,
         )
+        self.validate_identity(
+            """SELECT
+  *
+FROM JSON_TABLE(res, '$.info[*]' COLUMNS(
+  tempid NUMBER PATH '$.tempid',
+  NESTED PATH '$.calid[*]' COLUMNS(last_dt PATH '$.last_dt ')
+)) src""",
+            pretty=True,
+        )
 
     def test_connect_by(self):
         start = "START WITH last_name = 'King'"
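For reference, a small sketch (not part of the diff) of the Oracle pretty-printing these tests cover; per the updated expectations, COLUMNS(...) is now written without a space before the opening parenthesis:

import sqlglot

# Parse an Oracle JSON_TABLE query (simplified from the new test above)
# and regenerate it with pretty printing.
expr = sqlglot.parse_one(
    "SELECT * FROM JSON_TABLE(res, '$.info[*]' COLUMNS(tempid NUMBER PATH '$.tempid')) src",
    read="oracle",
)
print(expr.sql(dialect="oracle", pretty=True))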
tests/dialects/test_redshift.py
@@ -6,6 +6,18 @@ class TestRedshift(Validator):
     dialect = "redshift"
 
     def test_redshift(self):
+        self.validate_all(
+            "SELECT APPROXIMATE COUNT(DISTINCT y)",
+            read={
+                "spark": "SELECT APPROX_COUNT_DISTINCT(y)",
+            },
+            write={
+                "redshift": "SELECT APPROXIMATE COUNT(DISTINCT y)",
+                "spark": "SELECT APPROX_COUNT_DISTINCT(y)",
+            },
+        )
+        self.validate_identity("SELECT APPROXIMATE AS y")
+
         self.validate_identity(
             "SELECT 'a''b'",
             "SELECT 'a\\'b'",
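A sketch (not part of the diff) of the mapping the new Redshift test establishes, using sqlglot's transpile API with the read/write pairs taken from the test:

import sqlglot

# Spark's APPROX_COUNT_DISTINCT maps to Redshift's APPROXIMATE COUNT(DISTINCT ...)
print(sqlglot.transpile("SELECT APPROX_COUNT_DISTINCT(y)", read="spark", write="redshift")[0])
# -> SELECT APPROXIMATE COUNT(DISTINCT y)

# ... and back again.
print(sqlglot.transpile("SELECT APPROXIMATE COUNT(DISTINCT y)", read="redshift", write="spark")[0])
# -> SELECT APPROX_COUNT_DISTINCT(y)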
tests/dialects/test_spark.py
@@ -361,7 +361,18 @@ TBLPROPERTIES (
             "SELECT CAST(123456 AS VARCHAR(3))",
             write={
+                "": "SELECT TRY_CAST(123456 AS TEXT)",
+                "databricks": "SELECT TRY_CAST(123456 AS STRING)",
                 "spark": "SELECT CAST(123456 AS STRING)",
                 "spark2": "SELECT CAST(123456 AS STRING)",
             },
         )
+        self.validate_all(
+            "SELECT TRY_CAST('a' AS INT)",
+            write={
+                "": "SELECT TRY_CAST('a' AS INT)",
+                "databricks": "SELECT TRY_CAST('a' AS INT)",
+                "spark": "SELECT TRY_CAST('a' AS INT)",
+                "spark2": "SELECT CAST('a' AS INT)",
+            },
+        )
         self.validate_all(
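A minimal sketch (not part of the diff) of the per-target TRY_CAST behaviour the new Spark test pins down; the expected outputs follow the test's write map:

import sqlglot

# Per the test above: Spark 3 and Databricks keep TRY_CAST, while the
# legacy spark2 dialect falls back to a plain CAST.
for dialect in ("spark", "databricks", "spark2"):
    print(dialect, "->", sqlglot.transpile("SELECT TRY_CAST('a' AS INT)", read="spark", write=dialect)[0])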
tests/dialects/test_teradata.py
@@ -48,6 +48,14 @@ class TestTeradata(Validator):
         self.validate_identity("HELP STATISTICS personnel.employee FROM my_qcd")
 
     def test_create(self):
+        self.validate_identity(
+            "REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b",
+            "CREATE OR REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b",
+        )
+        self.validate_identity(
+            "REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b",
+            "CREATE OR REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b",
+        )
         self.validate_identity("CREATE TABLE x (y INT) PRIMARY INDEX (y) PARTITION BY y INDEX (y)")
         self.validate_identity("CREATE TABLE x (y INT) PARTITION BY y INDEX (y)")
         self.validate_identity(
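For context, a sketch (not part of the diff) of the normalization the new Teradata tests assert, with REPLACE VIEW emitted as CREATE OR REPLACE VIEW:

import sqlglot

# Teradata's REPLACE VIEW round-trips as CREATE OR REPLACE VIEW.
print(
    sqlglot.transpile(
        "REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b",
        read="teradata",
        write="teradata",
    )[0]
)
# -> CREATE OR REPLACE VIEW view_b (COL1, COL2) AS LOCKING ROW FOR ACCESS SELECT COL1, COL2 FROM table_b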
tests/dialects/test_tsql.py
@@ -970,19 +970,19 @@ WHERE
         self.validate_all(
             "TRY_CONVERT(NVARCHAR, x, 121)",
             write={
-                "spark": "CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(30))",
+                "spark": "TRY_CAST(DATE_FORMAT(x, 'yyyy-MM-dd HH:mm:ss.SSSSSS') AS VARCHAR(30))",
             },
         )
         self.validate_all(
             "TRY_CONVERT(INT, x)",
             write={
-                "spark": "CAST(x AS INT)",
+                "spark": "TRY_CAST(x AS INT)",
             },
         )
         self.validate_all(
             "TRY_CAST(x AS INT)",
             write={
-                "spark": "CAST(x AS INT)",
+                "spark": "TRY_CAST(x AS INT)",
             },
         )
         self.validate_all(
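Finally, a sketch (not part of the diff) of the T-SQL change these hunks test: TRY_CONVERT and TRY_CAST now transpile to Spark's TRY_CAST rather than a plain CAST:

import sqlglot

# TRY_CONVERT(INT, x) in T-SQL becomes TRY_CAST(x AS INT) in Spark.
print(sqlglot.transpile("TRY_CONVERT(INT, x)", read="tsql", write="spark")[0])
# -> TRY_CAST(x AS INT)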