Adding upstream version 25.7.1.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent f9dae8e951
commit 8356f462bb
102 changed files with 52995 additions and 52070 deletions (only the hunks touching the TestPostgres dialect tests are shown below)
@@ -17,9 +17,6 @@ class TestPostgres(Validator):
        )

        self.validate_identity("SHA384(x)")
        self.validate_identity(
            'CREATE TABLE x (a TEXT COLLATE "de_DE")', "CREATE TABLE x (a TEXT COLLATE de_DE)"
        )
        self.validate_identity("1.x", "1. AS x")
        self.validate_identity("|/ x", "SQRT(x)")
        self.validate_identity("||/ x", "CBRT(x)")
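Roughly, validate_identity parses the first string with the Postgres dialect and asserts that regenerating SQL yields the second string (or the input itself when only one string is given). A minimal standalone sketch of the operator rewrites covered above, assuming a sqlglot build where the Postgres square-root and cube-root operators map to SQRT/CBRT as these assertions expect:

    import sqlglot

    # Round-trip through the Postgres dialect; the operator forms should come
    # back as function calls, matching the identities asserted above.
    print(sqlglot.transpile("SELECT |/ x", read="postgres", write="postgres")[0])   # SELECT SQRT(x)
    print(sqlglot.transpile("SELECT ||/ x", read="postgres", write="postgres")[0])  # SELECT CBRT(x)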
@@ -565,15 +562,10 @@ class TestPostgres(Validator):
                "postgres": "GENERATE_SERIES(CAST('2019-01-01' AS TIMESTAMP), CURRENT_TIMESTAMP, INTERVAL '1 DAY')",
                "presto": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' DAY)",
                "trino": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP AS TIMESTAMP), INTERVAL '1' DAY)",
            },
        )
        self.validate_all(
            "GENERATE_SERIES(a, b)",
            write={
                "postgres": "GENERATE_SERIES(a, b)",
                "presto": "SEQUENCE(a, b)",
                "trino": "SEQUENCE(a, b)",
                "tsql": "GENERATE_SERIES(a, b)",
                "hive": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP() AS TIMESTAMP), INTERVAL '1' DAY)",
                "spark2": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP() AS TIMESTAMP), INTERVAL '1' DAY)",
                "spark": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP() AS TIMESTAMP), INTERVAL '1' DAY)",
                "databricks": "SEQUENCE(CAST('2019-01-01' AS TIMESTAMP), CAST(CURRENT_TIMESTAMP() AS TIMESTAMP), INTERVAL '1' DAY)",
            },
        )
        self.validate_all(
@@ -583,6 +575,20 @@ class TestPostgres(Validator):
                "presto": "SEQUENCE(a, b)",
                "trino": "SEQUENCE(a, b)",
                "tsql": "GENERATE_SERIES(a, b)",
                "hive": "SEQUENCE(a, b)",
                "spark2": "SEQUENCE(a, b)",
                "spark": "SEQUENCE(a, b)",
                "databricks": "SEQUENCE(a, b)",
            },
            write={
                "postgres": "GENERATE_SERIES(a, b)",
                "presto": "SEQUENCE(a, b)",
                "trino": "SEQUENCE(a, b)",
                "tsql": "GENERATE_SERIES(a, b)",
                "hive": "SEQUENCE(a, b)",
                "spark2": "SEQUENCE(a, b)",
                "spark": "SEQUENCE(a, b)",
                "databricks": "SEQUENCE(a, b)",
            },
        )
        self.validate_all(
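The two hunks above pin down how Postgres GENERATE_SERIES is rendered for other dialects: Presto and Trino generate SEQUENCE, while T-SQL keeps GENERATE_SERIES. A rough standalone equivalent using sqlglot.transpile, with the dialect names and expected outputs taken from the write= mappings above:

    import sqlglot

    sql = "GENERATE_SERIES(a, b)"
    for dialect in ("postgres", "presto", "trino", "tsql"):
        # Expected per the assertions above: postgres/tsql keep GENERATE_SERIES(a, b),
        # presto/trino produce SEQUENCE(a, b).
        print(dialect, "->", sqlglot.transpile(sql, read="postgres", write=dialect)[0])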
@@ -759,6 +765,14 @@ class TestPostgres(Validator):
            },
        )

        self.validate_all(
            "SELECT TO_DATE('01/01/2000', 'MM/DD/YYYY')",
            write={
                "duckdb": "SELECT CAST(STRPTIME('01/01/2000', '%m/%d/%Y') AS DATE)",
                "postgres": "SELECT TO_DATE('01/01/2000', 'MM/DD/YYYY')",
            },
        )

    def test_ddl(self):
        # Checks that user-defined types are parsed into DataType instead of Identifier
        self.parse_one("CREATE TABLE t (a udt)").this.expressions[0].args["kind"].assert_is(
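Two details in this hunk: the Postgres TO_DATE format tokens (MM/DD/YYYY) are translated into strptime-style tokens (%m/%d/%Y) for DuckDB, and test_ddl starts by checking that an unknown column type such as udt is parsed into a DataType node rather than a plain Identifier. A rough standalone version of both checks, assuming the expectations asserted above hold:

    from sqlglot import exp, parse_one, transpile

    # Format-string translation exercised by the validate_all above.
    print(transpile("SELECT TO_DATE('01/01/2000', 'MM/DD/YYYY')", read="postgres", write="duckdb")[0])
    # expected: SELECT CAST(STRPTIME('01/01/2000', '%m/%d/%Y') AS DATE)

    # User-defined column types should come back as exp.DataType, not exp.Identifier.
    kind = parse_one("CREATE TABLE t (a udt)", read="postgres").this.expressions[0].args["kind"]
    assert isinstance(kind, exp.DataType)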
@@ -775,6 +789,8 @@ class TestPostgres(Validator):
        cdef.args["kind"].assert_is(exp.DataType)
        self.assertEqual(expr.sql(dialect="postgres"), "CREATE TABLE t (x INTERVAL DAY)")

        self.validate_identity('CREATE TABLE x (a TEXT COLLATE "de_DE")')
        self.validate_identity('CREATE TABLE x (a TEXT COLLATE pg_catalog."default")')
        self.validate_identity("CREATE TABLE t (col INT[3][5])")
        self.validate_identity("CREATE TABLE t (col INT[3])")
        self.validate_identity("CREATE INDEX IF NOT EXISTS ON t(c)")
@@ -981,6 +997,34 @@ class TestPostgres(Validator):
        self.validate_identity("CREATE TABLE tbl (col UUID UNIQUE DEFAULT GEN_RANDOM_UUID())")
        self.validate_identity("CREATE TABLE tbl (col UUID, UNIQUE NULLS NOT DISTINCT (col))")

        self.validate_identity("CREATE INDEX CONCURRENTLY ix_table_id ON tbl USING btree(id)")
        self.validate_identity(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_table_id ON tbl USING btree(id)"
        )

        self.validate_identity(
            """
            CREATE TABLE IF NOT EXISTS public.rental
            (
                inventory_id INT NOT NULL,
                CONSTRAINT rental_customer_id_fkey FOREIGN KEY (customer_id)
                    REFERENCES public.customer (customer_id) MATCH FULL
                    ON UPDATE CASCADE
                    ON DELETE RESTRICT,
                CONSTRAINT rental_inventory_id_fkey FOREIGN KEY (inventory_id)
                    REFERENCES public.inventory (inventory_id) MATCH PARTIAL
                    ON UPDATE CASCADE
                    ON DELETE RESTRICT,
                CONSTRAINT rental_staff_id_fkey FOREIGN KEY (staff_id)
                    REFERENCES public.staff (staff_id) MATCH SIMPLE
                    ON UPDATE CASCADE
                    ON DELETE RESTRICT,
                INITIALLY IMMEDIATE
            )
            """,
            "CREATE TABLE IF NOT EXISTS public.rental (inventory_id INT NOT NULL, CONSTRAINT rental_customer_id_fkey FOREIGN KEY (customer_id) REFERENCES public.customer (customer_id) MATCH FULL ON UPDATE CASCADE ON DELETE RESTRICT, CONSTRAINT rental_inventory_id_fkey FOREIGN KEY (inventory_id) REFERENCES public.inventory (inventory_id) MATCH PARTIAL ON UPDATE CASCADE ON DELETE RESTRICT, CONSTRAINT rental_staff_id_fkey FOREIGN KEY (staff_id) REFERENCES public.staff (staff_id) MATCH SIMPLE ON UPDATE CASCADE ON DELETE RESTRICT, INITIALLY IMMEDIATE)",
        )

        with self.assertRaises(ParseError):
            transpile("CREATE TABLE products (price DECIMAL CHECK price > 0)", read="postgres")
        with self.assertRaises(ParseError):
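The trailing assertions cover malformed DDL: Postgres requires the CHECK condition to be parenthesized, so the unparenthesized form is expected to raise ParseError instead of parsing silently. A small sketch outside the test class (the parenthesized variant is assumed to parse cleanly, mirroring standard Postgres syntax):

    from sqlglot import transpile
    from sqlglot.errors import ParseError

    try:
        transpile("CREATE TABLE products (price DECIMAL CHECK price > 0)", read="postgres")
    except ParseError as e:
        print("rejected as expected:", e)

    # Assumed-valid counterpart with the CHECK condition parenthesized.
    transpile("CREATE TABLE products (price DECIMAL CHECK (price > 0))", read="postgres")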