Adding upstream version 20.1.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>

parent 6a89523da4, commit 5bd573dda1
127 changed files with 73384 additions and 73067 deletions
@@ -1,6 +1,5 @@
-from unittest import mock
-
 from sqlglot import ParseError, exp, parse_one, transpile
+from sqlglot.helper import logger as helper_logger
 from tests.dialects.test_dialect import Validator
 
 
@@ -21,6 +20,9 @@ class TestPostgres(Validator):
         self.validate_identity("INSERT INTO x VALUES (1, 'a', 2.0) RETURNING *")
         self.validate_identity("UPDATE tbl_name SET foo = 123 RETURNING a")
         self.validate_identity("CREATE TABLE cities_partdef PARTITION OF cities DEFAULT")
+        self.validate_identity(
+            "CREATE CONSTRAINT TRIGGER my_trigger AFTER INSERT OR DELETE OR UPDATE OF col_a, col_b ON public.my_table DEFERRABLE INITIALLY DEFERRED FOR EACH ROW EXECUTE FUNCTION do_sth()"
+        )
         self.validate_identity(
             "CREATE TABLE cust_part3 PARTITION OF customers FOR VALUES WITH (MODULUS 3, REMAINDER 2)"
         )
@@ -42,6 +44,9 @@ class TestPostgres(Validator):
         self.validate_identity(
             "CREATE INDEX foo ON bar.baz USING btree(col1 varchar_pattern_ops ASC, col2)"
         )
+        self.validate_identity(
+            "CREATE INDEX index_issues_on_title_trigram ON public.issues USING gin(title public.gin_trgm_ops)"
+        )
         self.validate_identity(
             "INSERT INTO x VALUES (1, 'a', 2.0) ON CONFLICT (id) DO NOTHING RETURNING *"
         )
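Note: validate_identity asserts a lossless round trip, i.e. parsing the statement with the Postgres dialect and regenerating it yields the same SQL. A minimal sketch of that round trip through the public API (statement taken from the hunk above; assumes sqlglot is installed):

from sqlglot import parse_one

sql = "INSERT INTO x VALUES (1, 'a', 2.0) ON CONFLICT (id) DO NOTHING RETURNING *"

# Parse with the Postgres dialect and regenerate; an identity test expects the
# generated SQL to match the input exactly.
assert parse_one(sql, read="postgres").sql(dialect="postgres") == sql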
@@ -148,7 +153,7 @@ class TestPostgres(Validator):
             write={
                 "hive": "SELECT EXPLODE(c) FROM t",
                 "postgres": "SELECT UNNEST(c) FROM t",
-                "presto": "SELECT IF(pos = pos_2, col) AS col FROM t, UNNEST(SEQUENCE(1, GREATEST(CARDINALITY(c)))) AS _u(pos) CROSS JOIN UNNEST(c) WITH ORDINALITY AS _u_2(col, pos_2) WHERE pos = pos_2 OR (pos > CARDINALITY(c) AND pos_2 = CARDINALITY(c))",
+                "presto": "SELECT IF(_u.pos = _u_2.pos_2, _u_2.col) AS col FROM t, UNNEST(SEQUENCE(1, GREATEST(CARDINALITY(c)))) AS _u(pos) CROSS JOIN UNNEST(c) WITH ORDINALITY AS _u_2(col, pos_2) WHERE _u.pos = _u_2.pos_2 OR (_u.pos > CARDINALITY(c) AND _u_2.pos_2 = CARDINALITY(c))",
             },
         )
         self.validate_all(
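The updated Presto expectation qualifies the pos/col references with the generated _u/_u_2 aliases. A rough sketch of the same translation via the public transpile API (the Hive expectation is copied from the hunk above; the Presto output is the long CROSS JOIN UNNEST ... WITH ORDINALITY rewrite asserted there):

from sqlglot import transpile

# Postgres UNNEST becomes EXPLODE when targeting Hive, per the test expectation.
print(transpile("SELECT UNNEST(c) FROM t", read="postgres", write="hive")[0])
# -> SELECT EXPLODE(c) FROM t

# Presto has no scalar UNNEST, so sqlglot emits the CROSS JOIN UNNEST ...
# WITH ORDINALITY rewrite shown in the test above.
print(transpile("SELECT UNNEST(c) FROM t", read="postgres", write="presto")[0])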
@@ -156,20 +161,46 @@ class TestPostgres(Validator):
             write={
                 "hive": "SELECT EXPLODE(ARRAY(1))",
                 "postgres": "SELECT UNNEST(ARRAY[1])",
-                "presto": "SELECT IF(pos = pos_2, col) AS col FROM UNNEST(SEQUENCE(1, GREATEST(CARDINALITY(ARRAY[1])))) AS _u(pos) CROSS JOIN UNNEST(ARRAY[1]) WITH ORDINALITY AS _u_2(col, pos_2) WHERE pos = pos_2 OR (pos > CARDINALITY(ARRAY[1]) AND pos_2 = CARDINALITY(ARRAY[1]))",
+                "presto": "SELECT IF(_u.pos = _u_2.pos_2, _u_2.col) AS col FROM UNNEST(SEQUENCE(1, GREATEST(CARDINALITY(ARRAY[1])))) AS _u(pos) CROSS JOIN UNNEST(ARRAY[1]) WITH ORDINALITY AS _u_2(col, pos_2) WHERE _u.pos = _u_2.pos_2 OR (_u.pos > CARDINALITY(ARRAY[1]) AND _u_2.pos_2 = CARDINALITY(ARRAY[1]))",
             },
         )
 
-    @mock.patch("sqlglot.helper.logger")
-    def test_array_offset(self, logger):
-        self.validate_all(
-            "SELECT col[1]",
-            write={
-                "hive": "SELECT col[0]",
-                "postgres": "SELECT col[1]",
-                "presto": "SELECT col[1]",
-            },
-        )
+    def test_array_offset(self):
+        with self.assertLogs(helper_logger) as cm:
+            self.validate_all(
+                "SELECT col[1]",
+                write={
+                    "bigquery": "SELECT col[0]",
+                    "duckdb": "SELECT col[1]",
+                    "hive": "SELECT col[0]",
+                    "postgres": "SELECT col[1]",
+                    "presto": "SELECT col[1]",
+                },
+            )
+
+            self.assertEqual(
+                cm.output,
+                [
+                    "WARNING:sqlglot:Applying array index offset (-1)",
+                    "WARNING:sqlglot:Applying array index offset (1)",
+                    "WARNING:sqlglot:Applying array index offset (1)",
+                    "WARNING:sqlglot:Applying array index offset (1)",
+                ],
+            )
+
+    def test_operator(self):
+        expr = parse_one("1 OPERATOR(+) 2 OPERATOR(*) 3", read="postgres")
+
+        expr.left.assert_is(exp.Operator)
+        expr.left.left.assert_is(exp.Literal)
+        expr.left.right.assert_is(exp.Literal)
+        expr.right.assert_is(exp.Literal)
+        self.assertEqual(expr.sql(dialect="postgres"), "1 OPERATOR(+) 2 OPERATOR(*) 3")
+
+        self.validate_identity("SELECT operator FROM t")
+        self.validate_identity("SELECT 1 OPERATOR(+) 2")
+        self.validate_identity("SELECT 1 OPERATOR(+) /* foo */ 2")
+        self.validate_identity("SELECT 1 OPERATOR(pg_catalog.+) 2")
 
     def test_postgres(self):
         expr = parse_one(
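The rewritten test_array_offset captures the index-offset warnings with assertLogs on sqlglot's helper logger instead of mocking it, and adds BigQuery and DuckDB expectations. A small sketch of the behavior being pinned down (expected outputs and warning text copied from the assertions above):

from sqlglot import transpile

# Postgres arrays are 1-based while Hive arrays are 0-based, so the index is
# shifted and "Applying array index offset (-1)" is logged on sqlglot.helper's
# logger, per the captured warnings above.
print(transpile("SELECT col[1]", read="postgres", write="hive")[0])
# -> SELECT col[0]

# Per the test, DuckDB keeps the 1-based index unchanged.
print(transpile("SELECT col[1]", read="postgres", write="duckdb")[0])
# -> SELECT col[1]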
@@ -197,6 +228,14 @@ class TestPostgres(Validator):
         self.assertIsInstance(expr, exp.AlterTable)
         self.assertEqual(expr.sql(dialect="postgres"), alter_table_only)
 
+        self.validate_identity(
+            "SELECT c.oid, n.nspname, c.relname "
+            "FROM pg_catalog.pg_class AS c "
+            "LEFT JOIN pg_catalog.pg_namespace AS n ON n.oid = c.relnamespace "
+            "WHERE c.relname OPERATOR(pg_catalog.~) '^(courses)$' COLLATE pg_catalog.default AND "
+            "pg_catalog.PG_TABLE_IS_VISIBLE(c.oid) "
+            "ORDER BY 2, 3"
+        )
         self.validate_identity(
             "SELECT ARRAY[]::INT[] AS foo",
             "SELECT CAST(ARRAY[] AS INT[]) AS foo",
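The new identity test above relies on the OPERATOR() support that test_operator introduces. A minimal sketch mirroring those assertions:

from sqlglot import exp, parse_one

# OPERATOR(...) calls parse into nested exp.Operator nodes and regenerate
# verbatim for the Postgres dialect, as asserted in test_operator.
expr = parse_one("1 OPERATOR(+) 2 OPERATOR(*) 3", read="postgres")

assert isinstance(expr.left, exp.Operator)   # 1 OPERATOR(+) 2
assert isinstance(expr.right, exp.Literal)   # 3
assert expr.sql(dialect="postgres") == "1 OPERATOR(+) 2 OPERATOR(*) 3"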
@@ -728,26 +767,23 @@ class TestPostgres(Validator):
         )
 
     def test_string_concat(self):
-        self.validate_all(
-            "SELECT CONCAT('abcde', 2, NULL, 22)",
-            write={
-                "postgres": "SELECT CONCAT(COALESCE(CAST('abcde' AS TEXT), ''), COALESCE(CAST(2 AS TEXT), ''), COALESCE(CAST(NULL AS TEXT), ''), COALESCE(CAST(22 AS TEXT), ''))",
-            },
-        )
+        self.validate_identity("SELECT CONCAT('abcde', 2, NULL, 22)")
+
         self.validate_all(
             "CONCAT(a, b)",
             write={
-                "": "CONCAT(COALESCE(CAST(a AS TEXT), ''), COALESCE(CAST(b AS TEXT), ''))",
-                "duckdb": "CONCAT(COALESCE(CAST(a AS TEXT), ''), COALESCE(CAST(b AS TEXT), ''))",
-                "postgres": "CONCAT(COALESCE(CAST(a AS TEXT), ''), COALESCE(CAST(b AS TEXT), ''))",
-                "presto": "CONCAT(CAST(COALESCE(CAST(a AS VARCHAR), '') AS VARCHAR), CAST(COALESCE(CAST(b AS VARCHAR), '') AS VARCHAR))",
+                "": "CONCAT(COALESCE(a, ''), COALESCE(b, ''))",
+                "clickhouse": "CONCAT(COALESCE(a, ''), COALESCE(b, ''))",
+                "duckdb": "CONCAT(a, b)",
+                "postgres": "CONCAT(a, b)",
+                "presto": "CONCAT(COALESCE(CAST(a AS VARCHAR), ''), COALESCE(CAST(b AS VARCHAR), ''))",
             },
         )
         self.validate_all(
             "a || b",
             write={
                 "": "a || b",
-                "clickhouse": "CONCAT(CAST(a AS String), CAST(b AS String))",
+                "clickhouse": "a || b",
                 "duckdb": "a || b",
                 "postgres": "a || b",
                 "presto": "CONCAT(CAST(a AS VARCHAR), CAST(b AS VARCHAR))",
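The updated expectations drop the COALESCE/CAST wrapping for dialects whose CONCAT output now round-trips plainly, while Presto still coalesces each argument. A sketch of the same behavior through transpile (expected strings copied from the hunk above):

from sqlglot import transpile

# Per the new expectations, CONCAT passes through unchanged for DuckDB and Postgres...
print(transpile("CONCAT(a, b)", read="postgres", write="duckdb")[0])
# -> CONCAT(a, b)

# ...while Presto still NULL-guards each argument with COALESCE.
print(transpile("CONCAT(a, b)", read="postgres", write="presto")[0])
# -> CONCAT(COALESCE(CAST(a AS VARCHAR), ''), COALESCE(CAST(b AS VARCHAR), ''))

# The || operator is now kept as-is for ClickHouse and becomes a CONCAT of
# VARCHAR casts for Presto.
print(transpile("a || b", read="postgres", write="clickhouse")[0])
# -> a || b
print(transpile("a || b", read="postgres", write="presto")[0])
# -> CONCAT(CAST(a AS VARCHAR), CAST(b AS VARCHAR))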