Adding upstream version 23.7.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent f1aa09959c
commit 27c061b7af
187 changed files with 86502 additions and 71397 deletions
@@ -51,6 +51,8 @@ class TestBigQuery(Validator):
         self.assertEqual(table.name, "_y")
 
+        self.validate_identity("SELECT * FROM x-0.y")
+        self.assertEqual(exp.to_table("`a.b`.`c.d`", dialect="bigquery").sql(), '"a"."b"."c"."d"')
         self.assertEqual(exp.to_table("`x`.`y.z`", dialect="bigquery").sql(), '"x"."y"."z"')
         self.assertEqual(exp.to_table("`x.y.z`", dialect="bigquery").sql(), '"x"."y"."z"')
         self.assertEqual(exp.to_table("`x.y.z`", dialect="bigquery").sql("bigquery"), "`x.y.z`")
         self.assertEqual(exp.to_table("`x`.`y`", dialect="bigquery").sql("bigquery"), "`x`.`y`")
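
Note: exp.to_table parses a (possibly quoted) table path under one dialect and can re-render it under another; BigQuery allows dots inside a single backtick-quoted identifier, which is why `x.y.z` splits into a three-part path. A minimal sketch of what the assertions above exercise, with the expected outputs taken from those assertions:

    from sqlglot import exp

    table = exp.to_table("`x.y.z`", dialect="bigquery")
    print(table.sql())            # '"x"."y"."z"'  (default dialect quotes each part)
    print(table.sql("bigquery"))  # '`x.y.z`'      (round-trips in BigQuery)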
@@ -58,6 +60,8 @@ class TestBigQuery(Validator):
         select_with_quoted_udf = self.validate_identity("SELECT `p.d.UdF`(data) FROM `p.d.t`")
         self.assertEqual(select_with_quoted_udf.selects[0].name, "p.d.UdF")
 
+        self.validate_identity("SELECT ARRAY_TO_STRING(list, '--') AS text")
+        self.validate_identity("SELECT jsondoc['some_key']")
         self.validate_identity("SELECT `p.d.UdF`(data).* FROM `p.d.t`")
         self.validate_identity("SELECT * FROM `my-project.my-dataset.my-table`")
         self.validate_identity("CREATE OR REPLACE TABLE `a.b.c` CLONE `a.b.d`")
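
Note: validate_identity(sql) asserts that sql parses and regenerates unchanged in the dialect under test (BigQuery here); when a second argument is given, it is the expected canonical form. A rough equivalent using sqlglot's public API (a sketch; the real helper lives in the test suite's Validator base class):

    import sqlglot

    def check_identity(sql, expected=None, dialect="bigquery"):
        # Parse in the dialect, then generate SQL back in the same dialect.
        roundtrip = sqlglot.transpile(sql, read=dialect, write=dialect)[0]
        assert roundtrip == (expected or sql), roundtrip

    check_identity("SELECT jsondoc['some_key']")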
@@ -177,6 +181,13 @@ class TestBigQuery(Validator):
         self.validate_identity(
             """SELECT JSON_EXTRACT_SCALAR('5')""", """SELECT JSON_EXTRACT_SCALAR('5', '$')"""
         )
+        self.validate_identity(
+            "CREATE OR REPLACE VIEW test (tenant_id OPTIONS (description='Test description on table creation')) AS SELECT 1 AS tenant_id, 1 AS customer_id",
+        )
+        self.validate_identity(
+            "CREATE VIEW `d.v` OPTIONS (expiration_timestamp=TIMESTAMP '2020-01-02T04:05:06.007Z') AS SELECT 1 AS c",
+            "CREATE VIEW `d.v` OPTIONS (expiration_timestamp=CAST('2020-01-02T04:05:06.007Z' AS TIMESTAMP)) AS SELECT 1 AS c",
+        )
         self.validate_identity(
             "SELECT ARRAY(SELECT AS STRUCT 1 a, 2 b)",
             "SELECT ARRAY(SELECT AS STRUCT 1 AS a, 2 AS b)",
@@ -185,10 +196,6 @@ class TestBigQuery(Validator):
             "select array_contains([1, 2, 3], 1)",
             "SELECT EXISTS(SELECT 1 FROM UNNEST([1, 2, 3]) AS _col WHERE _col = 1)",
         )
-        self.validate_identity(
-            "create or replace view test (tenant_id OPTIONS(description='Test description on table creation')) select 1 as tenant_id, 1 as customer_id;",
-            "CREATE OR REPLACE VIEW test (tenant_id OPTIONS (description='Test description on table creation')) AS SELECT 1 AS tenant_id, 1 AS customer_id",
-        )
         self.validate_identity(
             "SELECT SPLIT(foo)",
             "SELECT SPLIT(foo, ',')",
@@ -658,6 +665,13 @@ class TestBigQuery(Validator):
                 "duckdb": "SELECT {'y': ARRAY(SELECT {'b': 1} FROM x)} FROM z",
             },
         )
+        self.validate_all(
+            "SELECT CAST(STRUCT(1) AS STRUCT<INT64>)",
+            write={
+                "bigquery": "SELECT CAST(STRUCT(1) AS STRUCT<INT64>)",
+                "snowflake": "SELECT CAST(OBJECT_CONSTRUCT('_0', 1) AS OBJECT)",
+            },
+        )
         self.validate_all(
             "cast(x as date format 'MM/DD/YYYY')",
             write={
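
Note: validate_all takes SQL in the dialect under test and checks each write entry by transpiling to that target dialect (and each read entry by transpiling from it). The new STRUCT test above, expressed through the public API (a sketch; the expected output is copied from the test):

    import sqlglot

    out = sqlglot.transpile(
        "SELECT CAST(STRUCT(1) AS STRUCT<INT64>)", read="bigquery", write="snowflake"
    )[0]
    assert out == "SELECT CAST(OBJECT_CONSTRUCT('_0', 1) AS OBJECT)"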
@@ -724,10 +738,10 @@ class TestBigQuery(Validator):
         self.validate_all(
             "'\\\\'",
             write={
-                "bigquery": r"'\\'",
-                "duckdb": r"'\\'",
-                "presto": r"'\\'",
-                "hive": r"'\\'",
+                "bigquery": "'\\\\'",
+                "duckdb": "'\\'",
+                "presto": "'\\'",
+                "hive": "'\\\\'",
             },
         )
         self.validate_all(
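
Note: two escaping layers are in play above, since each Python literal encodes one SQL string literal. The bigquery and hive expectations only switch from raw-string to escaped notation, while the duckdb and presto expectations genuinely change from an escaped backslash to a lone one. A sketch that makes the layers explicit:

    # r"'\\'" and "'\\\\'" are the same four characters:  ' \ \ '
    assert r"'\\'" == "'\\\\'"
    # "'\\'" is three characters (quote, one backslash, quote):  ' \ '
    assert "'\\'" == "'" + "\\" + "'"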
@@ -1004,20 +1018,28 @@ class TestBigQuery(Validator):
             },
         )
         self.validate_all(
-            "SELECT cola, colb FROM UNNEST([STRUCT(1 AS cola, 'test' AS colb)])",
+            "SELECT cola, colb FROM UNNEST([STRUCT(1 AS cola, 'test' AS colb)]) AS tab",
             read={
-                "bigquery": "SELECT cola, colb FROM UNNEST([STRUCT(1 AS cola, 'test' AS colb)])",
+                "bigquery": "SELECT cola, colb FROM UNNEST([STRUCT(1 AS cola, 'test' AS colb)]) as tab",
                 "snowflake": "SELECT cola, colb FROM (VALUES (1, 'test')) AS tab(cola, colb)",
                 "spark": "SELECT cola, colb FROM VALUES (1, 'test') AS tab(cola, colb)",
             },
         )
         self.validate_all(
-            "SELECT * FROM UNNEST([STRUCT(1 AS id)]) CROSS JOIN UNNEST([STRUCT(1 AS id)])",
+            "SELECT * FROM UNNEST([STRUCT(1 AS _c0)]) AS t1",
             read={
-                "bigquery": "SELECT * FROM UNNEST([STRUCT(1 AS id)]) CROSS JOIN UNNEST([STRUCT(1 AS id)])",
+                "bigquery": "SELECT * FROM UNNEST([STRUCT(1 AS _c0)]) AS t1",
                 "postgres": "SELECT * FROM (VALUES (1)) AS t1",
             },
         )
+        self.validate_all(
+            "SELECT * FROM UNNEST([STRUCT(1 AS id)]) AS t1 CROSS JOIN UNNEST([STRUCT(1 AS id)]) AS t2",
+            read={
+                "bigquery": "SELECT * FROM UNNEST([STRUCT(1 AS id)]) AS t1 CROSS JOIN UNNEST([STRUCT(1 AS id)]) AS t2",
+                "postgres": "SELECT * FROM (VALUES (1)) AS t1(id) CROSS JOIN (VALUES (1)) AS t2(id)",
+            },
+        )
+
         self.validate_all(
             "SELECT REGEXP_EXTRACT(abc, 'pattern(group)') FROM table",
             write={
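
Note: the reworked UNNEST tests pin down how row constructors from other dialects pick up table and column aliases when transpiled to BigQuery. The postgres read entries above assert this direction (a sketch; the expected output is copied from the test):

    import sqlglot

    out = sqlglot.transpile(
        "SELECT * FROM (VALUES (1)) AS t1", read="postgres", write="bigquery"
    )[0]
    assert out == "SELECT * FROM UNNEST([STRUCT(1 AS _c0)]) AS t1"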
@@ -1050,28 +1072,43 @@ class TestBigQuery(Validator):
         )
         self.validate_all(
             """SELECT
-  `u`.`harness_user_email` AS `harness_user_email`,
-  `d`.`harness_user_id` AS `harness_user_id`,
-  `harness_account_id` AS `harness_account_id`
-FROM `analytics_staging`.`stg_mongodb__users` AS `u`, UNNEST(`u`.`harness_cluster_details`) AS `d`, UNNEST(`d`.`harness_account_ids`) AS `harness_account_id`
+  `u`.`user_email` AS `user_email`,
+  `d`.`user_id` AS `user_id`,
+  `account_id` AS `account_id`
+FROM `analytics_staging`.`stg_mongodb__users` AS `u`, UNNEST(`u`.`cluster_details`) AS `d`, UNNEST(`d`.`account_ids`) AS `account_id`
 WHERE
-  NOT `harness_account_id` IS NULL""",
+  NOT `account_id` IS NULL""",
             read={
                 "": """
 SELECT
-  "u"."harness_user_email" AS "harness_user_email",
-  "_q_0"."d"."harness_user_id" AS "harness_user_id",
-  "_q_1"."harness_account_id" AS "harness_account_id"
+  "u"."user_email" AS "user_email",
+  "_q_0"."d"."user_id" AS "user_id",
+  "_q_1"."account_id" AS "account_id"
 FROM
   "analytics_staging"."stg_mongodb__users" AS "u",
-  UNNEST("u"."harness_cluster_details") AS "_q_0"("d"),
-  UNNEST("_q_0"."d"."harness_account_ids") AS "_q_1"("harness_account_id")
+  UNNEST("u"."cluster_details") AS "_q_0"("d"),
+  UNNEST("_q_0"."d"."account_ids") AS "_q_1"("account_id")
 WHERE
-  NOT "_q_1"."harness_account_id" IS NULL
+  NOT "_q_1"."account_id" IS NULL
 """
             },
             pretty=True,
         )
+        self.validate_all(
+            "SELECT MOD(x, 10)",
+            read={"postgres": "SELECT x % 10"},
+            write={
+                "bigquery": "SELECT MOD(x, 10)",
+                "postgres": "SELECT x % 10",
+            },
+        )
+        self.validate_all(
+            "SELECT CAST(x AS DATETIME)",
+            write={
+                "": "SELECT CAST(x AS TIMESTAMP)",
+                "bigquery": "SELECT CAST(x AS DATETIME)",
+            },
+        )
 
     def test_errors(self):
         with self.assertRaises(TokenError):
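
Note: the last two additions cover Postgres' % operator mapping to MOD and BigQuery's DATETIME mapping to the generic TIMESTAMP type. As one-liners against the public API (a sketch; the expected strings are copied from the tests):

    import sqlglot

    assert (
        sqlglot.transpile("SELECT x % 10", read="postgres", write="bigquery")[0]
        == "SELECT MOD(x, 10)"
    )
    assert (
        sqlglot.parse_one("SELECT CAST(x AS DATETIME)", read="bigquery").sql()
        == "SELECT CAST(x AS TIMESTAMP)"
    )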