Merging upstream version 18.7.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent 77523b6777
commit d1b976f442
96 changed files with 59037 additions and 52828 deletions
@@ -9,36 +9,6 @@ class TestBigQuery(Validator):
    maxDiff = None

    def test_bigquery(self):
        self.validate_identity("SELECT * FROM tbl FOR SYSTEM_TIME AS OF z")
        self.validate_identity(
            """SELECT JSON '"foo"' AS json_data""",
            """SELECT PARSE_JSON('"foo"') AS json_data""",
        )

        self.validate_all(
            """SELECT
  `u`.`harness_user_email` AS `harness_user_email`,
  `d`.`harness_user_id` AS `harness_user_id`,
  `harness_account_id` AS `harness_account_id`
FROM `analytics_staging`.`stg_mongodb__users` AS `u`, UNNEST(`u`.`harness_cluster_details`) AS `d`, UNNEST(`d`.`harness_account_ids`) AS `harness_account_id`
WHERE
  NOT `harness_account_id` IS NULL""",
            read={
                "": """
                SELECT
                  "u"."harness_user_email" AS "harness_user_email",
                  "_q_0"."d"."harness_user_id" AS "harness_user_id",
                  "_q_1"."harness_account_id" AS "harness_account_id"
                FROM
                  "analytics_staging"."stg_mongodb__users" AS "u",
                  UNNEST("u"."harness_cluster_details") AS "_q_0"("d"),
                  UNNEST("_q_0"."d"."harness_account_ids") AS "_q_1"("harness_account_id")
                WHERE
                  NOT "_q_1"."harness_account_id" IS NULL
                """
            },
            pretty=True,
        )
        with self.assertRaises(TokenError):
            transpile("'\\'", read="bigquery")
@@ -63,6 +33,9 @@ WHERE
        with self.assertRaises(ParseError):
            transpile("DATE_ADD(x, day)", read="bigquery")

        self.validate_identity("SELECT test.Unknown FROM test")
        self.validate_identity(r"SELECT '\n\r\a\v\f\t'")
        self.validate_identity("SELECT * FROM tbl FOR SYSTEM_TIME AS OF z")
        self.validate_identity("STRING_AGG(DISTINCT a ORDER BY b DESC, c DESC LIMIT 10)")
        self.validate_identity("SELECT PARSE_TIMESTAMP('%c', 'Thu Dec 25 07:30:00 2008', 'UTC')")
        self.validate_identity("SELECT ANY_VALUE(fruit HAVING MAX sold) FROM fruits")
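
Aside (not part of the diff): the ParseError expectation above can be reproduced outside the test suite with sqlglot's public API. A minimal sketch, assuming only that sqlglot is installed:

    import sqlglot
    from sqlglot.errors import ParseError

    try:
        # BigQuery's DATE_ADD takes an interval, e.g. DATE_ADD(x, INTERVAL 1 DAY),
        # so a bare unit with no quantity fails to parse.
        sqlglot.transpile("DATE_ADD(x, day)", read="bigquery")
    except ParseError as exc:
        print(exc)
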
@@ -111,6 +84,7 @@ WHERE
        self.validate_identity("COMMIT TRANSACTION")
        self.validate_identity("ROLLBACK TRANSACTION")
        self.validate_identity("CAST(x AS BIGNUMERIC)")
        self.validate_identity("SELECT y + 1 FROM x GROUP BY y + 1 ORDER BY 1")
        self.validate_identity(
            "DATE(CAST('2016-12-25 05:30:00+07' AS DATETIME), 'America/Los_Angeles')"
        )
@@ -132,6 +106,22 @@ WHERE
        self.validate_identity(
            "SELECT LAST_VALUE(a IGNORE NULLS) OVER y FROM x WINDOW y AS (PARTITION BY CATEGORY)",
        )
        self.validate_identity(
            "SELECT a overlaps",
            "SELECT a AS overlaps",
        )
        self.validate_identity(
            "SELECT y + 1 z FROM x GROUP BY y + 1 ORDER BY z",
            "SELECT y + 1 AS z FROM x GROUP BY z ORDER BY z",
        )
        self.validate_identity(
            "SELECT y + 1 z FROM x GROUP BY y + 1",
            "SELECT y + 1 AS z FROM x GROUP BY y + 1",
        )
        self.validate_identity(
            """SELECT JSON '"foo"' AS json_data""",
            """SELECT PARSE_JSON('"foo"') AS json_data""",
        )

        self.validate_all("SELECT SPLIT(foo)", write={"bigquery": "SELECT SPLIT(foo, ',')"})
        self.validate_all("SELECT 1 AS hash", write={"bigquery": "SELECT 1 AS `hash`"})
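
Aside (not part of the diff): validate_identity with a second argument asserts that parsing the first SQL and regenerating it yields the second. The GROUP BY canonicalization above can be checked directly; a minimal sketch:

    import sqlglot

    sql = "SELECT y + 1 z FROM x GROUP BY y + 1 ORDER BY z"
    # When ORDER BY references the alias, the grouped expression
    # collapses to the alias as well, per the test above.
    assert sqlglot.transpile(sql, read="bigquery", write="bigquery")[0] == (
        "SELECT y + 1 AS z FROM x GROUP BY z ORDER BY z"
    )
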
@@ -246,7 +236,7 @@ WHERE
            },
        )
        self.validate_all(
            "WITH cte AS (SELECT [1, 2, 3] AS arr) SELECT col FROM cte CROSS JOIN UNNEST(arr) AS col",
            "WITH cte AS (SELECT [1, 2, 3] AS arr) SELECT IF(pos = pos_2, col, NULL) AS col FROM cte, UNNEST(GENERATE_ARRAY(0, GREATEST(ARRAY_LENGTH(arr)) - 1)) AS pos CROSS JOIN UNNEST(arr) AS col WITH OFFSET AS pos_2 WHERE pos = pos_2 OR (pos > (ARRAY_LENGTH(arr) - 1) AND pos_2 = (ARRAY_LENGTH(arr) - 1))",
            read={
                "spark": "WITH cte AS (SELECT ARRAY(1, 2, 3) AS arr) SELECT EXPLODE(arr) FROM cte"
            },
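
Aside (not part of the diff): the long expected string above comes from sqlglot's explode-to-unnest rewrite, since BigQuery has no direct EXPLODE. A minimal sketch of the same conversion:

    import sqlglot

    spark_sql = "WITH cte AS (SELECT ARRAY(1, 2, 3) AS arr) SELECT EXPLODE(arr) FROM cte"
    # Reading Spark and writing BigQuery rewrites EXPLODE into
    # UNNEST ... WITH OFFSET plus the position bookkeeping asserted above.
    print(sqlglot.transpile(spark_sql, read="spark", write="bigquery")[0])
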
@@ -291,6 +281,10 @@ WHERE
                "bigquery": "SELECT ARRAY(SELECT AS STRUCT 1 AS a, 2 AS b)",
            },
        )
        self.validate_identity(
            r"REGEXP_EXTRACT(svc_plugin_output, r'\\\((.*)')",
            r"REGEXP_EXTRACT(svc_plugin_output, '\\\\\\((.*)')",
        )
        self.validate_all(
            "REGEXP_CONTAINS('foo', '.*')",
            read={
@@ -302,7 +296,7 @@ WHERE
                "mysql": "REGEXP_LIKE('foo', '.*')",
                "starrocks": "REGEXP('foo', '.*')",
            },
        ),
        )
        self.validate_all(
            '"""x"""',
            write={
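
Aside (not part of the diff): each read={...} entry in validate_all asserts that transpiling that input from that dialect yields the first SQL string. For the MySQL entry above, a minimal sketch:

    import sqlglot

    # MySQL's REGEXP_LIKE becomes REGEXP_CONTAINS when writing BigQuery,
    # mirroring the read={"mysql": ...} entry in the test above.
    print(sqlglot.transpile("REGEXP_LIKE('foo', '.*')", read="mysql", write="bigquery")[0])
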
@@ -453,7 +447,6 @@ WHERE
            "SELECT ARRAY(SELECT * FROM foo JOIN bla ON x = y)",
            write={"bigquery": "SELECT ARRAY(SELECT * FROM foo JOIN bla ON x = y)"},
        )

        self.validate_all(
            "x IS unknown",
            write={
@@ -464,6 +457,16 @@ WHERE
                "spark": "x IS NULL",
            },
        )
        self.validate_all(
            "x IS NOT unknown",
            write={
                "bigquery": "NOT x IS NULL",
                "duckdb": "NOT x IS NULL",
                "presto": "NOT x IS NULL",
                "hive": "NOT x IS NULL",
                "spark": "NOT x IS NULL",
            },
        )
        self.validate_all(
            "CURRENT_TIMESTAMP()",
            read={
@@ -682,16 +685,32 @@ WHERE
                "spark": "TO_JSON(x)",
            },
        )
        self.validate_all(
            """SELECT
  `u`.`harness_user_email` AS `harness_user_email`,
  `d`.`harness_user_id` AS `harness_user_id`,
  `harness_account_id` AS `harness_account_id`
FROM `analytics_staging`.`stg_mongodb__users` AS `u`, UNNEST(`u`.`harness_cluster_details`) AS `d`, UNNEST(`d`.`harness_account_ids`) AS `harness_account_id`
WHERE
  NOT `harness_account_id` IS NULL""",
            read={
                "": """
                SELECT
                  "u"."harness_user_email" AS "harness_user_email",
                  "_q_0"."d"."harness_user_id" AS "harness_user_id",
                  "_q_1"."harness_account_id" AS "harness_account_id"
                FROM
                  "analytics_staging"."stg_mongodb__users" AS "u",
                  UNNEST("u"."harness_cluster_details") AS "_q_0"("d"),
                  UNNEST("_q_0"."d"."harness_account_ids") AS "_q_1"("harness_account_id")
                WHERE
                  NOT "_q_1"."harness_account_id" IS NULL
                """
            },
            pretty=True,
        )

        self.validate_identity(
            "SELECT y + 1 z FROM x GROUP BY y + 1 ORDER BY z",
            "SELECT y + 1 AS z FROM x GROUP BY z ORDER BY z",
        )
        self.validate_identity(
            "SELECT y + 1 z FROM x GROUP BY y + 1",
            "SELECT y + 1 AS z FROM x GROUP BY y + 1",
        )
        self.validate_identity("SELECT y + 1 FROM x GROUP BY y + 1 ORDER BY 1")
        self.validate_identity("LOG(n, b)")

    def test_user_defined_functions(self):
        self.validate_identity(
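
Aside (not part of the diff): the moved harness test above combines pretty=True output with UNNEST column aliases parsed by the default dialect (the empty-string key in read={...}). A minimal sketch of a similar call; leaving read unset selects the default dialect:

    import sqlglot

    default_sql = '''
    SELECT "u"."harness_user_email" AS "harness_user_email"
    FROM "analytics_staging"."stg_mongodb__users" AS "u",
      UNNEST("u"."harness_cluster_details") AS "_q_0"("d")
    '''
    # pretty=True yields the indented, backtick-quoted BigQuery SQL
    # of the kind the expected string in the test encodes.
    print(sqlglot.transpile(default_sql, write="bigquery", pretty=True)[0])
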
@@ -702,6 +721,10 @@ WHERE
        self.validate_identity(
            "CREATE TABLE FUNCTION a(x INT64) RETURNS TABLE <q STRING, r INT64> AS SELECT s, t"
        )
        self.validate_identity(
            '''CREATE TEMPORARY FUNCTION string_length_0(strings ARRAY<STRING>) RETURNS FLOAT64 LANGUAGE js AS """'use strict'; function string_length(strings) { return _.sum(_.map(strings, ((x) => x.length))); } return string_length(strings);""" OPTIONS (library=['gs://ibis-testing-libraries/lodash.min.js'])''',
            "CREATE TEMPORARY FUNCTION string_length_0(strings ARRAY<STRING>) RETURNS FLOAT64 LANGUAGE js OPTIONS (library=['gs://ibis-testing-libraries/lodash.min.js']) AS '\\'use strict\\'; function string_length(strings) { return _.sum(_.map(strings, ((x) => x.length))); } return string_length(strings);'",
        )

    def test_group_concat(self):
        self.validate_all(