1
0
Fork 0

Adding upstream version 26.25.3.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-06-07 04:46:28 +02:00
parent bc7749846c
commit d9e621c994
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
83 changed files with 67317 additions and 67680 deletions

78
.github/workflows/benchmark-sqlglot.yml vendored Normal file
View file

@@ -0,0 +1,78 @@
name: Benchmark pull requests
on:
  issue_comment:
    types: [created, edited, deleted]
  pull_request:
    types: [opened, synchronize, reopened]

jobs:
  run-benchmark:
    name: run benchmark
    runs-on: ubuntu-latest
    # Run only when a PR comment, or the PR description itself, contains "/benchmark".
    if: |
      (github.event_name == 'issue_comment' &&
      contains(github.event.comment.body, '/benchmark') &&
      github.event.issue.pull_request) ||
      (github.event_name == 'pull_request' &&
      contains(github.event.pull_request.body, '/benchmark'))
    steps:
      - name: Checkout PR branch
        uses: actions/checkout@v4
        with:
          # For issue_comment events the default checkout ref is the default
          # branch, NOT the PR — check out the PR merge ref explicitly so the
          # "PR" benchmark actually measures the PR. For pull_request events
          # an empty ref keeps the default (the PR merge commit).
          ref: ${{ github.event_name == 'issue_comment' && format('refs/pull/{0}/merge', github.event.issue.number) || '' }}
          fetch-depth: 0  # Needed to fetch main branch too
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.13"
      - name: Create a virtual environment
        run: |
          python -m venv .venv
          source ./.venv/bin/activate
          python -m pip install --upgrade pip
          pip install pyperf
      - name: Run benchmark on PR branch
        run: |
          source ./.venv/bin/activate
          make install-dev
          make install-dev-rs-release
          python benchmarks/parse.py --quiet --output bench_parse_pr.json
          python benchmarks/optimize.py --quiet --fast --output bench_optimize_pr.json
      - name: Checkout main branch into subdir
        run: |
          git fetch origin main
          git worktree add main-branch origin/main
      - name: Reset virtual environment
        run: |
          rm -rf .venv
          python -m venv .venv
          source ./.venv/bin/activate
          python -m pip install --upgrade pip
          pip install pyperf
      - name: Run benchmark on main branch
        run: |
          source ./.venv/bin/activate
          cd main-branch
          make install-dev
          make install-dev-rs-release
          python benchmarks/parse.py --quiet --output ../bench_parse_main.json
          python benchmarks/optimize.py --quiet --fast --output ../bench_optimize_main.json
          cd ..
      - name: Compare benchmarks and save results
        # pyperf compare_to takes <reference> <changed>: main is the baseline,
        # the PR is the change being measured.
        run: |
          source ./.venv/bin/activate
          python -m pyperf compare_to bench_parse_main.json bench_parse_pr.json --table --table-format=md > bench_parse_comparison.txt
          python -m pyperf compare_to bench_optimize_main.json bench_optimize_pr.json --table --table-format=md > bench_optimize_comparison.txt
      - name: Combine benchmark outputs
        run: |
          echo "## Parsing Benchmark" > combined_benchmarks.md
          cat bench_parse_comparison.txt >> combined_benchmarks.md
          echo -e "\n---\n" >> combined_benchmarks.md
          echo "## Optimization Benchmark" >> combined_benchmarks.md
          cat bench_optimize_comparison.txt >> combined_benchmarks.md
      - name: Comment on PR for parse benchmark results
        uses: peter-evans/create-or-update-comment@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.issue.number || github.event.pull_request.number }}
          body-file: combined_benchmarks.md

View file

@ -1,8 +1,10 @@
name: Benchmark Rust tokenizer changes
on: on:
pull_request: pull_request:
paths: paths:
- 'sqlglotrs/**' - 'sqlglotrs/**'
name: benchmark pull requests
jobs: jobs:
run-benchmark: run-benchmark:
name: run benchmark name: run benchmark

View file

@ -1,4 +1,4 @@
name: Publish Python Release to PyPI name: Publish sqlglot and sqlglotrs to PyPI
on: on:
push: push:

View file

@ -1,20 +1,21 @@
name: Test and Lint Python Package name: Run tests and linter checks
on: on:
push: push:
branches: [ main ] branches: [ main ]
pull_request: pull_request:
branches: [ main ] branches: [ main ]
jobs: jobs:
build: run-checks:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
strategy: strategy:
matrix: matrix:
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4 uses: actions/setup-python@v5
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
cache: pip cache: pip
@ -26,7 +27,7 @@ jobs:
source ./.venv/bin/activate source ./.venv/bin/activate
python -m pip install --upgrade pip python -m pip install --upgrade pip
make install-dev make install-dev
- name: Run checks (linter, code style, tests) - name: Run tests and linter checks
run: | run: |
source ./.venv/bin/activate source ./.venv/bin/activate
make check make check

File diff suppressed because it is too large Load diff

View file

@ -1,28 +0,0 @@
import typing as t
def border(columns: t.Iterable[str]) -> str:
    """Join the given cell strings with `` | `` and wrap them in pipes,
    producing one bordered table line such as ``| a | b |``."""
    joined = " | ".join(columns)
    return "| " + joined + " |"
def ascii_table(table: list[dict[str, t.Any]]) -> str:
    """Render a list of row dicts as a pipe-delimited ASCII table.

    Columns are the union of all row keys, in first-seen order. Each cell
    is right-justified and truncated to the column width (at least 15
    characters, or the column name's length if longer).

    Args:
        table: Rows to render; rows are not required to share keys.

    Returns:
        The table as a newline-joined string: header line, separator
        line, then one line per row.
    """
    # Union of keys across all rows, preserving first-seen order.
    columns: list[str] = []
    for row in table:
        for key in row:
            if key not in columns:
                columns.append(key)
    widths = {column: max(len(column), 15) for column in columns}
    lines = [
        border(column.rjust(width) for column, width in widths.items()),
        border("-" * width for width in widths.values()),
    ]
    for row in table:
        lines.append(
            # .get so a row missing a column renders blank instead of
            # raising KeyError (column collection above allows ragged rows).
            border(str(row.get(column, "")).rjust(width)[0:width] for column, width in widths.items())
        )
    return "\n".join(lines)

View file

@ -1,12 +1,13 @@
import sys import sys
import typing as t import os
from argparse import ArgumentParser import pyperf
# Add the project root to the path so we can import from tests
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from benchmarks.helpers import ascii_table
from sqlglot.optimizer import optimize from sqlglot.optimizer import optimize
from sqlglot import parse_one from sqlglot import parse_one
from tests.helpers import load_sql_fixture_pairs, TPCH_SCHEMA, TPCDS_SCHEMA from tests.helpers import load_sql_fixture_pairs, TPCH_SCHEMA, TPCDS_SCHEMA
from timeit import Timer
# Deeply nested conditions currently require a lot of recursion # Deeply nested conditions currently require a lot of recursion
sys.setrecursionlimit(10000) sys.setrecursionlimit(10000)
@ -16,55 +17,56 @@ def gen_condition(n):
return parse_one(" OR ".join(f"a = {i} AND b = {i}" for i in range(n))) return parse_one(" OR ".join(f"a = {i} AND b = {i}" for i in range(n)))
BENCHMARKS = { # Create benchmark functions that return the setup data
"tpch": lambda: ( def get_tpch_setup():
return (
[parse_one(sql) for _, sql, _ in load_sql_fixture_pairs("optimizer/tpc-h/tpc-h.sql")], [parse_one(sql) for _, sql, _ in load_sql_fixture_pairs("optimizer/tpc-h/tpc-h.sql")],
TPCH_SCHEMA, TPCH_SCHEMA,
3, )
),
"tpcds": lambda: (
def get_tpcds_setup():
return (
[parse_one(sql) for _, sql, _ in load_sql_fixture_pairs("optimizer/tpc-ds/tpc-ds.sql")], [parse_one(sql) for _, sql, _ in load_sql_fixture_pairs("optimizer/tpc-ds/tpc-ds.sql")],
TPCDS_SCHEMA, TPCDS_SCHEMA,
3, )
),
"condition_10": lambda: (
[gen_condition(10)],
{},
10,
),
"condition_100": lambda: (
[gen_condition(100)],
{},
10,
),
"condition_1000": lambda: (
[gen_condition(1000)],
{},
3,
),
}
def bench() -> list[dict[str, t.Any]]: def get_condition_10_setup():
parser = ArgumentParser() return ([gen_condition(10)], {})
parser.add_argument("-b", "--benchmark", choices=BENCHMARKS, action="append")
args = parser.parse_args()
benchmarks = list(args.benchmark or BENCHMARKS)
table = []
for benchmark in benchmarks:
expressions, schema, n = BENCHMARKS[benchmark]()
def func(): def get_condition_100_setup():
for e in expressions: return ([gen_condition(100)], {})
optimize(e, schema)
timer = Timer(func)
min_duration = min(timer.repeat(repeat=n, number=1))
table.append({"Benchmark": benchmark, "Duration (s)": round(min_duration, 4)})
return table def get_condition_1000_setup():
return ([gen_condition(1000)], {})
# Optimizer functions that will be benchmarked
def optimize_queries(expressions, schema):
for e in expressions:
optimize(e, schema)
def run_benchmarks():
runner = pyperf.Runner()
# Define benchmarks with their setup functions
benchmarks = {
"tpch": get_tpch_setup,
# "tpcds": get_tpcds_setup, # This is left out because it's too slow in CI
"condition_10": get_condition_10_setup,
"condition_100": get_condition_100_setup,
"condition_1000": get_condition_1000_setup,
}
for benchmark_name, benchmark_setup in benchmarks.items():
expressions, schema = benchmark_setup()
runner.bench_func(f"optimize_{benchmark_name}", optimize_queries, expressions, schema)
if __name__ == "__main__": if __name__ == "__main__":
print(ascii_table(bench())) run_benchmarks()

View file

@ -1,12 +1,8 @@
import collections.abc import collections.abc
import pyperf
from benchmarks.helpers import ascii_table
# moz_sql_parser 3.10 compatibility # moz_sql_parser 3.10 compatibility
collections.Iterable = collections.abc.Iterable collections.Iterable = collections.abc.Iterable
import timeit
import numpy as np
# import sqlfluff # import sqlfluff
# import moz_sql_parser # import moz_sql_parser
@ -56,7 +52,7 @@ ORDER BY
"e"."employee_id" "e"."employee_id"
""" """
short = "select 1 as a, case when 1 then 1 when 2 then 2 else 3 end as b, c from x" short = "SELECT 1 AS a, CASE WHEN 1 THEN 1 WHEN 2 THEN 2 ELSE 3 END AS b, c FROM x"
crazy = "SELECT 1+" crazy = "SELECT 1+"
crazy += "+".join(str(i) for i in range(500)) crazy += "+".join(str(i) for i in range(500))
@ -190,41 +186,20 @@ def sqlfluff_parse(sql):
sqlfluff.parse(sql) sqlfluff.parse(sql)
def diff(row, column): QUERIES = {"tpch": tpch, "short": short, "long": long, "crazy": crazy}
if column == "Query":
return ""
column = row[column]
if isinstance(column, str):
return " (N/A)"
return f" ({str(column / row['sqlglot'])[0:5]})"
libs = [ def run_benchmarks():
"sqlglot", runner = pyperf.Runner()
"sqlglotrs",
# "sqlfluff",
# "sqltree",
# "sqlparse",
# "moz_sql_parser",
# "sqloxide",
]
table = []
for name, sql in {"tpch": tpch, "short": short, "long": long, "crazy": crazy}.items(): libs = ["sqlglot", "sqlglotrs"]
row = {"Query": name}
table.append(row)
for lib in libs: for lib in libs:
try: for query_name, sql in QUERIES.items():
row[lib] = np.mean(timeit.repeat(lambda: globals()[lib + "_parse"](sql), number=3)) bench_name = f"parse_{lib}_{query_name}"
except Exception as e: parse_func = globals()[f"{lib}_parse"]
print(e)
row[lib] = "error"
print( runner.bench_func(bench_name, parse_func, sql)
ascii_table(
[
{k: v if v == "Query" else str(row[k])[0:7] + diff(row, k) for k, v in row.items()} if __name__ == "__main__":
for row in table run_benchmarks()
]
)
)

File diff suppressed because one or more lines are too long

View file

@ -84,8 +84,8 @@
</span><span id="L-17"><a href="#L-17"><span class="linenos">17</span></a><span class="n">__version_tuple__</span><span class="p">:</span> <span class="n">VERSION_TUPLE</span> </span><span id="L-17"><a href="#L-17"><span class="linenos">17</span></a><span class="n">__version_tuple__</span><span class="p">:</span> <span class="n">VERSION_TUPLE</span>
</span><span id="L-18"><a href="#L-18"><span class="linenos">18</span></a><span class="n">version_tuple</span><span class="p">:</span> <span class="n">VERSION_TUPLE</span> </span><span id="L-18"><a href="#L-18"><span class="linenos">18</span></a><span class="n">version_tuple</span><span class="p">:</span> <span class="n">VERSION_TUPLE</span>
</span><span id="L-19"><a href="#L-19"><span class="linenos">19</span></a> </span><span id="L-19"><a href="#L-19"><span class="linenos">19</span></a>
</span><span id="L-20"><a href="#L-20"><span class="linenos">20</span></a><span class="n">__version__</span> <span class="o">=</span> <span class="n">version</span> <span class="o">=</span> <span class="s1">&#39;26.23.0&#39;</span> </span><span id="L-20"><a href="#L-20"><span class="linenos">20</span></a><span class="n">__version__</span> <span class="o">=</span> <span class="n">version</span> <span class="o">=</span> <span class="s1">&#39;26.25.2&#39;</span>
</span><span id="L-21"><a href="#L-21"><span class="linenos">21</span></a><span class="n">__version_tuple__</span> <span class="o">=</span> <span class="n">version_tuple</span> <span class="o">=</span> <span class="p">(</span><span class="mi">26</span><span class="p">,</span> <span class="mi">23</span><span class="p">,</span> <span class="mi">0</span><span class="p">)</span> </span><span id="L-21"><a href="#L-21"><span class="linenos">21</span></a><span class="n">__version_tuple__</span> <span class="o">=</span> <span class="n">version_tuple</span> <span class="o">=</span> <span class="p">(</span><span class="mi">26</span><span class="p">,</span> <span class="mi">25</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
</span></pre></div> </span></pre></div>
@ -93,7 +93,7 @@
<section id="__version__"> <section id="__version__">
<div class="attr variable"> <div class="attr variable">
<span class="name">__version__</span><span class="annotation">: str</span> = <span class="name">__version__</span><span class="annotation">: str</span> =
<span class="default_value">&#39;26.23.0&#39;</span> <span class="default_value">&#39;26.25.2&#39;</span>
</div> </div>
@ -105,7 +105,7 @@
<section id="__version_tuple__"> <section id="__version_tuple__">
<div class="attr variable"> <div class="attr variable">
<span class="name">__version_tuple__</span><span class="annotation">: object</span> = <span class="name">__version_tuple__</span><span class="annotation">: object</span> =
<span class="default_value">(26, 23, 0)</span> <span class="default_value">(26, 25, 2)</span>
</div> </div>
@ -117,7 +117,7 @@
<section id="version"> <section id="version">
<div class="attr variable"> <div class="attr variable">
<span class="name">version</span><span class="annotation">: str</span> = <span class="name">version</span><span class="annotation">: str</span> =
<span class="default_value">&#39;26.23.0&#39;</span> <span class="default_value">&#39;26.25.2&#39;</span>
</div> </div>
@ -129,7 +129,7 @@
<section id="version_tuple"> <section id="version_tuple">
<div class="attr variable"> <div class="attr variable">
<span class="name">version_tuple</span><span class="annotation">: object</span> = <span class="name">version_tuple</span><span class="annotation">: object</span> =
<span class="default_value">(26, 23, 0)</span> <span class="default_value">(26, 25, 2)</span>
</div> </div>

View file

@ -359,7 +359,7 @@ dialect implementations in order to understand how their various components can
<section id="Athena"> <section id="Athena">
<div class="attr variable"> <div class="attr variable">
<span class="name">Athena</span> = <span class="name">Athena</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904393074608&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634507837104&#39;&gt;</span>
</div> </div>
@ -371,7 +371,7 @@ dialect implementations in order to understand how their various components can
<section id="BigQuery"> <section id="BigQuery">
<div class="attr variable"> <div class="attr variable">
<span class="name">BigQuery</span> = <span class="name">BigQuery</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904406061312&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634507824912&#39;&gt;</span>
</div> </div>
@ -383,7 +383,7 @@ dialect implementations in order to understand how their various components can
<section id="ClickHouse"> <section id="ClickHouse">
<div class="attr variable"> <div class="attr variable">
<span class="name">ClickHouse</span> = <span class="name">ClickHouse</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904391016128&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634506710896&#39;&gt;</span>
</div> </div>
@ -395,7 +395,7 @@ dialect implementations in order to understand how their various components can
<section id="Databricks"> <section id="Databricks">
<div class="attr variable"> <div class="attr variable">
<span class="name">Databricks</span> = <span class="name">Databricks</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904405693408&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634506014208&#39;&gt;</span>
</div> </div>
@ -407,7 +407,7 @@ dialect implementations in order to understand how their various components can
<section id="Doris"> <section id="Doris">
<div class="attr variable"> <div class="attr variable">
<span class="name">Doris</span> = <span class="name">Doris</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904405691728&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634502703680&#39;&gt;</span>
</div> </div>
@ -419,7 +419,7 @@ dialect implementations in order to understand how their various components can
<section id="Drill"> <section id="Drill">
<div class="attr variable"> <div class="attr variable">
<span class="name">Drill</span> = <span class="name">Drill</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904402062592&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634506225040&#39;&gt;</span>
</div> </div>
@ -431,7 +431,7 @@ dialect implementations in order to understand how their various components can
<section id="Druid"> <section id="Druid">
<div class="attr variable"> <div class="attr variable">
<span class="name">Druid</span> = <span class="name">Druid</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904389006720&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634506216304&#39;&gt;</span>
</div> </div>
@ -443,7 +443,7 @@ dialect implementations in order to understand how their various components can
<section id="DuckDB"> <section id="DuckDB">
<div class="attr variable"> <div class="attr variable">
<span class="name">DuckDB</span> = <span class="name">DuckDB</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904389011808&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634494301136&#39;&gt;</span>
</div> </div>
@ -455,7 +455,7 @@ dialect implementations in order to understand how their various components can
<section id="Dune"> <section id="Dune">
<div class="attr variable"> <div class="attr variable">
<span class="name">Dune</span> = <span class="name">Dune</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904405826688&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634505383792&#39;&gt;</span>
</div> </div>
@ -467,7 +467,7 @@ dialect implementations in order to understand how their various components can
<section id="Hive"> <section id="Hive">
<div class="attr variable"> <div class="attr variable">
<span class="name">Hive</span> = <span class="name">Hive</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904397287840&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634502237920&#39;&gt;</span>
</div> </div>
@ -479,7 +479,7 @@ dialect implementations in order to understand how their various components can
<section id="Materialize"> <section id="Materialize">
<div class="attr variable"> <div class="attr variable">
<span class="name">Materialize</span> = <span class="name">Materialize</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904407772256&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634509349440&#39;&gt;</span>
</div> </div>
@ -491,7 +491,7 @@ dialect implementations in order to understand how their various components can
<section id="MySQL"> <section id="MySQL">
<div class="attr variable"> <div class="attr variable">
<span class="name">MySQL</span> = <span class="name">MySQL</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904405858928&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634507150432&#39;&gt;</span>
</div> </div>
@ -503,7 +503,7 @@ dialect implementations in order to understand how their various components can
<section id="Oracle"> <section id="Oracle">
<div class="attr variable"> <div class="attr variable">
<span class="name">Oracle</span> = <span class="name">Oracle</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904401435536&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634496723616&#39;&gt;</span>
</div> </div>
@ -515,7 +515,7 @@ dialect implementations in order to understand how their various components can
<section id="Postgres"> <section id="Postgres">
<div class="attr variable"> <div class="attr variable">
<span class="name">Postgres</span> = <span class="name">Postgres</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904406312352&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634496715216&#39;&gt;</span>
</div> </div>
@ -527,7 +527,7 @@ dialect implementations in order to understand how their various components can
<section id="Presto"> <section id="Presto">
<div class="attr variable"> <div class="attr variable">
<span class="name">Presto</span> = <span class="name">Presto</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904406604816&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634508955024&#39;&gt;</span>
</div> </div>
@ -539,7 +539,7 @@ dialect implementations in order to understand how their various components can
<section id="PRQL"> <section id="PRQL">
<div class="attr variable"> <div class="attr variable">
<span class="name">PRQL</span> = <span class="name">PRQL</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904406615424&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634507747792&#39;&gt;</span>
</div> </div>
@ -551,7 +551,7 @@ dialect implementations in order to understand how their various components can
<section id="Redshift"> <section id="Redshift">
<div class="attr variable"> <div class="attr variable">
<span class="name">Redshift</span> = <span class="name">Redshift</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904401186272&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634504908416&#39;&gt;</span>
</div> </div>
@ -563,7 +563,7 @@ dialect implementations in order to understand how their various components can
<section id="RisingWave"> <section id="RisingWave">
<div class="attr variable"> <div class="attr variable">
<span class="name">RisingWave</span> = <span class="name">RisingWave</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904401188912&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634504916624&#39;&gt;</span>
</div> </div>
@ -575,7 +575,7 @@ dialect implementations in order to understand how their various components can
<section id="Snowflake"> <section id="Snowflake">
<div class="attr variable"> <div class="attr variable">
<span class="name">Snowflake</span> = <span class="name">Snowflake</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904405754480&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634507238688&#39;&gt;</span>
</div> </div>
@ -587,7 +587,7 @@ dialect implementations in order to understand how their various components can
<section id="Spark"> <section id="Spark">
<div class="attr variable"> <div class="attr variable">
<span class="name">Spark</span> = <span class="name">Spark</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904405919904&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634507369712&#39;&gt;</span>
</div> </div>
@ -599,7 +599,7 @@ dialect implementations in order to understand how their various components can
<section id="Spark2"> <section id="Spark2">
<div class="attr variable"> <div class="attr variable">
<span class="name">Spark2</span> = <span class="name">Spark2</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904406383936&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634506022528&#39;&gt;</span>
</div> </div>
@ -611,7 +611,7 @@ dialect implementations in order to understand how their various components can
<section id="SQLite"> <section id="SQLite">
<div class="attr variable"> <div class="attr variable">
<span class="name">SQLite</span> = <span class="name">SQLite</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904406386960&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634506023632&#39;&gt;</span>
</div> </div>
@ -623,7 +623,7 @@ dialect implementations in order to understand how their various components can
<section id="StarRocks"> <section id="StarRocks">
<div class="attr variable"> <div class="attr variable">
<span class="name">StarRocks</span> = <span class="name">StarRocks</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904391390896&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634509005568&#39;&gt;</span>
</div> </div>
@ -635,7 +635,7 @@ dialect implementations in order to understand how their various components can
<section id="Tableau"> <section id="Tableau">
<div class="attr variable"> <div class="attr variable">
<span class="name">Tableau</span> = <span class="name">Tableau</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904396179488&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634508366016&#39;&gt;</span>
</div> </div>
@ -647,7 +647,7 @@ dialect implementations in order to understand how their various components can
<section id="Teradata"> <section id="Teradata">
<div class="attr variable"> <div class="attr variable">
<span class="name">Teradata</span> = <span class="name">Teradata</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904389318016&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634494650928&#39;&gt;</span>
</div> </div>
@ -659,7 +659,7 @@ dialect implementations in order to understand how their various components can
<section id="Trino"> <section id="Trino">
<div class="attr variable"> <div class="attr variable">
<span class="name">Trino</span> = <span class="name">Trino</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904389325888&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634494658800&#39;&gt;</span>
</div> </div>
@ -671,7 +671,7 @@ dialect implementations in order to understand how their various components can
<section id="TSQL"> <section id="TSQL">
<div class="attr variable"> <div class="attr variable">
<span class="name">TSQL</span> = <span class="name">TSQL</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904389333872&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634494683168&#39;&gt;</span>
</div> </div>
@ -683,7 +683,7 @@ dialect implementations in order to understand how their various components can
<section id="Dialect"> <section id="Dialect">
<div class="attr variable"> <div class="attr variable">
<span class="name">Dialect</span> = <span class="name">Dialect</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904389341792&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634494691088&#39;&gt;</span>
</div> </div>
@ -695,7 +695,7 @@ dialect implementations in order to understand how their various components can
<section id="Dialects"> <section id="Dialects">
<div class="attr variable"> <div class="attr variable">
<span class="name">Dialects</span> = <span class="name">Dialects</span> =
<span class="default_value">&lt;MagicMock id=&#39;139904389366160&#39;&gt;</span> <span class="default_value">&lt;MagicMock id=&#39;140634494715456&#39;&gt;</span>
</div> </div>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -739,6 +739,7 @@ Default: True</li>
<dd id="Druid.Generator.SUPPORTS_CONVERT_TIMEZONE" class="variable"><a href="../generator.html#Generator.SUPPORTS_CONVERT_TIMEZONE">SUPPORTS_CONVERT_TIMEZONE</a></dd> <dd id="Druid.Generator.SUPPORTS_CONVERT_TIMEZONE" class="variable"><a href="../generator.html#Generator.SUPPORTS_CONVERT_TIMEZONE">SUPPORTS_CONVERT_TIMEZONE</a></dd>
<dd id="Druid.Generator.SUPPORTS_MEDIAN" class="variable"><a href="../generator.html#Generator.SUPPORTS_MEDIAN">SUPPORTS_MEDIAN</a></dd> <dd id="Druid.Generator.SUPPORTS_MEDIAN" class="variable"><a href="../generator.html#Generator.SUPPORTS_MEDIAN">SUPPORTS_MEDIAN</a></dd>
<dd id="Druid.Generator.SUPPORTS_UNIX_SECONDS" class="variable"><a href="../generator.html#Generator.SUPPORTS_UNIX_SECONDS">SUPPORTS_UNIX_SECONDS</a></dd> <dd id="Druid.Generator.SUPPORTS_UNIX_SECONDS" class="variable"><a href="../generator.html#Generator.SUPPORTS_UNIX_SECONDS">SUPPORTS_UNIX_SECONDS</a></dd>
<dd id="Druid.Generator.ALTER_SET_WRAPPED" class="variable"><a href="../generator.html#Generator.ALTER_SET_WRAPPED">ALTER_SET_WRAPPED</a></dd>
<dd id="Druid.Generator.PARSE_JSON_NAME" class="variable"><a href="../generator.html#Generator.PARSE_JSON_NAME">PARSE_JSON_NAME</a></dd> <dd id="Druid.Generator.PARSE_JSON_NAME" class="variable"><a href="../generator.html#Generator.PARSE_JSON_NAME">PARSE_JSON_NAME</a></dd>
<dd id="Druid.Generator.ARRAY_SIZE_NAME" class="variable"><a href="../generator.html#Generator.ARRAY_SIZE_NAME">ARRAY_SIZE_NAME</a></dd> <dd id="Druid.Generator.ARRAY_SIZE_NAME" class="variable"><a href="../generator.html#Generator.ARRAY_SIZE_NAME">ARRAY_SIZE_NAME</a></dd>
<dd id="Druid.Generator.ALTER_SET_TYPE" class="variable"><a href="../generator.html#Generator.ALTER_SET_TYPE">ALTER_SET_TYPE</a></dd> <dd id="Druid.Generator.ALTER_SET_TYPE" class="variable"><a href="../generator.html#Generator.ALTER_SET_TYPE">ALTER_SET_TYPE</a></dd>
@ -756,6 +757,7 @@ Default: True</li>
<dd id="Druid.Generator.UNWRAPPED_INTERVAL_VALUES" class="variable"><a href="../generator.html#Generator.UNWRAPPED_INTERVAL_VALUES">UNWRAPPED_INTERVAL_VALUES</a></dd> <dd id="Druid.Generator.UNWRAPPED_INTERVAL_VALUES" class="variable"><a href="../generator.html#Generator.UNWRAPPED_INTERVAL_VALUES">UNWRAPPED_INTERVAL_VALUES</a></dd>
<dd id="Druid.Generator.PARAMETERIZABLE_TEXT_TYPES" class="variable"><a href="../generator.html#Generator.PARAMETERIZABLE_TEXT_TYPES">PARAMETERIZABLE_TEXT_TYPES</a></dd> <dd id="Druid.Generator.PARAMETERIZABLE_TEXT_TYPES" class="variable"><a href="../generator.html#Generator.PARAMETERIZABLE_TEXT_TYPES">PARAMETERIZABLE_TEXT_TYPES</a></dd>
<dd id="Druid.Generator.EXPRESSIONS_WITHOUT_NESTED_CTES" class="variable"><a href="../generator.html#Generator.EXPRESSIONS_WITHOUT_NESTED_CTES">EXPRESSIONS_WITHOUT_NESTED_CTES</a></dd> <dd id="Druid.Generator.EXPRESSIONS_WITHOUT_NESTED_CTES" class="variable"><a href="../generator.html#Generator.EXPRESSIONS_WITHOUT_NESTED_CTES">EXPRESSIONS_WITHOUT_NESTED_CTES</a></dd>
<dd id="Druid.Generator.RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS" class="variable"><a href="../generator.html#Generator.RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS">RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS</a></dd>
<dd id="Druid.Generator.SENTINEL_LINE_BREAK" class="variable"><a href="../generator.html#Generator.SENTINEL_LINE_BREAK">SENTINEL_LINE_BREAK</a></dd> <dd id="Druid.Generator.SENTINEL_LINE_BREAK" class="variable"><a href="../generator.html#Generator.SENTINEL_LINE_BREAK">SENTINEL_LINE_BREAK</a></dd>
<dd id="Druid.Generator.pretty" class="variable"><a href="../generator.html#Generator.pretty">pretty</a></dd> <dd id="Druid.Generator.pretty" class="variable"><a href="../generator.html#Generator.pretty">pretty</a></dd>
<dd id="Druid.Generator.identify" class="variable"><a href="../generator.html#Generator.identify">identify</a></dd> <dd id="Druid.Generator.identify" class="variable"><a href="../generator.html#Generator.identify">identify</a></dd>
@ -774,7 +776,7 @@ Default: True</li>
<dd id="Druid.Generator.unsupported" class="function"><a href="../generator.html#Generator.unsupported">unsupported</a></dd> <dd id="Druid.Generator.unsupported" class="function"><a href="../generator.html#Generator.unsupported">unsupported</a></dd>
<dd id="Druid.Generator.sep" class="function"><a href="../generator.html#Generator.sep">sep</a></dd> <dd id="Druid.Generator.sep" class="function"><a href="../generator.html#Generator.sep">sep</a></dd>
<dd id="Druid.Generator.seg" class="function"><a href="../generator.html#Generator.seg">seg</a></dd> <dd id="Druid.Generator.seg" class="function"><a href="../generator.html#Generator.seg">seg</a></dd>
<dd id="Druid.Generator.pad_comment" class="function"><a href="../generator.html#Generator.pad_comment">pad_comment</a></dd> <dd id="Druid.Generator.sanitize_comment" class="function"><a href="../generator.html#Generator.sanitize_comment">sanitize_comment</a></dd>
<dd id="Druid.Generator.maybe_comment" class="function"><a href="../generator.html#Generator.maybe_comment">maybe_comment</a></dd> <dd id="Druid.Generator.maybe_comment" class="function"><a href="../generator.html#Generator.maybe_comment">maybe_comment</a></dd>
<dd id="Druid.Generator.wrap" class="function"><a href="../generator.html#Generator.wrap">wrap</a></dd> <dd id="Druid.Generator.wrap" class="function"><a href="../generator.html#Generator.wrap">wrap</a></dd>
<dd id="Druid.Generator.no_identify" class="function"><a href="../generator.html#Generator.no_identify">no_identify</a></dd> <dd id="Druid.Generator.no_identify" class="function"><a href="../generator.html#Generator.no_identify">no_identify</a></dd>
@ -909,6 +911,7 @@ Default: True</li>
<dd id="Druid.Generator.matchrecognize_sql" class="function"><a href="../generator.html#Generator.matchrecognize_sql">matchrecognize_sql</a></dd> <dd id="Druid.Generator.matchrecognize_sql" class="function"><a href="../generator.html#Generator.matchrecognize_sql">matchrecognize_sql</a></dd>
<dd id="Druid.Generator.query_modifiers" class="function"><a href="../generator.html#Generator.query_modifiers">query_modifiers</a></dd> <dd id="Druid.Generator.query_modifiers" class="function"><a href="../generator.html#Generator.query_modifiers">query_modifiers</a></dd>
<dd id="Druid.Generator.options_modifier" class="function"><a href="../generator.html#Generator.options_modifier">options_modifier</a></dd> <dd id="Druid.Generator.options_modifier" class="function"><a href="../generator.html#Generator.options_modifier">options_modifier</a></dd>
<dd id="Druid.Generator.for_modifiers" class="function"><a href="../generator.html#Generator.for_modifiers">for_modifiers</a></dd>
<dd id="Druid.Generator.queryoption_sql" class="function"><a href="../generator.html#Generator.queryoption_sql">queryoption_sql</a></dd> <dd id="Druid.Generator.queryoption_sql" class="function"><a href="../generator.html#Generator.queryoption_sql">queryoption_sql</a></dd>
<dd id="Druid.Generator.offset_limit_modifiers" class="function"><a href="../generator.html#Generator.offset_limit_modifiers">offset_limit_modifiers</a></dd> <dd id="Druid.Generator.offset_limit_modifiers" class="function"><a href="../generator.html#Generator.offset_limit_modifiers">offset_limit_modifiers</a></dd>
<dd id="Druid.Generator.after_limit_modifiers" class="function"><a href="../generator.html#Generator.after_limit_modifiers">after_limit_modifiers</a></dd> <dd id="Druid.Generator.after_limit_modifiers" class="function"><a href="../generator.html#Generator.after_limit_modifiers">after_limit_modifiers</a></dd>
@ -1142,6 +1145,7 @@ Default: True</li>
<dd id="Druid.Generator.encodeproperty_sql" class="function"><a href="../generator.html#Generator.encodeproperty_sql">encodeproperty_sql</a></dd> <dd id="Druid.Generator.encodeproperty_sql" class="function"><a href="../generator.html#Generator.encodeproperty_sql">encodeproperty_sql</a></dd>
<dd id="Druid.Generator.includeproperty_sql" class="function"><a href="../generator.html#Generator.includeproperty_sql">includeproperty_sql</a></dd> <dd id="Druid.Generator.includeproperty_sql" class="function"><a href="../generator.html#Generator.includeproperty_sql">includeproperty_sql</a></dd>
<dd id="Druid.Generator.xmlelement_sql" class="function"><a href="../generator.html#Generator.xmlelement_sql">xmlelement_sql</a></dd> <dd id="Druid.Generator.xmlelement_sql" class="function"><a href="../generator.html#Generator.xmlelement_sql">xmlelement_sql</a></dd>
<dd id="Druid.Generator.xmlkeyvalueoption_sql" class="function"><a href="../generator.html#Generator.xmlkeyvalueoption_sql">xmlkeyvalueoption_sql</a></dd>
<dd id="Druid.Generator.partitionbyrangeproperty_sql" class="function"><a href="../generator.html#Generator.partitionbyrangeproperty_sql">partitionbyrangeproperty_sql</a></dd> <dd id="Druid.Generator.partitionbyrangeproperty_sql" class="function"><a href="../generator.html#Generator.partitionbyrangeproperty_sql">partitionbyrangeproperty_sql</a></dd>
<dd id="Druid.Generator.partitionbyrangepropertydynamic_sql" class="function"><a href="../generator.html#Generator.partitionbyrangepropertydynamic_sql">partitionbyrangepropertydynamic_sql</a></dd> <dd id="Druid.Generator.partitionbyrangepropertydynamic_sql" class="function"><a href="../generator.html#Generator.partitionbyrangepropertydynamic_sql">partitionbyrangepropertydynamic_sql</a></dd>
<dd id="Druid.Generator.unpivotcolumns_sql" class="function"><a href="../generator.html#Generator.unpivotcolumns_sql">unpivotcolumns_sql</a></dd> <dd id="Druid.Generator.unpivotcolumns_sql" class="function"><a href="../generator.html#Generator.unpivotcolumns_sql">unpivotcolumns_sql</a></dd>

File diff suppressed because one or more lines are too long

View file

@ -590,7 +590,6 @@
<dd id="Dune.Tokenizer.HEREDOC_TAG_IS_IDENTIFIER" class="variable"><a href="../tokens.html#Tokenizer.HEREDOC_TAG_IS_IDENTIFIER">HEREDOC_TAG_IS_IDENTIFIER</a></dd> <dd id="Dune.Tokenizer.HEREDOC_TAG_IS_IDENTIFIER" class="variable"><a href="../tokens.html#Tokenizer.HEREDOC_TAG_IS_IDENTIFIER">HEREDOC_TAG_IS_IDENTIFIER</a></dd>
<dd id="Dune.Tokenizer.HEREDOC_STRING_ALTERNATIVE" class="variable"><a href="../tokens.html#Tokenizer.HEREDOC_STRING_ALTERNATIVE">HEREDOC_STRING_ALTERNATIVE</a></dd> <dd id="Dune.Tokenizer.HEREDOC_STRING_ALTERNATIVE" class="variable"><a href="../tokens.html#Tokenizer.HEREDOC_STRING_ALTERNATIVE">HEREDOC_STRING_ALTERNATIVE</a></dd>
<dd id="Dune.Tokenizer.STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS" class="variable"><a href="../tokens.html#Tokenizer.STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS">STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS</a></dd> <dd id="Dune.Tokenizer.STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS" class="variable"><a href="../tokens.html#Tokenizer.STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS">STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS</a></dd>
<dd id="Dune.Tokenizer.NESTED_COMMENTS" class="variable"><a href="../tokens.html#Tokenizer.NESTED_COMMENTS">NESTED_COMMENTS</a></dd>
<dd id="Dune.Tokenizer.HINT_START" class="variable"><a href="../tokens.html#Tokenizer.HINT_START">HINT_START</a></dd> <dd id="Dune.Tokenizer.HINT_START" class="variable"><a href="../tokens.html#Tokenizer.HINT_START">HINT_START</a></dd>
<dd id="Dune.Tokenizer.TOKENS_PRECEDING_HINT" class="variable"><a href="../tokens.html#Tokenizer.TOKENS_PRECEDING_HINT">TOKENS_PRECEDING_HINT</a></dd> <dd id="Dune.Tokenizer.TOKENS_PRECEDING_HINT" class="variable"><a href="../tokens.html#Tokenizer.TOKENS_PRECEDING_HINT">TOKENS_PRECEDING_HINT</a></dd>
<dd id="Dune.Tokenizer.WHITE_SPACE" class="variable"><a href="../tokens.html#Tokenizer.WHITE_SPACE">WHITE_SPACE</a></dd> <dd id="Dune.Tokenizer.WHITE_SPACE" class="variable"><a href="../tokens.html#Tokenizer.WHITE_SPACE">WHITE_SPACE</a></dd>
@ -610,6 +609,7 @@
</div> </div>
<div><dt><a href="presto.html#Presto.Tokenizer">sqlglot.dialects.presto.Presto.Tokenizer</a></dt> <div><dt><a href="presto.html#Presto.Tokenizer">sqlglot.dialects.presto.Presto.Tokenizer</a></dt>
<dd id="Dune.Tokenizer.UNICODE_STRINGS" class="variable"><a href="presto.html#Presto.Tokenizer.UNICODE_STRINGS">UNICODE_STRINGS</a></dd> <dd id="Dune.Tokenizer.UNICODE_STRINGS" class="variable"><a href="presto.html#Presto.Tokenizer.UNICODE_STRINGS">UNICODE_STRINGS</a></dd>
<dd id="Dune.Tokenizer.NESTED_COMMENTS" class="variable"><a href="presto.html#Presto.Tokenizer.NESTED_COMMENTS">NESTED_COMMENTS</a></dd>
<dd id="Dune.Tokenizer.KEYWORDS" class="variable"><a href="presto.html#Presto.Tokenizer.KEYWORDS">KEYWORDS</a></dd> <dd id="Dune.Tokenizer.KEYWORDS" class="variable"><a href="presto.html#Presto.Tokenizer.KEYWORDS">KEYWORDS</a></dd>
</div> </div>
@ -798,6 +798,7 @@ Default: True</li>
<dd id="Dune.Generator.ARRAY_CONCAT_IS_VAR_LEN" class="variable"><a href="../generator.html#Generator.ARRAY_CONCAT_IS_VAR_LEN">ARRAY_CONCAT_IS_VAR_LEN</a></dd> <dd id="Dune.Generator.ARRAY_CONCAT_IS_VAR_LEN" class="variable"><a href="../generator.html#Generator.ARRAY_CONCAT_IS_VAR_LEN">ARRAY_CONCAT_IS_VAR_LEN</a></dd>
<dd id="Dune.Generator.SUPPORTS_CONVERT_TIMEZONE" class="variable"><a href="../generator.html#Generator.SUPPORTS_CONVERT_TIMEZONE">SUPPORTS_CONVERT_TIMEZONE</a></dd> <dd id="Dune.Generator.SUPPORTS_CONVERT_TIMEZONE" class="variable"><a href="../generator.html#Generator.SUPPORTS_CONVERT_TIMEZONE">SUPPORTS_CONVERT_TIMEZONE</a></dd>
<dd id="Dune.Generator.SUPPORTS_UNIX_SECONDS" class="variable"><a href="../generator.html#Generator.SUPPORTS_UNIX_SECONDS">SUPPORTS_UNIX_SECONDS</a></dd> <dd id="Dune.Generator.SUPPORTS_UNIX_SECONDS" class="variable"><a href="../generator.html#Generator.SUPPORTS_UNIX_SECONDS">SUPPORTS_UNIX_SECONDS</a></dd>
<dd id="Dune.Generator.ALTER_SET_WRAPPED" class="variable"><a href="../generator.html#Generator.ALTER_SET_WRAPPED">ALTER_SET_WRAPPED</a></dd>
<dd id="Dune.Generator.ALTER_SET_TYPE" class="variable"><a href="../generator.html#Generator.ALTER_SET_TYPE">ALTER_SET_TYPE</a></dd> <dd id="Dune.Generator.ALTER_SET_TYPE" class="variable"><a href="../generator.html#Generator.ALTER_SET_TYPE">ALTER_SET_TYPE</a></dd>
<dd id="Dune.Generator.ARRAY_SIZE_DIM_REQUIRED" class="variable"><a href="../generator.html#Generator.ARRAY_SIZE_DIM_REQUIRED">ARRAY_SIZE_DIM_REQUIRED</a></dd> <dd id="Dune.Generator.ARRAY_SIZE_DIM_REQUIRED" class="variable"><a href="../generator.html#Generator.ARRAY_SIZE_DIM_REQUIRED">ARRAY_SIZE_DIM_REQUIRED</a></dd>
<dd id="Dune.Generator.TIME_PART_SINGULARS" class="variable"><a href="../generator.html#Generator.TIME_PART_SINGULARS">TIME_PART_SINGULARS</a></dd> <dd id="Dune.Generator.TIME_PART_SINGULARS" class="variable"><a href="../generator.html#Generator.TIME_PART_SINGULARS">TIME_PART_SINGULARS</a></dd>
@ -810,6 +811,7 @@ Default: True</li>
<dd id="Dune.Generator.UNWRAPPED_INTERVAL_VALUES" class="variable"><a href="../generator.html#Generator.UNWRAPPED_INTERVAL_VALUES">UNWRAPPED_INTERVAL_VALUES</a></dd> <dd id="Dune.Generator.UNWRAPPED_INTERVAL_VALUES" class="variable"><a href="../generator.html#Generator.UNWRAPPED_INTERVAL_VALUES">UNWRAPPED_INTERVAL_VALUES</a></dd>
<dd id="Dune.Generator.PARAMETERIZABLE_TEXT_TYPES" class="variable"><a href="../generator.html#Generator.PARAMETERIZABLE_TEXT_TYPES">PARAMETERIZABLE_TEXT_TYPES</a></dd> <dd id="Dune.Generator.PARAMETERIZABLE_TEXT_TYPES" class="variable"><a href="../generator.html#Generator.PARAMETERIZABLE_TEXT_TYPES">PARAMETERIZABLE_TEXT_TYPES</a></dd>
<dd id="Dune.Generator.EXPRESSIONS_WITHOUT_NESTED_CTES" class="variable"><a href="../generator.html#Generator.EXPRESSIONS_WITHOUT_NESTED_CTES">EXPRESSIONS_WITHOUT_NESTED_CTES</a></dd> <dd id="Dune.Generator.EXPRESSIONS_WITHOUT_NESTED_CTES" class="variable"><a href="../generator.html#Generator.EXPRESSIONS_WITHOUT_NESTED_CTES">EXPRESSIONS_WITHOUT_NESTED_CTES</a></dd>
<dd id="Dune.Generator.RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS" class="variable"><a href="../generator.html#Generator.RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS">RESPECT_IGNORE_NULLS_UNSUPPORTED_EXPRESSIONS</a></dd>
<dd id="Dune.Generator.SENTINEL_LINE_BREAK" class="variable"><a href="../generator.html#Generator.SENTINEL_LINE_BREAK">SENTINEL_LINE_BREAK</a></dd> <dd id="Dune.Generator.SENTINEL_LINE_BREAK" class="variable"><a href="../generator.html#Generator.SENTINEL_LINE_BREAK">SENTINEL_LINE_BREAK</a></dd>
<dd id="Dune.Generator.pretty" class="variable"><a href="../generator.html#Generator.pretty">pretty</a></dd> <dd id="Dune.Generator.pretty" class="variable"><a href="../generator.html#Generator.pretty">pretty</a></dd>
<dd id="Dune.Generator.identify" class="variable"><a href="../generator.html#Generator.identify">identify</a></dd> <dd id="Dune.Generator.identify" class="variable"><a href="../generator.html#Generator.identify">identify</a></dd>
@ -828,7 +830,7 @@ Default: True</li>
<dd id="Dune.Generator.unsupported" class="function"><a href="../generator.html#Generator.unsupported">unsupported</a></dd> <dd id="Dune.Generator.unsupported" class="function"><a href="../generator.html#Generator.unsupported">unsupported</a></dd>
<dd id="Dune.Generator.sep" class="function"><a href="../generator.html#Generator.sep">sep</a></dd> <dd id="Dune.Generator.sep" class="function"><a href="../generator.html#Generator.sep">sep</a></dd>
<dd id="Dune.Generator.seg" class="function"><a href="../generator.html#Generator.seg">seg</a></dd> <dd id="Dune.Generator.seg" class="function"><a href="../generator.html#Generator.seg">seg</a></dd>
<dd id="Dune.Generator.pad_comment" class="function"><a href="../generator.html#Generator.pad_comment">pad_comment</a></dd> <dd id="Dune.Generator.sanitize_comment" class="function"><a href="../generator.html#Generator.sanitize_comment">sanitize_comment</a></dd>
<dd id="Dune.Generator.maybe_comment" class="function"><a href="../generator.html#Generator.maybe_comment">maybe_comment</a></dd> <dd id="Dune.Generator.maybe_comment" class="function"><a href="../generator.html#Generator.maybe_comment">maybe_comment</a></dd>
<dd id="Dune.Generator.wrap" class="function"><a href="../generator.html#Generator.wrap">wrap</a></dd> <dd id="Dune.Generator.wrap" class="function"><a href="../generator.html#Generator.wrap">wrap</a></dd>
<dd id="Dune.Generator.no_identify" class="function"><a href="../generator.html#Generator.no_identify">no_identify</a></dd> <dd id="Dune.Generator.no_identify" class="function"><a href="../generator.html#Generator.no_identify">no_identify</a></dd>
@ -961,6 +963,7 @@ Default: True</li>
<dd id="Dune.Generator.matchrecognize_sql" class="function"><a href="../generator.html#Generator.matchrecognize_sql">matchrecognize_sql</a></dd> <dd id="Dune.Generator.matchrecognize_sql" class="function"><a href="../generator.html#Generator.matchrecognize_sql">matchrecognize_sql</a></dd>
<dd id="Dune.Generator.query_modifiers" class="function"><a href="../generator.html#Generator.query_modifiers">query_modifiers</a></dd> <dd id="Dune.Generator.query_modifiers" class="function"><a href="../generator.html#Generator.query_modifiers">query_modifiers</a></dd>
<dd id="Dune.Generator.options_modifier" class="function"><a href="../generator.html#Generator.options_modifier">options_modifier</a></dd> <dd id="Dune.Generator.options_modifier" class="function"><a href="../generator.html#Generator.options_modifier">options_modifier</a></dd>
<dd id="Dune.Generator.for_modifiers" class="function"><a href="../generator.html#Generator.for_modifiers">for_modifiers</a></dd>
<dd id="Dune.Generator.queryoption_sql" class="function"><a href="../generator.html#Generator.queryoption_sql">queryoption_sql</a></dd> <dd id="Dune.Generator.queryoption_sql" class="function"><a href="../generator.html#Generator.queryoption_sql">queryoption_sql</a></dd>
<dd id="Dune.Generator.after_limit_modifiers" class="function"><a href="../generator.html#Generator.after_limit_modifiers">after_limit_modifiers</a></dd> <dd id="Dune.Generator.after_limit_modifiers" class="function"><a href="../generator.html#Generator.after_limit_modifiers">after_limit_modifiers</a></dd>
<dd id="Dune.Generator.select_sql" class="function"><a href="../generator.html#Generator.select_sql">select_sql</a></dd> <dd id="Dune.Generator.select_sql" class="function"><a href="../generator.html#Generator.select_sql">select_sql</a></dd>
@ -1189,6 +1192,7 @@ Default: True</li>
<dd id="Dune.Generator.encodeproperty_sql" class="function"><a href="../generator.html#Generator.encodeproperty_sql">encodeproperty_sql</a></dd> <dd id="Dune.Generator.encodeproperty_sql" class="function"><a href="../generator.html#Generator.encodeproperty_sql">encodeproperty_sql</a></dd>
<dd id="Dune.Generator.includeproperty_sql" class="function"><a href="../generator.html#Generator.includeproperty_sql">includeproperty_sql</a></dd> <dd id="Dune.Generator.includeproperty_sql" class="function"><a href="../generator.html#Generator.includeproperty_sql">includeproperty_sql</a></dd>
<dd id="Dune.Generator.xmlelement_sql" class="function"><a href="../generator.html#Generator.xmlelement_sql">xmlelement_sql</a></dd> <dd id="Dune.Generator.xmlelement_sql" class="function"><a href="../generator.html#Generator.xmlelement_sql">xmlelement_sql</a></dd>
<dd id="Dune.Generator.xmlkeyvalueoption_sql" class="function"><a href="../generator.html#Generator.xmlkeyvalueoption_sql">xmlkeyvalueoption_sql</a></dd>
<dd id="Dune.Generator.partitionbyrangeproperty_sql" class="function"><a href="../generator.html#Generator.partitionbyrangeproperty_sql">partitionbyrangeproperty_sql</a></dd> <dd id="Dune.Generator.partitionbyrangeproperty_sql" class="function"><a href="../generator.html#Generator.partitionbyrangeproperty_sql">partitionbyrangeproperty_sql</a></dd>
<dd id="Dune.Generator.partitionbyrangepropertydynamic_sql" class="function"><a href="../generator.html#Generator.partitionbyrangepropertydynamic_sql">partitionbyrangepropertydynamic_sql</a></dd> <dd id="Dune.Generator.partitionbyrangepropertydynamic_sql" class="function"><a href="../generator.html#Generator.partitionbyrangepropertydynamic_sql">partitionbyrangepropertydynamic_sql</a></dd>
<dd id="Dune.Generator.unpivotcolumns_sql" class="function"><a href="../generator.html#Generator.unpivotcolumns_sql">unpivotcolumns_sql</a></dd> <dd id="Dune.Generator.unpivotcolumns_sql" class="function"><a href="../generator.html#Generator.unpivotcolumns_sql">unpivotcolumns_sql</a></dd>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load diff

View file

@ -2003,7 +2003,7 @@ belong to some totally-ordered set.</p>
<section id="DATE_UNITS"> <section id="DATE_UNITS">
<div class="attr variable"> <div class="attr variable">
<span class="name">DATE_UNITS</span> = <span class="name">DATE_UNITS</span> =
<span class="default_value">{&#39;quarter&#39;, &#39;year_month&#39;, &#39;week&#39;, &#39;year&#39;, &#39;month&#39;, &#39;day&#39;}</span> <span class="default_value">{&#39;year_month&#39;, &#39;year&#39;, &#39;day&#39;, &#39;month&#39;, &#39;quarter&#39;, &#39;week&#39;}</span>
</div> </div>

View file

@ -641,7 +641,7 @@
<div class="attr variable"> <div class="attr variable">
<span class="name">ALL_JSON_PATH_PARTS</span> = <span class="name">ALL_JSON_PATH_PARTS</span> =
<input id="ALL_JSON_PATH_PARTS-view-value" class="view-value-toggle-state" type="checkbox" aria-hidden="true" tabindex="-1"> <input id="ALL_JSON_PATH_PARTS-view-value" class="view-value-toggle-state" type="checkbox" aria-hidden="true" tabindex="-1">
<label class="view-value-button pdoc-button" for="ALL_JSON_PATH_PARTS-view-value"></label><span class="default_value">{&lt;class &#39;<a href="expressions.html#JSONPathFilter">sqlglot.expressions.JSONPathFilter</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathUnion">sqlglot.expressions.JSONPathUnion</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathSubscript">sqlglot.expressions.JSONPathSubscript</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathWildcard">sqlglot.expressions.JSONPathWildcard</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathSelector">sqlglot.expressions.JSONPathSelector</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathSlice">sqlglot.expressions.JSONPathSlice</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathScript">sqlglot.expressions.JSONPathScript</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathRoot">sqlglot.expressions.JSONPathRoot</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathRecursive">sqlglot.expressions.JSONPathRecursive</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathKey">sqlglot.expressions.JSONPathKey</a>&#39;&gt;}</span> <label class="view-value-button pdoc-button" for="ALL_JSON_PATH_PARTS-view-value"></label><span class="default_value">{&lt;class &#39;<a href="expressions.html#JSONPathScript">sqlglot.expressions.JSONPathScript</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathRoot">sqlglot.expressions.JSONPathRoot</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathRecursive">sqlglot.expressions.JSONPathRecursive</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathKey">sqlglot.expressions.JSONPathKey</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathWildcard">sqlglot.expressions.JSONPathWildcard</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathFilter">sqlglot.expressions.JSONPathFilter</a>&#39;&gt;, &lt;class &#39;<a 
href="expressions.html#JSONPathUnion">sqlglot.expressions.JSONPathUnion</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathSubscript">sqlglot.expressions.JSONPathSubscript</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathSelector">sqlglot.expressions.JSONPathSelector</a>&#39;&gt;, &lt;class &#39;<a href="expressions.html#JSONPathSlice">sqlglot.expressions.JSONPathSlice</a>&#39;&gt;}</span>
</div> </div>

File diff suppressed because one or more lines are too long

View file

@ -581,7 +581,7 @@ queries if it would result in multiple table selects in a single query:</p>
<div class="attr variable"> <div class="attr variable">
<span class="name">UNMERGABLE_ARGS</span> = <span class="name">UNMERGABLE_ARGS</span> =
<input id="UNMERGABLE_ARGS-view-value" class="view-value-toggle-state" type="checkbox" aria-hidden="true" tabindex="-1"> <input id="UNMERGABLE_ARGS-view-value" class="view-value-toggle-state" type="checkbox" aria-hidden="true" tabindex="-1">
<label class="view-value-button pdoc-button" for="UNMERGABLE_ARGS-view-value"></label><span class="default_value">{&#39;settings&#39;, &#39;connect&#39;, &#39;pivots&#39;, &#39;options&#39;, &#39;qualify&#39;, &#39;windows&#39;, &#39;into&#39;, &#39;with&#39;, &#39;prewhere&#39;, &#39;having&#39;, &#39;distribute&#39;, &#39;limit&#39;, &#39;sample&#39;, &#39;operation_modifiers&#39;, &#39;offset&#39;, &#39;distinct&#39;, &#39;format&#39;, &#39;locks&#39;, &#39;cluster&#39;, &#39;match&#39;, &#39;laterals&#39;, &#39;kind&#39;, &#39;group&#39;, &#39;sort&#39;}</span> <label class="view-value-button pdoc-button" for="UNMERGABLE_ARGS-view-value"></label><span class="default_value">{&#39;kind&#39;, &#39;laterals&#39;, &#39;locks&#39;, &#39;sort&#39;, &#39;limit&#39;, &#39;settings&#39;, &#39;qualify&#39;, &#39;distinct&#39;, &#39;with&#39;, &#39;match&#39;, &#39;pivots&#39;, &#39;having&#39;, &#39;sample&#39;, &#39;windows&#39;, &#39;operation_modifiers&#39;, &#39;offset&#39;, &#39;format&#39;, &#39;cluster&#39;, &#39;connect&#39;, &#39;into&#39;, &#39;options&#39;, &#39;prewhere&#39;, &#39;group&#39;, &#39;distribute&#39;}</span>
</div> </div>

View file

@ -133,7 +133,7 @@
</span><span id="L-69"><a href="#L-69"><span class="linenos"> 69</span></a> <span class="c1"># kind / side syntax (e.g INNER UNION ALL BY NAME) which changes the semantics of the operation</span> </span><span id="L-69"><a href="#L-69"><span class="linenos"> 69</span></a> <span class="c1"># kind / side syntax (e.g INNER UNION ALL BY NAME) which changes the semantics of the operation</span>
</span><span id="L-70"><a href="#L-70"><span class="linenos"> 70</span></a> <span class="n">left</span><span class="p">,</span> <span class="n">right</span> <span class="o">=</span> <span class="n">scope</span><span class="o">.</span><span class="n">union_scopes</span> </span><span id="L-70"><a href="#L-70"><span class="linenos"> 70</span></a> <span class="n">left</span><span class="p">,</span> <span class="n">right</span> <span class="o">=</span> <span class="n">scope</span><span class="o">.</span><span class="n">union_scopes</span>
</span><span id="L-71"><a href="#L-71"><span class="linenos"> 71</span></a> <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">left</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">selects</span><span class="p">)</span> <span class="o">!=</span> <span class="nb">len</span><span class="p">(</span><span class="n">right</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">selects</span><span class="p">):</span> </span><span id="L-71"><a href="#L-71"><span class="linenos"> 71</span></a> <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">left</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">selects</span><span class="p">)</span> <span class="o">!=</span> <span class="nb">len</span><span class="p">(</span><span class="n">right</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">selects</span><span class="p">):</span>
</span><span id="L-72"><a href="#L-72"><span class="linenos"> 72</span></a> <span class="n">scope_sql</span> <span class="o">=</span> <span class="n">scope</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">sql</span><span class="p">()</span> </span><span id="L-72"><a href="#L-72"><span class="linenos"> 72</span></a> <span class="n">scope_sql</span> <span class="o">=</span> <span class="n">scope</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">sql</span><span class="p">(</span><span class="n">dialect</span><span class="o">=</span><span class="n">dialect</span><span class="p">)</span>
</span><span id="L-73"><a href="#L-73"><span class="linenos"> 73</span></a> <span class="k">raise</span> <span class="n">OptimizeError</span><span class="p">(</span> </span><span id="L-73"><a href="#L-73"><span class="linenos"> 73</span></a> <span class="k">raise</span> <span class="n">OptimizeError</span><span class="p">(</span>
</span><span id="L-74"><a href="#L-74"><span class="linenos"> 74</span></a> <span class="sa">f</span><span class="s2">&quot;Invalid set operation due to column mismatch: </span><span class="si">{</span><span class="n">scope_sql</span><span class="si">}</span><span class="s2">.&quot;</span> </span><span id="L-74"><a href="#L-74"><span class="linenos"> 74</span></a> <span class="sa">f</span><span class="s2">&quot;Invalid set operation due to column mismatch: </span><span class="si">{</span><span class="n">scope_sql</span><span class="si">}</span><span class="s2">.&quot;</span>
</span><span id="L-75"><a href="#L-75"><span class="linenos"> 75</span></a> <span class="p">)</span> </span><span id="L-75"><a href="#L-75"><span class="linenos"> 75</span></a> <span class="p">)</span>
@ -325,7 +325,7 @@
</span><span id="pushdown_projections-70"><a href="#pushdown_projections-70"><span class="linenos"> 70</span></a> <span class="c1"># kind / side syntax (e.g INNER UNION ALL BY NAME) which changes the semantics of the operation</span> </span><span id="pushdown_projections-70"><a href="#pushdown_projections-70"><span class="linenos"> 70</span></a> <span class="c1"># kind / side syntax (e.g INNER UNION ALL BY NAME) which changes the semantics of the operation</span>
</span><span id="pushdown_projections-71"><a href="#pushdown_projections-71"><span class="linenos"> 71</span></a> <span class="n">left</span><span class="p">,</span> <span class="n">right</span> <span class="o">=</span> <span class="n">scope</span><span class="o">.</span><span class="n">union_scopes</span> </span><span id="pushdown_projections-71"><a href="#pushdown_projections-71"><span class="linenos"> 71</span></a> <span class="n">left</span><span class="p">,</span> <span class="n">right</span> <span class="o">=</span> <span class="n">scope</span><span class="o">.</span><span class="n">union_scopes</span>
</span><span id="pushdown_projections-72"><a href="#pushdown_projections-72"><span class="linenos"> 72</span></a> <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">left</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">selects</span><span class="p">)</span> <span class="o">!=</span> <span class="nb">len</span><span class="p">(</span><span class="n">right</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">selects</span><span class="p">):</span> </span><span id="pushdown_projections-72"><a href="#pushdown_projections-72"><span class="linenos"> 72</span></a> <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">left</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">selects</span><span class="p">)</span> <span class="o">!=</span> <span class="nb">len</span><span class="p">(</span><span class="n">right</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">selects</span><span class="p">):</span>
</span><span id="pushdown_projections-73"><a href="#pushdown_projections-73"><span class="linenos"> 73</span></a> <span class="n">scope_sql</span> <span class="o">=</span> <span class="n">scope</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">sql</span><span class="p">()</span> </span><span id="pushdown_projections-73"><a href="#pushdown_projections-73"><span class="linenos"> 73</span></a> <span class="n">scope_sql</span> <span class="o">=</span> <span class="n">scope</span><span class="o">.</span><span class="n">expression</span><span class="o">.</span><span class="n">sql</span><span class="p">(</span><span class="n">dialect</span><span class="o">=</span><span class="n">dialect</span><span class="p">)</span>
</span><span id="pushdown_projections-74"><a href="#pushdown_projections-74"><span class="linenos"> 74</span></a> <span class="k">raise</span> <span class="n">OptimizeError</span><span class="p">(</span> </span><span id="pushdown_projections-74"><a href="#pushdown_projections-74"><span class="linenos"> 74</span></a> <span class="k">raise</span> <span class="n">OptimizeError</span><span class="p">(</span>
</span><span id="pushdown_projections-75"><a href="#pushdown_projections-75"><span class="linenos"> 75</span></a> <span class="sa">f</span><span class="s2">&quot;Invalid set operation due to column mismatch: </span><span class="si">{</span><span class="n">scope_sql</span><span class="si">}</span><span class="s2">.&quot;</span> </span><span id="pushdown_projections-75"><a href="#pushdown_projections-75"><span class="linenos"> 75</span></a> <span class="sa">f</span><span class="s2">&quot;Invalid set operation due to column mismatch: </span><span class="si">{</span><span class="n">scope_sql</span><span class="si">}</span><span class="s2">.&quot;</span>
</span><span id="pushdown_projections-76"><a href="#pushdown_projections-76"><span class="linenos"> 76</span></a> <span class="p">)</span> </span><span id="pushdown_projections-76"><a href="#pushdown_projections-76"><span class="linenos"> 76</span></a> <span class="p">)</span>

View file

@ -3201,7 +3201,7 @@ prefix are statically known.</p>
<div class="attr variable"> <div class="attr variable">
<span class="name">DATETRUNC_COMPARISONS</span> = <span class="name">DATETRUNC_COMPARISONS</span> =
<input id="DATETRUNC_COMPARISONS-view-value" class="view-value-toggle-state" type="checkbox" aria-hidden="true" tabindex="-1"> <input id="DATETRUNC_COMPARISONS-view-value" class="view-value-toggle-state" type="checkbox" aria-hidden="true" tabindex="-1">
<label class="view-value-button pdoc-button" for="DATETRUNC_COMPARISONS-view-value"></label><span class="default_value">{&lt;class &#39;<a href="../expressions.html#In">sqlglot.expressions.In</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#LT">sqlglot.expressions.LT</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#NEQ">sqlglot.expressions.NEQ</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#EQ">sqlglot.expressions.EQ</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#GTE">sqlglot.expressions.GTE</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#GT">sqlglot.expressions.GT</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#LTE">sqlglot.expressions.LTE</a>&#39;&gt;}</span> <label class="view-value-button pdoc-button" for="DATETRUNC_COMPARISONS-view-value"></label><span class="default_value">{&lt;class &#39;<a href="../expressions.html#LT">sqlglot.expressions.LT</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#GTE">sqlglot.expressions.GTE</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#EQ">sqlglot.expressions.EQ</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#LTE">sqlglot.expressions.LTE</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#NEQ">sqlglot.expressions.NEQ</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#GT">sqlglot.expressions.GT</a>&#39;&gt;, &lt;class &#39;<a href="../expressions.html#In">sqlglot.expressions.In</a>&#39;&gt;}</span>
</div> </div>
@ -3285,7 +3285,7 @@ prefix are statically known.</p>
<section id="JOINS"> <section id="JOINS">
<div class="attr variable"> <div class="attr variable">
<span class="name">JOINS</span> = <span class="name">JOINS</span> =
<span class="default_value">{(&#39;RIGHT&#39;, &#39;&#39;), (&#39;RIGHT&#39;, &#39;OUTER&#39;), (&#39;&#39;, &#39;INNER&#39;), (&#39;&#39;, &#39;&#39;)}</span> <span class="default_value">{(&#39;RIGHT&#39;, &#39;&#39;), (&#39;&#39;, &#39;INNER&#39;), (&#39;RIGHT&#39;, &#39;OUTER&#39;), (&#39;&#39;, &#39;&#39;)}</span>
</div> </div>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -29,6 +29,7 @@ setup(
"types-pytz", "types-pytz",
"typing_extensions", "typing_extensions",
"maturin>=1.4,<2.0", "maturin>=1.4,<2.0",
"pyperf",
], ],
"rs": [f"sqlglotrs=={sqlglotrs_version()}"], "rs": [f"sqlglotrs=={sqlglotrs_version()}"],
}, },

View file

@ -494,6 +494,8 @@ class BigQuery(Dialect):
(prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R") (prefix + q, q) for q in t.cast(t.List[str], QUOTES) for prefix in ("r", "R")
] ]
NESTED_COMMENTS = False
KEYWORDS = { KEYWORDS = {
**tokens.Tokenizer.KEYWORDS, **tokens.Tokenizer.KEYWORDS,
"ANY TYPE": TokenType.VARIANT, "ANY TYPE": TokenType.VARIANT,
@ -930,6 +932,7 @@ class BigQuery(Dialect):
exp.Array: inline_array_unless_query, exp.Array: inline_array_unless_query,
exp.ArrayContains: _array_contains_sql, exp.ArrayContains: _array_contains_sql,
exp.ArrayFilter: filter_array_using_unnest, exp.ArrayFilter: filter_array_using_unnest,
exp.ArrayRemove: filter_array_using_unnest,
exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]), exp.Cast: transforms.preprocess([transforms.remove_precision_parameterized_types]),
exp.CollateProperty: lambda self, e: ( exp.CollateProperty: lambda self, e: (
f"DEFAULT COLLATE {self.sql(e, 'this')}" f"DEFAULT COLLATE {self.sql(e, 'this')}"

View file

@ -15,6 +15,7 @@ from sqlglot.dialects.dialect import (
no_pivot_sql, no_pivot_sql,
build_json_extract_path, build_json_extract_path,
rename_func, rename_func,
remove_from_array_using_filter,
sha256_sql, sha256_sql,
strposition_sql, strposition_sql,
var_map_sql, var_map_sql,
@ -1061,6 +1062,7 @@ class ClickHouse(Dialect):
exp.ApproxDistinct: rename_func("uniq"), exp.ApproxDistinct: rename_func("uniq"),
exp.ArrayConcat: rename_func("arrayConcat"), exp.ArrayConcat: rename_func("arrayConcat"),
exp.ArrayFilter: lambda self, e: self.func("arrayFilter", e.expression, e.this), exp.ArrayFilter: lambda self, e: self.func("arrayFilter", e.expression, e.this),
exp.ArrayRemove: remove_from_array_using_filter,
exp.ArraySum: rename_func("arraySum"), exp.ArraySum: rename_func("arraySum"),
exp.ArgMax: arg_max_or_min_no_count("argMax"), exp.ArgMax: arg_max_or_min_no_count("argMax"),
exp.ArgMin: arg_max_or_min_no_count("argMin"), exp.ArgMin: arg_max_or_min_no_count("argMin"),

View file

@ -499,6 +499,9 @@ class Dialect(metaclass=_Dialect):
equivalent of CREATE SCHEMA is CREATE DATABASE. equivalent of CREATE SCHEMA is CREATE DATABASE.
""" """
# Whether ADD is present for each column added by ALTER TABLE
ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = True
# --- Autofilled --- # --- Autofilled ---
tokenizer_class = Tokenizer tokenizer_class = Tokenizer
@ -1733,13 +1736,18 @@ def json_path_key_only_name(self: Generator, expression: exp.JSONPathKey) -> str
return expression.name return expression.name
def filter_array_using_unnest(self: Generator, expression: exp.ArrayFilter) -> str: def filter_array_using_unnest(
self: Generator, expression: exp.ArrayFilter | exp.ArrayRemove
) -> str:
cond = expression.expression cond = expression.expression
if isinstance(cond, exp.Lambda) and len(cond.expressions) == 1: if isinstance(cond, exp.Lambda) and len(cond.expressions) == 1:
alias = cond.expressions[0] alias = cond.expressions[0]
cond = cond.this cond = cond.this
elif isinstance(cond, exp.Predicate): elif isinstance(cond, exp.Predicate):
alias = "_u" alias = "_u"
elif isinstance(expression, exp.ArrayRemove):
alias = "_u"
cond = exp.NEQ(this=alias, expression=expression.expression)
else: else:
self.unsupported("Unsupported filter condition") self.unsupported("Unsupported filter condition")
return "" return ""
@ -1749,6 +1757,16 @@ def filter_array_using_unnest(self: Generator, expression: exp.ArrayFilter) -> s
return self.sql(exp.Array(expressions=[filtered])) return self.sql(exp.Array(expressions=[filtered]))
def remove_from_array_using_filter(self: Generator, expression: exp.ArrayRemove) -> str:
lambda_id = exp.to_identifier("_u")
cond = exp.NEQ(this=lambda_id, expression=expression.expression)
return self.sql(
exp.ArrayFilter(
this=expression.this, expression=exp.Lambda(this=cond, expressions=[lambda_id])
)
)
def to_number_with_nls_param(self: Generator, expression: exp.ToNumber) -> str: def to_number_with_nls_param(self: Generator, expression: exp.ToNumber) -> str:
return self.func( return self.func(
"TO_NUMBER", "TO_NUMBER",

View file

@ -26,6 +26,7 @@ from sqlglot.dialects.dialect import (
no_timestamp_sql, no_timestamp_sql,
pivot_column_names, pivot_column_names,
rename_func, rename_func,
remove_from_array_using_filter,
strposition_sql, strposition_sql,
str_to_time_sql, str_to_time_sql,
timestamptrunc_sql, timestamptrunc_sql,
@ -625,6 +626,7 @@ class DuckDB(Dialect):
exp.ApproxDistinct: approx_count_distinct_sql, exp.ApproxDistinct: approx_count_distinct_sql,
exp.Array: inline_array_unless_query, exp.Array: inline_array_unless_query,
exp.ArrayFilter: rename_func("LIST_FILTER"), exp.ArrayFilter: rename_func("LIST_FILTER"),
exp.ArrayRemove: remove_from_array_using_filter,
exp.ArraySort: _array_sort_sql, exp.ArraySort: _array_sort_sql,
exp.ArraySum: rename_func("LIST_SUM"), exp.ArraySum: rename_func("LIST_SUM"),
exp.BitwiseXor: rename_func("XOR"), exp.BitwiseXor: rename_func("XOR"),

View file

@ -741,6 +741,17 @@ class Hive(Dialect):
return self.func("STRUCT", *values) return self.func("STRUCT", *values)
def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
return super().columndef_sql(
expression,
sep=(
": "
if isinstance(expression.parent, exp.DataType)
and expression.parent.is_type("struct")
else sep
),
)
def alterset_sql(self, expression: exp.AlterSet) -> str: def alterset_sql(self, expression: exp.AlterSet) -> str:
exprs = self.expressions(expression, flat=True) exprs = self.expressions(expression, flat=True)
exprs = f" {exprs}" if exprs else "" exprs = f" {exprs}" if exprs else ""

View file

@ -187,6 +187,8 @@ class MySQL(Dialect):
BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")] BIT_STRINGS = [("b'", "'"), ("B'", "'"), ("0b", "")]
HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")] HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", "")]
NESTED_COMMENTS = False
KEYWORDS = { KEYWORDS = {
**tokens.Tokenizer.KEYWORDS, **tokens.Tokenizer.KEYWORDS,
"CHARSET": TokenType.CHARACTER_SET, "CHARSET": TokenType.CHARACTER_SET,
@ -211,6 +213,7 @@ class MySQL(Dialect):
"START": TokenType.BEGIN, "START": TokenType.BEGIN,
"SIGNED": TokenType.BIGINT, "SIGNED": TokenType.BIGINT,
"SIGNED INTEGER": TokenType.BIGINT, "SIGNED INTEGER": TokenType.BIGINT,
"TIMESTAMP": TokenType.TIMESTAMPTZ,
"UNLOCK TABLES": TokenType.COMMAND, "UNLOCK TABLES": TokenType.COMMAND,
"UNSIGNED": TokenType.UBIGINT, "UNSIGNED": TokenType.UBIGINT,
"UNSIGNED INTEGER": TokenType.UBIGINT, "UNSIGNED INTEGER": TokenType.UBIGINT,

View file

@ -44,6 +44,7 @@ class Oracle(Dialect):
TABLESAMPLE_SIZE_IS_PERCENT = True TABLESAMPLE_SIZE_IS_PERCENT = True
NULL_ORDERING = "nulls_are_large" NULL_ORDERING = "nulls_are_large"
ON_CONDITION_EMPTY_BEFORE_ERROR = False ON_CONDITION_EMPTY_BEFORE_ERROR = False
ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
# See section 8: https://docs.oracle.com/cd/A97630_01/server.920/a96540/sql_elements9a.htm # See section 8: https://docs.oracle.com/cd/A97630_01/server.920/a96540/sql_elements9a.htm
NORMALIZATION_STRATEGY = NormalizationStrategy.UPPERCASE NORMALIZATION_STRATEGY = NormalizationStrategy.UPPERCASE
@ -104,7 +105,6 @@ class Oracle(Dialect):
} }
class Parser(parser.Parser): class Parser(parser.Parser):
ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
WINDOW_BEFORE_PAREN_TOKENS = {TokenType.OVER, TokenType.KEEP} WINDOW_BEFORE_PAREN_TOKENS = {TokenType.OVER, TokenType.KEEP}
VALUES_FOLLOWED_BY_PAREN = False VALUES_FOLLOWED_BY_PAREN = False
@ -341,11 +341,8 @@ class Oracle(Dialect):
def offset_sql(self, expression: exp.Offset) -> str: def offset_sql(self, expression: exp.Offset) -> str:
return f"{super().offset_sql(expression)} ROWS" return f"{super().offset_sql(expression)} ROWS"
def add_column_sql(self, expression: exp.Alter) -> str: def add_column_sql(self, expression: exp.Expression) -> str:
actions = self.expressions(expression, key="actions", flat=True) return f"ADD {self.sql(expression)}"
if len(expression.args.get("actions", [])) > 1:
return f"ADD ({actions})"
return f"ADD {actions}"
def queryoption_sql(self, expression: exp.QueryOption) -> str: def queryoption_sql(self, expression: exp.QueryOption) -> str:
option = self.sql(expression, "this") option = self.sql(expression, "this")

View file

@ -13,6 +13,7 @@ from sqlglot.dialects.dialect import (
datestrtodate_sql, datestrtodate_sql,
build_formatted_time, build_formatted_time,
filter_array_using_unnest, filter_array_using_unnest,
inline_array_sql,
json_extract_segments, json_extract_segments,
json_path_key_only_name, json_path_key_only_name,
max_or_greatest, max_or_greatest,
@ -728,11 +729,12 @@ class Postgres(Dialect):
def array_sql(self, expression: exp.Array) -> str: def array_sql(self, expression: exp.Array) -> str:
exprs = expression.expressions exprs = expression.expressions
return ( func_name = self.normalize_func("ARRAY")
f"{self.normalize_func('ARRAY')}({self.sql(exprs[0])})"
if isinstance(seq_get(exprs, 0), exp.Select) if isinstance(seq_get(exprs, 0), exp.Select):
else f"{self.normalize_func('ARRAY')}[{self.expressions(expression, flat=True)}]" return f"{func_name}({self.sql(exprs[0])})"
)
return f"{func_name}{inline_array_sql(self, expression)}"
def computedcolumnconstraint_sql(self, expression: exp.ComputedColumnConstraint) -> str: def computedcolumnconstraint_sql(self, expression: exp.ComputedColumnConstraint) -> str:
return f"GENERATED ALWAYS AS ({self.sql(expression, 'this')}) STORED" return f"GENERATED ALWAYS AS ({self.sql(expression, 'this')}) STORED"

View file

@ -294,6 +294,8 @@ class Presto(Dialect):
for prefix in ("U&", "u&") for prefix in ("U&", "u&")
] ]
NESTED_COMMENTS = False
KEYWORDS = { KEYWORDS = {
**tokens.Tokenizer.KEYWORDS, **tokens.Tokenizer.KEYWORDS,
"DEALLOCATE PREPARE": TokenType.COMMAND, "DEALLOCATE PREPARE": TokenType.COMMAND,

View file

@ -47,7 +47,7 @@ class PRQL(Dialect):
"DERIVE": lambda self, query: self._parse_selection(query), "DERIVE": lambda self, query: self._parse_selection(query),
"SELECT": lambda self, query: self._parse_selection(query, append=False), "SELECT": lambda self, query: self._parse_selection(query, append=False),
"TAKE": lambda self, query: self._parse_take(query), "TAKE": lambda self, query: self._parse_take(query),
"FILTER": lambda self, query: query.where(self._parse_assignment()), "FILTER": lambda self, query: query.where(self._parse_disjunction()),
"APPEND": lambda self, query: query.union( "APPEND": lambda self, query: query.union(
_select_all(self._parse_table()), distinct=False, copy=False _select_all(self._parse_table()), distinct=False, copy=False
), ),

View file

@ -347,14 +347,3 @@ class Spark2(Hive):
return self.func("TO_JSON", arg) return self.func("TO_JSON", arg)
return super(Hive.Generator, self).cast_sql(expression, safe_prefix=safe_prefix) return super(Hive.Generator, self).cast_sql(expression, safe_prefix=safe_prefix)
def columndef_sql(self, expression: exp.ColumnDef, sep: str = " ") -> str:
return super().columndef_sql(
expression,
sep=(
": "
if isinstance(expression.parent, exp.DataType)
and expression.parent.is_type("struct")
else sep
),
)

View file

@ -94,6 +94,8 @@ class SQLite(Dialect):
IDENTIFIERS = ['"', ("[", "]"), "`"] IDENTIFIERS = ['"', ("[", "]"), "`"]
HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", ""), ("0X", "")] HEX_STRINGS = [("x'", "'"), ("X'", "'"), ("0x", ""), ("0X", "")]
NESTED_COMMENTS = False
KEYWORDS = tokens.Tokenizer.KEYWORDS.copy() KEYWORDS = tokens.Tokenizer.KEYWORDS.copy()
KEYWORDS.pop("/*+") KEYWORDS.pop("/*+")

View file

@ -102,6 +102,24 @@ OPTIONS: parser.OPTIONS_TYPE = {
"USE": ("PLAN",), "USE": ("PLAN",),
} }
XML_OPTIONS: parser.OPTIONS_TYPE = {
**dict.fromkeys(
(
"AUTO",
"EXPLICIT",
"TYPE",
),
tuple(),
),
"ELEMENTS": (
"XSINIL",
"ABSENT",
),
"BINARY": ("BASE64",),
}
OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL") OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL")
@ -390,6 +408,7 @@ class TSQL(Dialect):
TYPED_DIVISION = True TYPED_DIVISION = True
CONCAT_COALESCE = True CONCAT_COALESCE = True
NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE
ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"
@ -474,6 +493,7 @@ class TSQL(Dialect):
"114": "%H:%M:%S:%f", "114": "%H:%M:%S:%f",
"120": "%Y-%m-%d %H:%M:%S", "120": "%Y-%m-%d %H:%M:%S",
"121": "%Y-%m-%d %H:%M:%S.%f", "121": "%Y-%m-%d %H:%M:%S.%f",
"126": "%Y-%m-%dT%H:%M:%S.%f",
} }
FORMAT_TIME_MAPPING = { FORMAT_TIME_MAPPING = {
@ -540,13 +560,13 @@ class TSQL(Dialect):
class Parser(parser.Parser): class Parser(parser.Parser):
SET_REQUIRES_ASSIGNMENT_DELIMITER = False SET_REQUIRES_ASSIGNMENT_DELIMITER = False
LOG_DEFAULTS_TO_LN = True LOG_DEFAULTS_TO_LN = True
ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
STRING_ALIASES = True STRING_ALIASES = True
NO_PAREN_IF_COMMANDS = False NO_PAREN_IF_COMMANDS = False
QUERY_MODIFIER_PARSERS = { QUERY_MODIFIER_PARSERS = {
**parser.Parser.QUERY_MODIFIER_PARSERS, **parser.Parser.QUERY_MODIFIER_PARSERS,
TokenType.OPTION: lambda self: ("options", self._parse_options()), TokenType.OPTION: lambda self: ("options", self._parse_options()),
TokenType.FOR: lambda self: ("for", self._parse_for()),
} }
# T-SQL does not allow BEGIN to be used as an identifier # T-SQL does not allow BEGIN to be used as an identifier
@ -637,6 +657,9 @@ class TSQL(Dialect):
else self.expression(exp.ScopeResolution, this=this, expression=to), else self.expression(exp.ScopeResolution, this=this, expression=to),
} }
def _parse_alter_table_set(self) -> exp.AlterSet:
return self._parse_wrapped(super()._parse_alter_table_set)
def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]: def _parse_wrapped_select(self, table: bool = False) -> t.Optional[exp.Expression]:
if self._match(TokenType.MERGE): if self._match(TokenType.MERGE):
comments = self._prev_comments comments = self._prev_comments
@ -670,6 +693,28 @@ class TSQL(Dialect):
return self._parse_wrapped_csv(_parse_option) return self._parse_wrapped_csv(_parse_option)
def _parse_xml_key_value_option(self) -> exp.XMLKeyValueOption:
this = self._parse_primary_or_var()
if self._match(TokenType.L_PAREN, advance=False):
expression = self._parse_wrapped(self._parse_string)
else:
expression = None
return exp.XMLKeyValueOption(this=this, expression=expression)
def _parse_for(self) -> t.Optional[t.List[exp.Expression]]:
if not self._match_pair(TokenType.FOR, TokenType.XML):
return None
def _parse_for_xml() -> t.Optional[exp.Expression]:
return self.expression(
exp.QueryOption,
this=self._parse_var_from_options(XML_OPTIONS, raise_unmatched=False)
or self._parse_xml_key_value_option(),
)
return self._parse_csv(_parse_for_xml)
def _parse_projections(self) -> t.List[exp.Expression]: def _parse_projections(self) -> t.List[exp.Expression]:
""" """
T-SQL supports the syntax alias = expression in the SELECT's projection list, T-SQL supports the syntax alias = expression in the SELECT's projection list,
@ -921,6 +966,7 @@ class TSQL(Dialect):
COPY_PARAMS_EQ_REQUIRED = True COPY_PARAMS_EQ_REQUIRED = True
PARSE_JSON_NAME = None PARSE_JSON_NAME = None
EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False
ALTER_SET_WRAPPED = True
ALTER_SET_TYPE = "" ALTER_SET_TYPE = ""
EXPRESSIONS_WITHOUT_NESTED_CTES = { EXPRESSIONS_WITHOUT_NESTED_CTES = {

View file

@ -1243,6 +1243,45 @@ class Query(Expression):
""" """
raise NotImplementedError("Query objects must implement `select`") raise NotImplementedError("Query objects must implement `select`")
def where(
self: Q,
*expressions: t.Optional[ExpOrStr],
append: bool = True,
dialect: DialectType = None,
copy: bool = True,
**opts,
) -> Q:
"""
Append to or set the WHERE expressions.
Examples:
>>> Select().select("x").from_("tbl").where("x = 'a' OR x < 'b'").sql()
"SELECT x FROM tbl WHERE x = 'a' OR x < 'b'"
Args:
*expressions: the SQL code strings to parse.
If an `Expression` instance is passed, it will be used as-is.
Multiple expressions are combined with an AND operator.
append: if `True`, AND the new expressions to any existing expression.
Otherwise, this resets the expression.
dialect: the dialect used to parse the input expressions.
copy: if `False`, modify this expression instance in-place.
opts: other options to use to parse the input expressions.
Returns:
The modified expression.
"""
return _apply_conjunction_builder(
*[expr.this if isinstance(expr, Where) else expr for expr in expressions],
instance=self,
arg="where",
append=append,
into=Where,
dialect=dialect,
copy=copy,
**opts,
)
def with_( def with_(
self: Q, self: Q,
alias: ExpOrStr, alias: ExpOrStr,
@ -4054,45 +4093,6 @@ class Select(Query):
**opts, **opts,
) )
def where(
self,
*expressions: t.Optional[ExpOrStr],
append: bool = True,
dialect: DialectType = None,
copy: bool = True,
**opts,
) -> Select:
"""
Append to or set the WHERE expressions.
Example:
>>> Select().select("x").from_("tbl").where("x = 'a' OR x < 'b'").sql()
"SELECT x FROM tbl WHERE x = 'a' OR x < 'b'"
Args:
*expressions: the SQL code strings to parse.
If an `Expression` instance is passed, it will be used as-is.
Multiple expressions are combined with an AND operator.
append: if `True`, AND the new expressions to any existing expression.
Otherwise, this resets the expression.
dialect: the dialect used to parse the input expressions.
copy: if `False`, modify this expression instance in-place.
opts: other options to use to parse the input expressions.
Returns:
Select: the modified expression.
"""
return _apply_conjunction_builder(
*expressions,
instance=self,
arg="where",
append=append,
into=Where,
dialect=dialect,
copy=copy,
**opts,
)
def having( def having(
self, self,
*expressions: t.Optional[ExpOrStr], *expressions: t.Optional[ExpOrStr],
@ -5370,6 +5370,10 @@ class AggFunc(Func):
pass pass
class ArrayRemove(Func):
arg_types = {"this": True, "expression": True}
class ParameterizedAgg(AggFunc): class ParameterizedAgg(AggFunc):
arg_types = {"this": True, "expressions": True, "params": True} arg_types = {"this": True, "expressions": True, "params": True}
@ -5573,7 +5577,7 @@ class String(Func):
class StringToArray(Func): class StringToArray(Func):
arg_types = {"this": True, "expression": True, "null": False} arg_types = {"this": True, "expression": True, "null": False}
_sql_names = ["STRING_TO_ARRAY", "SPLIT_BY_STRING"] _sql_names = ["STRING_TO_ARRAY", "SPLIT_BY_STRING", "STRTOK_TO_ARRAY"]
class ArrayOverlaps(Binary, Func): class ArrayOverlaps(Binary, Func):
@ -6972,6 +6976,11 @@ class XMLNamespace(Expression):
pass pass
# https://learn.microsoft.com/en-us/sql/t-sql/queries/select-for-clause-transact-sql?view=sql-server-ver17#syntax
class XMLKeyValueOption(Expression):
arg_types = {"this": True, "expression": False}
class Year(Func): class Year(Func):
pass pass
@ -7918,7 +7927,7 @@ def parse_identifier(name: str | Identifier, dialect: DialectType = None) -> Ide
return expression return expression
INTERVAL_STRING_RE = re.compile(r"\s*(-?[0-9]+)\s*([a-zA-Z]+)\s*") INTERVAL_STRING_RE = re.compile(r"\s*(-?[0-9]+(?:\.[0-9]+)?)\s*([a-zA-Z]+)\s*")
def to_interval(interval: str | Literal) -> Interval: def to_interval(interval: str | Literal) -> Interval:

View file

@ -460,6 +460,9 @@ class Generator(metaclass=_Generator):
# Whether UNIX_SECONDS(timestamp) is supported # Whether UNIX_SECONDS(timestamp) is supported
SUPPORTS_UNIX_SECONDS = False SUPPORTS_UNIX_SECONDS = False
# Whether to wrap <props> in `AlterSet`, e.g., ALTER ... SET (<props>)
ALTER_SET_WRAPPED = False
# The name to generate for the JSONPath expression. If `None`, only `this` will be generated # The name to generate for the JSONPath expression. If `None`, only `this` will be generated
PARSE_JSON_NAME: t.Optional[str] = "PARSE_JSON" PARSE_JSON_NAME: t.Optional[str] = "PARSE_JSON"
@ -808,9 +811,14 @@ class Generator(metaclass=_Generator):
def seg(self, sql: str, sep: str = " ") -> str: def seg(self, sql: str, sep: str = " ") -> str:
return f"{self.sep(sep)}{sql}" return f"{self.sep(sep)}{sql}"
def pad_comment(self, comment: str) -> str: def sanitize_comment(self, comment: str) -> str:
comment = " " + comment if comment[0].strip() else comment comment = " " + comment if comment[0].strip() else comment
comment = comment + " " if comment[-1].strip() else comment comment = comment + " " if comment[-1].strip() else comment
if not self.dialect.tokenizer_class.NESTED_COMMENTS:
# Necessary workaround to avoid syntax errors due to nesting: /* ... */ ... */
comment = comment.replace("*/", "* /")
return comment return comment
def maybe_comment( def maybe_comment(
@ -830,7 +838,7 @@ class Generator(metaclass=_Generator):
return sql return sql
comments_sql = " ".join( comments_sql = " ".join(
f"/*{self.pad_comment(comment)}*/" for comment in comments if comment f"/*{self.sanitize_comment(comment)}*/" for comment in comments if comment
) )
if not comments_sql: if not comments_sql:
@ -2596,6 +2604,7 @@ class Generator(metaclass=_Generator):
*self.offset_limit_modifiers(expression, isinstance(limit, exp.Fetch), limit), *self.offset_limit_modifiers(expression, isinstance(limit, exp.Fetch), limit),
*self.after_limit_modifiers(expression), *self.after_limit_modifiers(expression),
self.options_modifier(expression), self.options_modifier(expression),
self.for_modifiers(expression),
sep="", sep="",
) )
@ -2603,6 +2612,10 @@ class Generator(metaclass=_Generator):
options = self.expressions(expression, key="options") options = self.expressions(expression, key="options")
return f" {options}" if options else "" return f" {options}" if options else ""
def for_modifiers(self, expression: exp.Expression) -> str:
for_modifiers = self.expressions(expression, key="for")
return f"{self.sep()}FOR XML{self.seg(for_modifiers)}" if for_modifiers else ""
def queryoption_sql(self, expression: exp.QueryOption) -> str: def queryoption_sql(self, expression: exp.QueryOption) -> str:
self.unsupported("Unsupported query option.") self.unsupported("Unsupported query option.")
return "" return ""
@ -3248,7 +3261,7 @@ class Generator(metaclass=_Generator):
if expression.comments and self.comments: if expression.comments and self.comments:
for comment in expression.comments: for comment in expression.comments:
if comment: if comment:
op += f" /*{self.pad_comment(comment)}*/" op += f" /*{self.sanitize_comment(comment)}*/"
stack.extend((op, expression.left)) stack.extend((op, expression.left))
return op return op
@ -3430,21 +3443,32 @@ class Generator(metaclass=_Generator):
def alterset_sql(self, expression: exp.AlterSet) -> str: def alterset_sql(self, expression: exp.AlterSet) -> str:
exprs = self.expressions(expression, flat=True) exprs = self.expressions(expression, flat=True)
if self.ALTER_SET_WRAPPED:
exprs = f"({exprs})"
return f"SET {exprs}" return f"SET {exprs}"
def alter_sql(self, expression: exp.Alter) -> str: def alter_sql(self, expression: exp.Alter) -> str:
actions = expression.args["actions"] actions = expression.args["actions"]
if isinstance(actions[0], exp.ColumnDef): if not self.dialect.ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN and isinstance(
actions = self.add_column_sql(expression) actions[0], exp.ColumnDef
elif isinstance(actions[0], exp.Schema): ):
actions = self.expressions(expression, key="actions", prefix="ADD COLUMNS ") actions_sql = self.expressions(expression, key="actions", flat=True)
elif isinstance(actions[0], exp.Delete): actions_sql = f"ADD {actions_sql}"
actions = self.expressions(expression, key="actions", flat=True)
elif isinstance(actions[0], exp.Query):
actions = "AS " + self.expressions(expression, key="actions")
else: else:
actions = self.expressions(expression, key="actions", flat=True) actions_list = []
for action in actions:
if isinstance(action, (exp.ColumnDef, exp.Schema)):
action_sql = self.add_column_sql(action)
else:
action_sql = self.sql(action)
if isinstance(action, exp.Query):
action_sql = f"AS {action_sql}"
actions_list.append(action_sql)
actions_sql = self.format_args(*actions_list)
exists = " IF EXISTS" if expression.args.get("exists") else "" exists = " IF EXISTS" if expression.args.get("exists") else ""
on_cluster = self.sql(expression, "cluster") on_cluster = self.sql(expression, "cluster")
@ -3455,17 +3479,18 @@ class Generator(metaclass=_Generator):
kind = self.sql(expression, "kind") kind = self.sql(expression, "kind")
not_valid = " NOT VALID" if expression.args.get("not_valid") else "" not_valid = " NOT VALID" if expression.args.get("not_valid") else ""
return f"ALTER {kind}{exists}{only} {self.sql(expression, 'this')}{on_cluster} {actions}{not_valid}{options}" return f"ALTER {kind}{exists}{only} {self.sql(expression, 'this')}{on_cluster} {actions_sql}{not_valid}{options}"
def add_column_sql(self, expression: exp.Alter) -> str: def add_column_sql(self, expression: exp.Expression) -> str:
if self.ALTER_TABLE_INCLUDE_COLUMN_KEYWORD: sql = self.sql(expression)
return self.expressions( if isinstance(expression, exp.Schema):
expression, column_text = " COLUMNS"
key="actions", elif isinstance(expression, exp.ColumnDef) and self.ALTER_TABLE_INCLUDE_COLUMN_KEYWORD:
prefix="ADD COLUMN ", column_text = " COLUMN"
skip_first=True, else:
) column_text = ""
return f"ADD {self.expressions(expression, key='actions', flat=True)}"
return f"ADD{column_text} {sql}"
def droppartition_sql(self, expression: exp.DropPartition) -> str: def droppartition_sql(self, expression: exp.DropPartition) -> str:
expressions = self.expressions(expression) expressions = self.expressions(expression)
@ -4795,6 +4820,12 @@ class Generator(metaclass=_Generator):
name = f"NAME {self.sql(expression, 'this')}" name = f"NAME {self.sql(expression, 'this')}"
return self.func("XMLELEMENT", name, *expression.expressions) return self.func("XMLELEMENT", name, *expression.expressions)
def xmlkeyvalueoption_sql(self, expression: exp.XMLKeyValueOption) -> str:
this = self.sql(expression, "this")
expr = self.sql(expression, "expression")
expr = f"({expr})" if expr else ""
return f"{this}{expr}"
def partitionbyrangeproperty_sql(self, expression: exp.PartitionByRangeProperty) -> str: def partitionbyrangeproperty_sql(self, expression: exp.PartitionByRangeProperty) -> str:
partitions = self.expressions(expression, "partition_expressions") partitions = self.expressions(expression, "partition_expressions")
create = self.expressions(expression, "create_expressions") create = self.expressions(expression, "create_expressions")

View file

@ -69,7 +69,7 @@ def pushdown_projections(
# kind / side syntax (e.g INNER UNION ALL BY NAME) which changes the semantics of the operation # kind / side syntax (e.g INNER UNION ALL BY NAME) which changes the semantics of the operation
left, right = scope.union_scopes left, right = scope.union_scopes
if len(left.expression.selects) != len(right.expression.selects): if len(left.expression.selects) != len(right.expression.selects):
scope_sql = scope.expression.sql() scope_sql = scope.expression.sql(dialect=dialect)
raise OptimizeError( raise OptimizeError(
f"Invalid set operation due to column mismatch: {scope_sql}." f"Invalid set operation due to column mismatch: {scope_sql}."
) )

View file

@ -930,6 +930,14 @@ class Parser(metaclass=_Parser):
TokenType.FOR: lambda self, this: self._parse_comprehension(this), TokenType.FOR: lambda self, this: self._parse_comprehension(this),
} }
PIPE_SYNTAX_TRANSFORM_PARSERS = {
"SELECT": lambda self, query: self._parse_pipe_syntax_select(query),
"WHERE": lambda self, query: self._parse_pipe_syntax_where(query),
"ORDER BY": lambda self, query: query.order_by(self._parse_order(), copy=False),
"LIMIT": lambda self, query: self._parse_pipe_syntax_limit(query),
"OFFSET": lambda self, query: query.offset(self._parse_offset(), copy=False),
}
PROPERTY_PARSERS: t.Dict[str, t.Callable] = { PROPERTY_PARSERS: t.Dict[str, t.Callable] = {
"ALLOWED_VALUES": lambda self: self.expression( "ALLOWED_VALUES": lambda self: self.expression(
exp.AllowedValuesProperty, expressions=self._parse_csv(self._parse_primary) exp.AllowedValuesProperty, expressions=self._parse_csv(self._parse_primary)
@ -1116,6 +1124,25 @@ class Parser(metaclass=_Parser):
"TRUNCATE": lambda self: self._parse_partitioned_by_bucket_or_truncate(), "TRUNCATE": lambda self: self._parse_partitioned_by_bucket_or_truncate(),
} }
def _parse_pipe_syntax_select(self, query: exp.Query) -> exp.Query:
select = self._parse_select()
if isinstance(select, exp.Select):
return select.from_(query.subquery(copy=False), copy=False)
return query
def _parse_pipe_syntax_where(self, query: exp.Query) -> exp.Query:
where = self._parse_where()
return query.where(where, copy=False)
def _parse_pipe_syntax_limit(self, query: exp.Query) -> exp.Query:
limit = self._parse_limit()
offset = self._parse_offset()
if limit:
query.limit(limit, copy=False)
if offset:
query.offset(offset, copy=False)
return query
def _parse_partitioned_by_bucket_or_truncate(self) -> exp.Expression: def _parse_partitioned_by_bucket_or_truncate(self) -> exp.Expression:
klass = ( klass = (
exp.PartitionedByBucket exp.PartitionedByBucket
@ -1449,9 +1476,6 @@ class Parser(metaclass=_Parser):
LOG_DEFAULTS_TO_LN = False LOG_DEFAULTS_TO_LN = False
# Whether ADD is present for each column added by ALTER TABLE
ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = True
# Whether the table sample clause expects CSV syntax # Whether the table sample clause expects CSV syntax
TABLESAMPLE_CSV = False TABLESAMPLE_CSV = False
@ -3239,6 +3263,8 @@ class Parser(metaclass=_Parser):
this = self._parse_derived_table_values() this = self._parse_derived_table_values()
elif from_: elif from_:
this = exp.select("*").from_(from_.this, copy=False) this = exp.select("*").from_(from_.this, copy=False)
if self._match(TokenType.PIPE_GT, advance=False):
return self._parse_pipe_syntax_query(this)
elif self._match(TokenType.SUMMARIZE): elif self._match(TokenType.SUMMARIZE):
table = self._match(TokenType.TABLE) table = self._match(TokenType.TABLE)
this = self._parse_select() or self._parse_string() or self._parse_table() this = self._parse_select() or self._parse_string() or self._parse_table()
@ -7137,6 +7163,16 @@ class Parser(metaclass=_Parser):
return this return this
def _parse_pipe_syntax_query(self, query: exp.Select) -> exp.Query:
while self._match(TokenType.PIPE_GT):
parser = self.PIPE_SYNTAX_TRANSFORM_PARSERS.get(self._curr.text.upper())
if not parser:
self.raise_error(f"Unsupported pipe syntax operator: '{self._curr.text.upper()}'.")
else:
query = parser(self, query)
return query
def _parse_wrapped_id_vars(self, optional: bool = False) -> t.List[exp.Expression]: def _parse_wrapped_id_vars(self, optional: bool = False) -> t.List[exp.Expression]:
return self._parse_wrapped_csv(self._parse_id_var, optional=optional) return self._parse_wrapped_csv(self._parse_id_var, optional=optional)
@ -7216,7 +7252,7 @@ class Parser(metaclass=_Parser):
return self.expression(exp.Refresh, this=self._parse_string() or self._parse_table()) return self.expression(exp.Refresh, this=self._parse_string() or self._parse_table())
def _parse_add_column(self) -> t.Optional[exp.Expression]: def _parse_add_column(self) -> t.Optional[exp.Expression]:
if not self._match_text_seq("ADD"): if not self._prev.text.upper() == "ADD":
return None return None
self._match(TokenType.COLUMN) self._match(TokenType.COLUMN)
@ -7249,26 +7285,22 @@ class Parser(metaclass=_Parser):
) )
def _parse_alter_table_add(self) -> t.List[exp.Expression]: def _parse_alter_table_add(self) -> t.List[exp.Expression]:
index = self._index - 1 def _parse_add_column_or_constraint():
self._match_text_seq("ADD")
if self._match_set(self.ADD_CONSTRAINT_TOKENS, advance=False): if self._match_set(self.ADD_CONSTRAINT_TOKENS, advance=False):
return self._parse_csv( return self.expression(
lambda: self.expression(
exp.AddConstraint, expressions=self._parse_csv(self._parse_constraint) exp.AddConstraint, expressions=self._parse_csv(self._parse_constraint)
) )
) return self._parse_add_column()
self._retreat(index) if not self.dialect.ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN or self._match_text_seq(
if not self.ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN and self._match_text_seq("ADD"): "COLUMNS"
return self._parse_wrapped_csv(self._parse_field_def, optional=True) ):
if self._match_text_seq("ADD", "COLUMNS"):
schema = self._parse_schema() schema = self._parse_schema()
if schema:
return [schema]
return []
return self._parse_wrapped_csv(self._parse_add_column, optional=True) return ensure_list(schema) if schema else self._parse_csv(self._parse_field_def)
return self._parse_csv(_parse_add_column_or_constraint)
def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]: def _parse_alter_table_alter(self) -> t.Optional[exp.Expression]:
if self._match_texts(self.ALTER_ALTER_PARSERS): if self._match_texts(self.ALTER_ALTER_PARSERS):
@ -7391,7 +7423,8 @@ class Parser(metaclass=_Parser):
if self._match_text_seq("SERDE"): if self._match_text_seq("SERDE"):
alter_set.set("serde", self._parse_field()) alter_set.set("serde", self._parse_field())
alter_set.set("expressions", [self._parse_properties()]) properties = self._parse_wrapped(self._parse_properties, optional=True)
alter_set.set("expressions", [properties])
return alter_set return alter_set
@ -8180,6 +8213,8 @@ class Parser(metaclass=_Parser):
) )
def _parse_star_ops(self) -> t.Optional[exp.Expression]: def _parse_star_ops(self) -> t.Optional[exp.Expression]:
star_token = self._prev
if self._match_text_seq("COLUMNS", "(", advance=False): if self._match_text_seq("COLUMNS", "(", advance=False):
this = self._parse_function() this = self._parse_function()
if isinstance(this, exp.Columns): if isinstance(this, exp.Columns):
@ -8193,7 +8228,7 @@ class Parser(metaclass=_Parser):
"replace": self._parse_star_op("REPLACE"), "replace": self._parse_star_op("REPLACE"),
"rename": self._parse_star_op("RENAME"), "rename": self._parse_star_op("RENAME"),
}, },
) ).update_positions(star_token)
def _parse_grant_privilege(self) -> t.Optional[exp.GrantPrivilege]: def _parse_grant_privilege(self) -> t.Optional[exp.GrantPrivilege]:
privilege_parts = [] privilege_parts = []

View file

@ -57,6 +57,7 @@ class TokenType(AutoName):
OR = auto() OR = auto()
AMP = auto() AMP = auto()
DPIPE = auto() DPIPE = auto()
PIPE_GT = auto()
PIPE = auto() PIPE = auto()
PIPE_SLASH = auto() PIPE_SLASH = auto()
DPIPE_SLASH = auto() DPIPE_SLASH = auto()
@ -680,6 +681,7 @@ class Tokenizer(metaclass=_Tokenizer):
"==": TokenType.EQ, "==": TokenType.EQ,
"::": TokenType.DCOLON, "::": TokenType.DCOLON,
"||": TokenType.DPIPE, "||": TokenType.DPIPE,
"|>": TokenType.PIPE_GT,
">=": TokenType.GTE, ">=": TokenType.GTE,
"<=": TokenType.LTE, "<=": TokenType.LTE,
"<>": TokenType.NEQ, "<>": TokenType.NEQ,
@ -1505,10 +1507,14 @@ class Tokenizer(metaclass=_Tokenizer):
if not self._RS_TOKENIZER: if not self._RS_TOKENIZER:
raise SqlglotError("Rust tokenizer is not available") raise SqlglotError("Rust tokenizer is not available")
try: tokens, error_msg = self._RS_TOKENIZER.tokenize(sql, self._rs_dialect_settings)
tokens = self._RS_TOKENIZER.tokenize(sql, self._rs_dialect_settings) for token in tokens:
for token in tokens: token.token_type = _ALL_TOKEN_TYPES[token.token_type_index]
token.token_type = _ALL_TOKEN_TYPES[token.token_type_index]
return tokens # Setting this here so partial token lists can be inspected even if there is a failure
except Exception as e: self.tokens = tokens
raise TokenError(str(e))
if error_msg is not None:
raise TokenError(error_msg)
return tokens

2
sqlglotrs/Cargo.lock generated
View file

@ -502,7 +502,7 @@ dependencies = [
[[package]] [[package]]
name = "sqlglotrs" name = "sqlglotrs"
version = "0.5.0" version = "0.6.1"
dependencies = [ dependencies = [
"criterion", "criterion",
"pyo3", "pyo3",

View file

@ -1,6 +1,6 @@
[package] [package]
name = "sqlglotrs" name = "sqlglotrs"
version = "0.5.0" version = "0.6.1"
edition = "2021" edition = "2021"
license = "MIT" license = "MIT"

View file

@ -1,7 +1,6 @@
use crate::settings::TokenType; use crate::settings::TokenType;
use crate::trie::{Trie, TrieResult}; use crate::trie::{Trie, TrieResult};
use crate::{Token, TokenTypeSettings, TokenizerDialectSettings, TokenizerSettings}; use crate::{Token, TokenTypeSettings, TokenizerDialectSettings, TokenizerSettings};
use pyo3::exceptions::PyException;
use pyo3::prelude::*; use pyo3::prelude::*;
use std::cmp::{max, min}; use std::cmp::{max, min};
@ -45,7 +44,7 @@ impl Tokenizer {
&self, &self,
sql: &str, sql: &str,
dialect_settings: &TokenizerDialectSettings, dialect_settings: &TokenizerDialectSettings,
) -> Result<Vec<Token>, PyErr> { ) -> (Vec<Token>, Option<String>) {
let mut state = TokenizerState::new( let mut state = TokenizerState::new(
sql, sql,
&self.settings, &self.settings,
@ -53,9 +52,14 @@ impl Tokenizer {
dialect_settings, dialect_settings,
&self.keyword_trie, &self.keyword_trie,
); );
state.tokenize().map_err(|e| { let tokenize_result = state.tokenize();
PyException::new_err(format!("Error tokenizing '{}': {}", e.context, e.message)) match tokenize_result {
}) Ok(tokens) => (tokens, None),
Err(e) => {
let msg = format!("Error tokenizing '{}': {}", e.context, e.message);
(state.tokens, Some(msg))
}
}
} }
} }

View file

@ -1234,7 +1234,7 @@ LANGUAGE js AS
"bigquery": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT64, struct_col_b STRING>)", "bigquery": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT64, struct_col_b STRING>)",
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b TEXT))", "duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b TEXT))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b VARCHAR))", "presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b VARCHAR))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT, struct_col_b STRING>)", "hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)", "spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
}, },
) )
@ -1244,7 +1244,7 @@ LANGUAGE js AS
"bigquery": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT64, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)", "bigquery": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT64, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)",
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a BIGINT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))", "duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a BIGINT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a BIGINT, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))", "presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a BIGINT, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a BIGINT, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)", "hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: BIGINT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: BIGINT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)", "spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: BIGINT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
}, },
) )
@ -2616,3 +2616,14 @@ OPTIONS (
"snowflake": "WITH t1 AS (SELECT (SELECT ARRAY_AGG(OBJECT_CONSTRUCT('alias_x1', x1, 'x2', x2 /* test */)) FROM t2 WHERE x2 = 4) AS array_col) SELECT array_col[0].alias_x1, array_col[0].x2 FROM t1", "snowflake": "WITH t1 AS (SELECT (SELECT ARRAY_AGG(OBJECT_CONSTRUCT('alias_x1', x1, 'x2', x2 /* test */)) FROM t2 WHERE x2 = 4) AS array_col) SELECT array_col[0].alias_x1, array_col[0].x2 FROM t1",
}, },
) )
def test_avoid_generating_nested_comment(self):
sql = """
select
id,
foo,
-- bar, /* the thing */
from facts
"""
expected = "SELECT\n id,\n foo\n/* bar, /* the thing * / */\nFROM facts"
self.assertEqual(self.parse_one(sql).sql("bigquery", pretty=True), expected)

View file

@ -2191,6 +2191,21 @@ class TestDialect(Validator):
"bigquery": "MOD(a, b + 1)", "bigquery": "MOD(a, b + 1)",
}, },
) )
self.validate_all(
"ARRAY_REMOVE(the_array, target)",
write={
"": "ARRAY_REMOVE(the_array, target)",
"clickhouse": "arrayFilter(_u -> _u <> target, the_array)",
"duckdb": "LIST_FILTER(the_array, _u -> _u <> target)",
"bigquery": "ARRAY(SELECT _u FROM UNNEST(the_array) AS _u WHERE _u <> target)",
"hive": "ARRAY_REMOVE(the_array, target)",
"postgres": "ARRAY_REMOVE(the_array, target)",
"presto": "ARRAY_REMOVE(the_array, target)",
"starrocks": "ARRAY_REMOVE(the_array, target)",
"databricks": "ARRAY_REMOVE(the_array, target)",
"snowflake": "ARRAY_REMOVE(the_array, target)",
},
)
def test_typeddiv(self): def test_typeddiv(self):
typed_div = exp.Div(this=exp.column("a"), expression=exp.column("b"), typed=True) typed_div = exp.Div(this=exp.column("a"), expression=exp.column("b"), typed=True)
@ -3453,3 +3468,63 @@ FROM subquery2""",
parse_one("SELECT 0xCC", read=read_dialect).sql(other_integer_dialects), parse_one("SELECT 0xCC", read=read_dialect).sql(other_integer_dialects),
"SELECT 0xCC", "SELECT 0xCC",
) )
def test_pipe_syntax(self):
self.validate_identity("FROM x", "SELECT * FROM x")
self.validate_identity("FROM x |> SELECT x1, x2", "SELECT x1, x2 FROM (SELECT * FROM x)")
self.validate_identity(
"FROM x |> SELECT x1 as c1, x2 as c2",
"SELECT x1 AS c1, x2 AS c2 FROM (SELECT * FROM x)",
)
self.validate_identity(
"FROM x |> SELECT x1 + 1 as x1_a, x2 - 1 as x2_a |> WHERE x1_a > 1",
"SELECT x1 + 1 AS x1_a, x2 - 1 AS x2_a FROM (SELECT * FROM x) WHERE x1_a > 1",
)
self.validate_identity(
"FROM x |> SELECT x1 + 1 as x1_a, x2 - 1 as x2_a |> WHERE x1_a > 1 |> SELECT x2_a",
"SELECT x2_a FROM (SELECT x1 + 1 AS x1_a, x2 - 1 AS x2_a FROM (SELECT * FROM x) WHERE x1_a > 1)",
)
self.validate_identity(
"FROM x |> WHERE x1 > 0 OR x2 > 0 |> WHERE x3 > 1 AND x4 > 1 |> SELECT x1, x4",
"SELECT x1, x4 FROM (SELECT * FROM x WHERE (x1 > 0 OR x2 > 0) AND (x3 > 1 AND x4 > 1))",
)
self.validate_identity(
"FROM x |> WHERE x1 > 1 |> WHERE x2 > 2 |> SELECT x1 as gt1, x2 as gt2",
"SELECT x1 AS gt1, x2 AS gt2 FROM (SELECT * FROM x WHERE x1 > 1 AND x2 > 2)",
)
self.validate_identity(
"FROM x |> WHERE x1 > 1 AND x2 > 2 |> SELECT x1 as gt1, x2 as gt2 |> SELECT gt1 * 2 + gt2 * 2 AS gt2_2",
"SELECT gt1 * 2 + gt2 * 2 AS gt2_2 FROM (SELECT x1 AS gt1, x2 AS gt2 FROM (SELECT * FROM x WHERE x1 > 1 AND x2 > 2))",
)
self.validate_identity("FROM x |> ORDER BY x1", "SELECT * FROM x ORDER BY x1")
self.validate_identity(
"FROM x |> ORDER BY x1 |> ORDER BY x2", "SELECT * FROM x ORDER BY x1, x2"
)
self.validate_identity(
"FROM x |> ORDER BY x1 |> WHERE x1 > 0 OR x1 != 1 |> ORDER BY x2 |> WHERE x2 > 0 AND x2 != 1 |> SELECT x1, x2",
"SELECT x1, x2 FROM (SELECT * FROM x WHERE (x1 > 0 OR x1 <> 1) AND (x2 > 0 AND x2 <> 1) ORDER BY x1, x2)",
)
self.validate_identity(
"FROM x |> ORDER BY x1 |> WHERE x1 > 0 |> SELECT x1",
"SELECT x1 FROM (SELECT * FROM x WHERE x1 > 0 ORDER BY x1)",
)
self.validate_identity(
"FROM x |> WHERE x1 > 0 |> SELECT x1 |> ORDER BY x1",
"SELECT x1 FROM (SELECT * FROM x WHERE x1 > 0) ORDER BY x1",
)
self.validate_identity(
"FROM x |> SELECT x1, x2, x3 |> ORDER BY x1 DESC NULLS FIRST, x2 ASC NULLS LAST, x3",
"SELECT x1, x2, x3 FROM (SELECT * FROM x) ORDER BY x1 DESC NULLS FIRST, x2 ASC NULLS LAST, x3",
)
for option in ("LIMIT 1", "OFFSET 2", "LIMIT 1 OFFSET 2"):
with self.subTest(f"Testing pipe syntax LIMIT and OFFSET option: {option}"):
self.validate_identity(f"FROM x |> {option}", f"SELECT * FROM x {option}")
self.validate_identity(f"FROM x |> {option}", f"SELECT * FROM x {option}")
self.validate_identity(
f"FROM x |> {option} |> SELECT x1, x2 |> WHERE x1 > 0 |> WHERE x2 > 0 |> ORDER BY x1, x2 ",
f"SELECT x1, x2 FROM (SELECT * FROM x {option}) WHERE x1 > 0 AND x2 > 0 ORDER BY x1, x2",
)
self.validate_identity(
f"FROM x |> SELECT x1, x2 |> WHERE x1 > 0 |> WHERE x2 > 0 |> ORDER BY x1, x2 |> {option}",
f"SELECT x1, x2 FROM (SELECT * FROM x) WHERE x1 > 0 AND x2 > 0 ORDER BY x1, x2 {option}",
)

View file

@ -568,6 +568,9 @@ class TestDuckDB(Validator):
) )
self.validate_all( self.validate_all(
"STRING_TO_ARRAY(x, 'a')", "STRING_TO_ARRAY(x, 'a')",
read={
"snowflake": "STRTOK_TO_ARRAY(x, 'a')",
},
write={ write={
"duckdb": "STR_SPLIT(x, 'a')", "duckdb": "STR_SPLIT(x, 'a')",
"presto": "SPLIT(x, 'a')", "presto": "SPLIT(x, 'a')",

View file

@ -184,6 +184,28 @@ class TestHive(Validator):
self.validate_identity( self.validate_identity(
"ALTER VIEW v1 UNSET TBLPROPERTIES ('tblp1', 'tblp2')", check_command_warning=True "ALTER VIEW v1 UNSET TBLPROPERTIES ('tblp1', 'tblp2')", check_command_warning=True
) )
self.validate_identity("CREATE TABLE foo (col STRUCT<struct_col_a: VARCHAR((50))>)")
self.validate_all(
"CREATE TABLE db.example_table (col_a struct<struct_col_a:int, struct_col_b:string>)",
write={
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b TEXT))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b VARCHAR))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
},
)
self.validate_all(
"CREATE TABLE db.example_table (col_a struct<struct_col_a:int, struct_col_b:struct<nested_col_a:string, nested_col_b:string>>)",
write={
"bigquery": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT64, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)",
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
},
)
def test_lateral_view(self): def test_lateral_view(self):
self.validate_all( self.validate_all(

View file

@ -111,7 +111,7 @@ class TestMySQL(Validator):
) )
self.validate_identity( self.validate_identity(
"CREATE TABLE test (ts TIMESTAMP, ts_tz TIMESTAMPTZ, ts_ltz TIMESTAMPLTZ)", "CREATE TABLE test (ts TIMESTAMP, ts_tz TIMESTAMPTZ, ts_ltz TIMESTAMPLTZ)",
"CREATE TABLE test (ts DATETIME, ts_tz TIMESTAMP, ts_ltz TIMESTAMP)", "CREATE TABLE test (ts TIMESTAMP, ts_tz TIMESTAMP, ts_ltz TIMESTAMP)",
) )
self.validate_identity( self.validate_identity(
"ALTER TABLE test_table ALTER COLUMN test_column SET DATA TYPE LONGTEXT", "ALTER TABLE test_table ALTER COLUMN test_column SET DATA TYPE LONGTEXT",
@ -298,7 +298,7 @@ class TestMySQL(Validator):
) )
self.validate_identity( self.validate_identity(
"CAST(x AS TIMESTAMP)", "CAST(x AS TIMESTAMP)",
"CAST(x AS DATETIME)", "TIMESTAMP(x)",
) )
self.validate_identity( self.validate_identity(
"CAST(x AS TIMESTAMPTZ)", "CAST(x AS TIMESTAMPTZ)",

View file

@ -8,19 +8,6 @@ class TestPostgres(Validator):
dialect = "postgres" dialect = "postgres"
def test_postgres(self): def test_postgres(self):
self.validate_all(
"x ? y",
write={
"": "JSONB_CONTAINS(x, y)",
"postgres": "x ? y",
},
)
self.validate_identity("SHA384(x)")
self.validate_identity("1.x", "1. AS x")
self.validate_identity("|/ x", "SQRT(x)")
self.validate_identity("||/ x", "CBRT(x)")
expr = self.parse_one("SELECT * FROM r CROSS JOIN LATERAL UNNEST(ARRAY[1]) AS s(location)") expr = self.parse_one("SELECT * FROM r CROSS JOIN LATERAL UNNEST(ARRAY[1]) AS s(location)")
unnest = expr.args["joins"][0].this.this unnest = expr.args["joins"][0].this.this
unnest.assert_is(exp.Unnest) unnest.assert_is(exp.Unnest)
@ -31,6 +18,14 @@ class TestPostgres(Validator):
self.assertIsInstance(expr, exp.Alter) self.assertIsInstance(expr, exp.Alter)
self.assertEqual(expr.sql(dialect="postgres"), alter_table_only) self.assertEqual(expr.sql(dialect="postgres"), alter_table_only)
sql = "ARRAY[x" + ",x" * 27 + "]"
expected_sql = "ARRAY[\n x" + (",\n x" * 27) + "\n]"
self.validate_identity(sql, expected_sql, pretty=True)
self.validate_identity("SHA384(x)")
self.validate_identity("1.x", "1. AS x")
self.validate_identity("|/ x", "SQRT(x)")
self.validate_identity("||/ x", "CBRT(x)")
self.validate_identity("SELECT EXTRACT(QUARTER FROM CAST('2025-04-26' AS DATE))") self.validate_identity("SELECT EXTRACT(QUARTER FROM CAST('2025-04-26' AS DATE))")
self.validate_identity("SELECT DATE_TRUNC('QUARTER', CAST('2025-04-26' AS DATE))") self.validate_identity("SELECT DATE_TRUNC('QUARTER', CAST('2025-04-26' AS DATE))")
self.validate_identity("STRING_TO_ARRAY('xx~^~yy~^~zz', '~^~', 'yy')") self.validate_identity("STRING_TO_ARRAY('xx~^~yy~^~zz', '~^~', 'yy')")
@ -79,6 +74,11 @@ class TestPostgres(Validator):
self.validate_identity("SELECT CURRENT_USER") self.validate_identity("SELECT CURRENT_USER")
self.validate_identity("SELECT * FROM ONLY t1") self.validate_identity("SELECT * FROM ONLY t1")
self.validate_identity("SELECT INTERVAL '-1 MONTH'") self.validate_identity("SELECT INTERVAL '-1 MONTH'")
self.validate_identity("SELECT INTERVAL '4.1 DAY'")
self.validate_identity("SELECT INTERVAL '3.14159 HOUR'")
self.validate_identity("SELECT INTERVAL '2.5 MONTH'")
self.validate_identity("SELECT INTERVAL '-10.75 MINUTE'")
self.validate_identity("SELECT INTERVAL '0.123456789 SECOND'")
self.validate_identity( self.validate_identity(
"SELECT * FROM test_data, LATERAL JSONB_ARRAY_ELEMENTS(data) WITH ORDINALITY AS elem(value, ordinality)" "SELECT * FROM test_data, LATERAL JSONB_ARRAY_ELEMENTS(data) WITH ORDINALITY AS elem(value, ordinality)"
) )
@ -374,6 +374,13 @@ FROM json_data, field_ids""",
pretty=True, pretty=True,
) )
self.validate_all(
"x ? y",
write={
"": "JSONB_CONTAINS(x, y)",
"postgres": "x ? y",
},
)
self.validate_all( self.validate_all(
"SELECT CURRENT_TIMESTAMP + INTERVAL '-3 MONTH'", "SELECT CURRENT_TIMESTAMP + INTERVAL '-3 MONTH'",
read={ read={
@ -1050,6 +1057,9 @@ FROM json_data, field_ids""",
self.validate_identity( self.validate_identity(
"CREATE UNLOGGED TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp" "CREATE UNLOGGED TABLE foo AS WITH t(c) AS (SELECT 1) SELECT * FROM (SELECT c AS c FROM t) AS temp"
) )
self.validate_identity(
"ALTER TABLE foo ADD COLUMN id BIGINT NOT NULL PRIMARY KEY DEFAULT 1, ADD CONSTRAINT fk_orders_user FOREIGN KEY (id) REFERENCES foo (id)"
)
self.validate_identity( self.validate_identity(
"CREATE TABLE t (col integer ARRAY[3])", "CREATE TABLE t (col integer ARRAY[3])",
"CREATE TABLE t (col INT[3])", "CREATE TABLE t (col INT[3])",

View file

@ -511,7 +511,7 @@ class TestPresto(Validator):
write={ write={
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b TEXT))", "duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b TEXT))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b VARCHAR))", "presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b VARCHAR))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT, struct_col_b STRING>)", "hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)", "spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
}, },
) )
@ -520,7 +520,7 @@ class TestPresto(Validator):
write={ write={
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))", "duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))", "presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)", "hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)", "spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
}, },
) )

View file

@ -27,7 +27,7 @@ class TestSpark(Validator):
write={ write={
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b TEXT))", "duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b TEXT))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b VARCHAR))", "presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b VARCHAR))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT, struct_col_b STRING>)", "hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)", "spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRING>)",
}, },
) )
@ -37,7 +37,7 @@ class TestSpark(Validator):
"bigquery": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT64, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)", "bigquery": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT64, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)",
"duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))", "duckdb": "CREATE TABLE db.example_table (col_a STRUCT(struct_col_a INT, struct_col_b STRUCT(nested_col_a TEXT, nested_col_b TEXT)))",
"presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))", "presto": "CREATE TABLE db.example_table (col_a ROW(struct_col_a INTEGER, struct_col_b ROW(nested_col_a VARCHAR, nested_col_b VARCHAR)))",
"hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a INT, struct_col_b STRUCT<nested_col_a STRING, nested_col_b STRING>>)", "hive": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
"spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)", "spark": "CREATE TABLE db.example_table (col_a STRUCT<struct_col_a: INT, struct_col_b: STRUCT<nested_col_a: STRING, nested_col_b: STRING>>)",
}, },
) )

View file

@ -62,6 +62,13 @@ class TestTSQL(Validator):
"SELECT 1 WHERE EXISTS(SELECT 1)", "SELECT 1 WHERE EXISTS(SELECT 1)",
) )
self.validate_all(
"SELECT CONVERT(DATETIME, '2006-04-25T15:50:59.997', 126)",
write={
"duckdb": "SELECT STRPTIME('2006-04-25T15:50:59.997', '%Y-%m-%dT%H:%M:%S.%f')",
"tsql": "SELECT CONVERT(DATETIME, '2006-04-25T15:50:59.997', 126)",
},
)
self.validate_all( self.validate_all(
"WITH A AS (SELECT 2 AS value), C AS (SELECT * FROM A) SELECT * INTO TEMP_NESTED_WITH FROM (SELECT * FROM C) AS temp", "WITH A AS (SELECT 2 AS value), C AS (SELECT * FROM A) SELECT * INTO TEMP_NESTED_WITH FROM (SELECT * FROM C) AS temp",
read={ read={
@ -569,6 +576,79 @@ class TestTSQL(Validator):
}, },
) )
def test_for_xml(self):
xml_possible_options = [
"RAW('ElementName')",
"RAW('ElementName'), BINARY BASE64",
"RAW('ElementName'), TYPE",
"RAW('ElementName'), ROOT('RootName')",
"RAW('ElementName'), BINARY BASE64, TYPE",
"RAW('ElementName'), BINARY BASE64, ROOT('RootName')",
"RAW('ElementName'), TYPE, ROOT('RootName')",
"RAW('ElementName'), BINARY BASE64, TYPE, ROOT('RootName')",
"RAW('ElementName'), XMLDATA",
"RAW('ElementName'), XMLSCHEMA('TargetNameSpaceURI')",
"RAW('ElementName'), XMLDATA, ELEMENTS XSINIL",
"RAW('ElementName'), XMLSCHEMA('TargetNameSpaceURI'), ELEMENTS ABSENT",
"RAW('ElementName'), XMLDATA, ELEMENTS ABSENT",
"RAW('ElementName'), XMLSCHEMA('TargetNameSpaceURI'), ELEMENTS XSINIL",
"AUTO",
"AUTO, BINARY BASE64",
"AUTO, TYPE",
"AUTO, ROOT('RootName')",
"AUTO, BINARY BASE64, TYPE",
"AUTO, TYPE, ROOT('RootName')",
"AUTO, BINARY BASE64, TYPE, ROOT('RootName')",
"AUTO, XMLDATA",
"AUTO, XMLSCHEMA('TargetNameSpaceURI')",
"AUTO, XMLDATA, ELEMENTS XSINIL",
"AUTO, XMLSCHEMA('TargetNameSpaceURI'), ELEMENTS ABSENT",
"AUTO, XMLDATA, ELEMENTS ABSENT",
"AUTO, XMLSCHEMA('TargetNameSpaceURI'), ELEMENTS XSINIL",
"EXPLICIT",
"EXPLICIT, BINARY BASE64",
"EXPLICIT, TYPE",
"EXPLICIT, ROOT('RootName')",
"EXPLICIT, BINARY BASE64, TYPE",
"EXPLICIT, TYPE, ROOT('RootName')",
"EXPLICIT, BINARY BASE64, TYPE, ROOT('RootName')",
"EXPLICIT, XMLDATA",
"EXPLICIT, XMLDATA, BINARY BASE64",
"EXPLICIT, XMLDATA, TYPE",
"EXPLICIT, XMLDATA, ROOT('RootName')",
"EXPLICIT, XMLDATA, BINARY BASE64, TYPE",
"EXPLICIT, XMLDATA, BINARY BASE64, TYPE, ROOT('RootName')",
"PATH('ElementName')",
"PATH('ElementName'), BINARY BASE64",
"PATH('ElementName'), TYPE",
"PATH('ElementName'), ROOT('RootName')",
"PATH('ElementName'), BINARY BASE64, TYPE",
"PATH('ElementName'), TYPE, ROOT('RootName')",
"PATH('ElementName'), BINARY BASE64, TYPE, ROOT('RootName')",
"PATH('ElementName'), ELEMENTS XSINIL",
"PATH('ElementName'), ELEMENTS ABSENT",
"PATH('ElementName'), BINARY BASE64, ELEMENTS XSINIL",
"PATH('ElementName'), TYPE, ELEMENTS ABSENT",
"PATH('ElementName'), ROOT('RootName'), ELEMENTS XSINIL",
"PATH('ElementName'), BINARY BASE64, TYPE, ROOT('RootName'), ELEMENTS ABSENT",
]
for xml_option in xml_possible_options:
with self.subTest(f"Testing FOR XML option: {xml_option}"):
self.validate_identity(f"SELECT * FROM t FOR XML {xml_option}")
self.validate_identity(
"SELECT * FROM t FOR XML PATH, BINARY BASE64, ELEMENTS XSINIL",
"""SELECT
*
FROM t
FOR XML
PATH,
BINARY BASE64,
ELEMENTS XSINIL""",
pretty=True,
)
def test_types(self): def test_types(self):
self.validate_identity("CAST(x AS XML)") self.validate_identity("CAST(x AS XML)")
self.validate_identity("CAST(x AS UNIQUEIDENTIFIER)") self.validate_identity("CAST(x AS UNIQUEIDENTIFIER)")
@ -904,18 +984,18 @@ class TestTSQL(Validator):
self.validate_identity("CREATE SCHEMA testSchema") self.validate_identity("CREATE SCHEMA testSchema")
self.validate_identity("CREATE VIEW t AS WITH cte AS (SELECT 1 AS c) SELECT c FROM cte") self.validate_identity("CREATE VIEW t AS WITH cte AS (SELECT 1 AS c) SELECT c FROM cte")
self.validate_identity("ALTER TABLE tbl SET SYSTEM_VERSIONING=OFF") self.validate_identity("ALTER TABLE tbl SET (SYSTEM_VERSIONING=OFF)")
self.validate_identity("ALTER TABLE tbl SET FILESTREAM_ON = 'test'") self.validate_identity("ALTER TABLE tbl SET (FILESTREAM_ON = 'test')")
self.validate_identity("ALTER TABLE tbl SET DATA_DELETION=ON") self.validate_identity("ALTER TABLE tbl SET (DATA_DELETION=ON)")
self.validate_identity("ALTER TABLE tbl SET DATA_DELETION=OFF") self.validate_identity("ALTER TABLE tbl SET (DATA_DELETION=OFF)")
self.validate_identity( self.validate_identity(
"ALTER TABLE tbl SET SYSTEM_VERSIONING=ON(HISTORY_TABLE=db.tbl, DATA_CONSISTENCY_CHECK=OFF, HISTORY_RETENTION_PERIOD=5 DAYS)" "ALTER TABLE tbl SET (SYSTEM_VERSIONING=ON(HISTORY_TABLE=db.tbl, DATA_CONSISTENCY_CHECK=OFF, HISTORY_RETENTION_PERIOD=5 DAYS))"
) )
self.validate_identity( self.validate_identity(
"ALTER TABLE tbl SET SYSTEM_VERSIONING=ON(HISTORY_TABLE=db.tbl, HISTORY_RETENTION_PERIOD=INFINITE)" "ALTER TABLE tbl SET (SYSTEM_VERSIONING=ON(HISTORY_TABLE=db.tbl, HISTORY_RETENTION_PERIOD=INFINITE))"
) )
self.validate_identity( self.validate_identity(
"ALTER TABLE tbl SET DATA_DELETION=ON(FILTER_COLUMN=col, RETENTION_PERIOD=5 MONTHS)" "ALTER TABLE tbl SET (DATA_DELETION=ON(FILTER_COLUMN=col, RETENTION_PERIOD=5 MONTHS))"
) )
self.validate_identity("ALTER VIEW v AS SELECT a, b, c, d FROM foo") self.validate_identity("ALTER VIEW v AS SELECT a, b, c, d FROM foo")

View file

@ -957,7 +957,7 @@ class TestParser(unittest.TestCase):
ast = parse_one("YEAR(a) /* sqlglot.anon */") ast = parse_one("YEAR(a) /* sqlglot.anon */")
self.assertIsInstance(ast, exp.Year) self.assertIsInstance(ast, exp.Year)
def test_identifier_meta(self): def test_token_position_meta(self):
ast = parse_one( ast = parse_one(
"SELECT a, b FROM test_schema.test_table_a UNION ALL SELECT c, d FROM test_catalog.test_schema.test_table_b" "SELECT a, b FROM test_schema.test_table_a UNION ALL SELECT c, d FROM test_catalog.test_schema.test_table_b"
) )
@ -988,6 +988,12 @@ class TestParser(unittest.TestCase):
ast = parse_one("SELECT FOO()") ast = parse_one("SELECT FOO()")
self.assertEqual(ast.find(exp.Anonymous).meta, {"line": 1, "col": 10, "start": 7, "end": 9}) self.assertEqual(ast.find(exp.Anonymous).meta, {"line": 1, "col": 10, "start": 7, "end": 9})
ast = parse_one("SELECT * FROM t")
self.assertEqual(ast.find(exp.Star).meta, {"line": 1, "col": 8, "start": 7, "end": 7})
ast = parse_one("SELECT t.* FROM t")
self.assertEqual(ast.find(exp.Star).meta, {"line": 1, "col": 10, "start": 9, "end": 9})
def test_quoted_identifier_meta(self): def test_quoted_identifier_meta(self):
sql = 'SELECT "a" FROM "test_schema"."test_table_a"' sql = 'SELECT "a" FROM "test_schema"."test_table_a"'
ast = parse_one(sql) ast = parse_one(sql)

View file

@ -186,3 +186,18 @@ x"""
(TokenType.STRING, ") }}"), (TokenType.STRING, ") }}"),
], ],
) )
def test_partial_token_list(self):
tokenizer = Tokenizer()
try:
# This is expected to fail due to the unbalanced string quotes
tokenizer.tokenize("foo 'bar")
except TokenError as e:
self.assertIn("Error tokenizing 'foo 'ba'", str(e))
partial_tokens = tokenizer.tokens
self.assertEqual(len(partial_tokens), 1)
self.assertEqual(partial_tokens[0].token_type, TokenType.VAR)
self.assertEqual(partial_tokens[0].text, "foo")