Adding upstream version 0.2.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent
2e01f2e4fb
commit
873fd2c844
15 changed files with 2102 additions and 0 deletions
47
.github/workflows/publish.yml
vendored
Normal file
47
.github/workflows/publish.yml
vendored
Normal file
|
@ -0,0 +1,47 @@
|
|||
name: Build and Publish Package
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
types:
|
||||
- closed
|
||||
|
||||
jobs:
|
||||
publish-package:
|
||||
if: ${{ github.event.pull_request.merged == true && startsWith(github.event.pull_request.head.ref, 'release/v') }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Check out the main branch
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: main
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.10"
|
||||
- name: Install Poetry
|
||||
uses: snok/install-poetry@v1
|
||||
with:
|
||||
version: 1.6.1
|
||||
- name: Configure poetry
|
||||
run: poetry config --no-interaction pypi-token.pypi ${{ secrets.PYPI_TOKEN }}
|
||||
- name: Get this package's Version
|
||||
id: package_version
|
||||
run: echo "package_version=$(poetry version --short)" >> $GITHUB_OUTPUT
|
||||
- name: Build package
|
||||
run: poetry build --no-interaction
|
||||
- name: Publish package to PyPI
|
||||
run: poetry publish --no-interaction
|
||||
- name: Create a Github Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
tag_name: v${{ steps.package_version.outputs.package_version }}
|
||||
target_commitish: main
|
||||
token: ${{ secrets.GH_RELEASE_TOKEN }}
|
||||
body_path: CHANGELOG.md
|
||||
files: |
|
||||
LICENSE
|
||||
dist/*harlequin*.whl
|
||||
dist/*harlequin*.tar.gz
|
57
.github/workflows/release.yml
vendored
Normal file
57
.github/workflows/release.yml
vendored
Normal file
|
@ -0,0 +1,57 @@
|
|||
name: Create Release Branch
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
newVersion:
|
||||
description: A version number for this release (e.g., "0.1.0")
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
prepare-release:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Check out the main branch
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: main
|
||||
- name: Set up Python 3.10
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.10"
|
||||
- name: Install Poetry
|
||||
uses: snok/install-poetry@v1
|
||||
with:
|
||||
version: 1.6.1
|
||||
- name: Create release branch
|
||||
run: |
|
||||
git checkout -b release/v${{ github.event.inputs.newVersion }}
|
||||
git push --set-upstream origin release/v${{ github.event.inputs.newVersion }}
|
||||
- name: Bump version
|
||||
run: poetry version ${{ github.event.inputs.newVersion }} --no-interaction
|
||||
- name: Ensure package can be built
|
||||
run: poetry build --no-interaction
|
||||
- name: Update CHANGELOG
|
||||
uses: thomaseizinger/keep-a-changelog-new-release@v1
|
||||
with:
|
||||
version: ${{ github.event.inputs.newVersion }}
|
||||
- name: Commit Changes
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: Bumps version to ${{ github.event.inputs.newVersion }}
|
||||
- name: Create pull request into main
|
||||
uses: thomaseizinger/create-pull-request@1.3.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
head: release/v${{ github.event.inputs.newVersion }}
|
||||
base: main
|
||||
title: v${{ github.event.inputs.newVersion }}
|
||||
body: >
|
||||
This PR was automatically generated. It bumps the version number
|
||||
in pyproject.toml and updates CHANGELOG.md. You may have to close
|
||||
this PR and reopen it to get the required checks to run.
|
162
.gitignore
vendored
Normal file
162
.gitignore
vendored
Normal file
|
@ -0,0 +1,162 @@
|
|||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.python-version
|
||||
.Python
|
||||
Pipfile
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
31
CHANGELOG.md
Normal file
31
CHANGELOG.md
Normal file
|
@ -0,0 +1,31 @@
|
|||
# harlequin-odbc CHANGELOG
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
## [0.2.0] - 2025-01-08
|
||||
|
||||
- Drops support for Python 3.8
|
||||
- Adds support for Python 3.13
|
||||
- Adds support for Harlequin 2.X
|
||||
|
||||
## [0.1.1] - 2024-01-09
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Renames package to use hyphen.
|
||||
|
||||
## [0.1.0] - 2024-01-09
|
||||
|
||||
### Features
|
||||
|
||||
- Adds a basic ODBC adapter.
|
||||
|
||||
[Unreleased]: https://github.com/tconbeer/harlequin-odbc/compare/0.2.0...HEAD
|
||||
|
||||
[0.2.0]: https://github.com/tconbeer/harlequin-odbc/compare/0.1.1...0.2.0
|
||||
|
||||
[0.1.1]: https://github.com/tconbeer/harlequin-odbc/compare/0.1.0...0.1.1
|
||||
|
||||
[0.1.0]: https://github.com/tconbeer/harlequin-odbc/compare/dbe2dbd1da1930117c1572ca751d9cd9d43928b6...0.1.0
|
21
LICENSE
Normal file
21
LICENSE
Normal file
|
@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2023 Ted Conbeer
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
24
Makefile
Normal file
24
Makefile
Normal file
|
@ -0,0 +1,24 @@
|
|||
.PHONY: check
|
||||
check:
|
||||
ruff format .
|
||||
ruff check . --fix
|
||||
mypy
|
||||
pytest
|
||||
|
||||
.PHONY: init
|
||||
init:
|
||||
docker-compose up -d
|
||||
|
||||
.PHONY: clean
|
||||
clean:
|
||||
docker-compose down
|
||||
|
||||
.PHONY: serve
|
||||
serve:
|
||||
harlequin -P None -a odbc "${ODBC_CONN_STR}"
|
||||
|
||||
.PHONY: lint
|
||||
lint:
|
||||
ruff format .
|
||||
ruff check . --fix
|
||||
mypy
|
67
README.md
Normal file
67
README.md
Normal file
|
@ -0,0 +1,67 @@
|
|||
# harlequin-odbc
|
||||
|
||||
This repo provides the ODBC adapter for Harlequin.
|
||||
|
||||
## Installation
|
||||
|
||||
`harlequin-odbc` depends on `harlequin`, so installing this package will also install Harlequin.
|
||||
|
||||
### Pre-requisites
|
||||
|
||||
You will need an ODBC driver manager installed on your OS. Windows has one built-in, but for Unix-based OSes, you will need to download and install one before installing `harlequin-odbc`. You can install unixODBC with `brew install unixodbc` or `sudo apt install unixodbc`. See the [pyodbc docs](https://github.com/mkleehammer/pyodbc/wiki/Install) for more info.
|
||||
|
||||
Additionally, you will need to install the ODBC driver for your specific database (e.g., `ODBC Driver 18 for SQL Server` for MS SQL Server). For more information, see the docs for your specific database.
|
||||
|
||||
### Using pip
|
||||
|
||||
To install this adapter into an activated virtual environment:
|
||||
```bash
|
||||
pip install harlequin-odbc
|
||||
```
|
||||
|
||||
### Using poetry
|
||||
|
||||
```bash
|
||||
poetry add harlequin-odbc
|
||||
```
|
||||
|
||||
### Using pipx
|
||||
|
||||
If you do not already have Harlequin installed:
|
||||
|
||||
```bash
|
||||
pipx install harlequin-odbc
|
||||
```
|
||||
|
||||
If you would like to add the ODBC adapter to an existing Harlequin installation:
|
||||
|
||||
```bash
|
||||
pipx inject harlequin harlequin-odbc
|
||||
```
|
||||
|
||||
### As an Extra
|
||||
Alternatively, you can install Harlequin with the `odbc` extra:
|
||||
|
||||
```bash
|
||||
pip install harlequin[odbc]
|
||||
```
|
||||
|
||||
```bash
|
||||
poetry add harlequin[odbc]
|
||||
```
|
||||
|
||||
```bash
|
||||
pipx install harlequin[odbc]
|
||||
```
|
||||
|
||||
## Usage and Configuration
|
||||
|
||||
You can open Harlequin with the ODBC adapter by selecting it with the `-a` option and passing an ODBC connection string:
|
||||
|
||||
```bash
|
||||
harlequin -a odbc 'Driver={ODBC Driver 18 for SQL Server};Server=tcp:harlequin-example.database.windows.net,1433;Database=dev;Uid=harlequin;Pwd=my_secret;Encrypt=yes;TrustServerCertificate=no;Connection Timeout=30;'
|
||||
```
|
||||
|
||||
The ODBC adapter does not accept other options.
|
||||
|
||||
For more information, see the [Harlequin Docs](https://harlequin.sh/docs/odbc/index).
|
15
docker-compose.yml
Normal file
15
docker-compose.yml
Normal file
|
@ -0,0 +1,15 @@
|
|||
services:
|
||||
|
||||
db:
|
||||
image: mcr.microsoft.com/mssql/server:2019-latest
|
||||
restart: always
|
||||
environment:
|
||||
ACCEPT_EULA: Y
|
||||
MSSQL_SA_PASSWORD: for-testing
|
||||
volumes:
|
||||
- sqlserver_data:/var/opt/mssql
|
||||
ports:
|
||||
- 1433:1433
|
||||
|
||||
volumes:
|
||||
sqlserver_data:
|
1299
poetry.lock
generated
Normal file
1299
poetry.lock
generated
Normal file
File diff suppressed because it is too large
Load diff
67
pyproject.toml
Normal file
67
pyproject.toml
Normal file
|
@ -0,0 +1,67 @@
|
|||
[tool.poetry]
|
||||
name = "harlequin-odbc"
|
||||
version = "0.2.0"
|
||||
description = "A Harlequin adapter for ODBC drivers."
|
||||
authors = ["Ted Conbeer <tconbeer@users.noreply.github.com>"]
|
||||
license = "MIT"
|
||||
readme = "README.md"
|
||||
packages = [
|
||||
{ include = "harlequin_odbc", from = "src" },
|
||||
]
|
||||
|
||||
[tool.poetry.plugins."harlequin.adapter"]
|
||||
odbc = "harlequin_odbc:HarlequinOdbcAdapter"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.9,<3.14"
|
||||
harlequin = ">=1.9.1,<3"
|
||||
pyodbc = "^5.0"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
ruff = "^0.6.0"
|
||||
pytest = "^7.4.3"
|
||||
mypy = "^1.7.0"
|
||||
pre-commit = "^3.5.0"
|
||||
importlib_metadata = { version = ">=4.6.0", python = "<3.10.0" }
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
||||
|
||||
[tool.ruff]
|
||||
target-version = "py39"
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = ["A", "B", "E", "F", "I"]
|
||||
|
||||
[tool.mypy]
|
||||
python_version = "3.9"
|
||||
files = [
|
||||
"src/**/*.py",
|
||||
"tests/**/*.py",
|
||||
]
|
||||
mypy_path = "src:stubs"
|
||||
|
||||
show_column_numbers = true
|
||||
|
||||
# show error messages from unrelated files
|
||||
follow_imports = "normal"
|
||||
|
||||
# be strict
|
||||
disallow_untyped_calls = true
|
||||
disallow_untyped_defs = true
|
||||
check_untyped_defs = true
|
||||
disallow_untyped_decorators = true
|
||||
disallow_incomplete_defs = true
|
||||
disallow_subclassing_any = true
|
||||
strict_optional = true
|
||||
|
||||
warn_return_any = true
|
||||
warn_no_return = true
|
||||
warn_redundant_casts = true
|
||||
warn_unused_ignores = true
|
||||
warn_unused_configs = true
|
||||
|
||||
no_implicit_reexport = true
|
||||
strict_equality = true
|
3
src/harlequin_odbc/__init__.py
Normal file
3
src/harlequin_odbc/__init__.py
Normal file
|
@ -0,0 +1,3 @@
|
|||
from harlequin_odbc.adapter import HarlequinOdbcAdapter
|
||||
|
||||
# Public API: the adapter class registered under the harlequin.adapter entry point.
__all__ = ["HarlequinOdbcAdapter"]
|
201
src/harlequin_odbc/adapter.py
Normal file
201
src/harlequin_odbc/adapter.py
Normal file
|
@ -0,0 +1,201 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Sequence
|
||||
|
||||
import pyodbc # type: ignore
|
||||
from harlequin import (
|
||||
HarlequinAdapter,
|
||||
HarlequinConnection,
|
||||
HarlequinCursor,
|
||||
)
|
||||
from harlequin.autocomplete.completion import HarlequinCompletion
|
||||
from harlequin.catalog import Catalog, CatalogItem
|
||||
from harlequin.exception import (
|
||||
HarlequinConfigError,
|
||||
HarlequinConnectionError,
|
||||
HarlequinQueryError,
|
||||
)
|
||||
from textual_fastdatatable.backend import AutoBackendType
|
||||
|
||||
from harlequin_odbc.cli_options import ODBC_OPTIONS
|
||||
|
||||
|
||||
class HarlequinOdbcCursor(HarlequinCursor):
    """Wraps a pyodbc cursor to satisfy Harlequin's cursor contract."""

    def __init__(self, cur: pyodbc.Cursor) -> None:
        self.cur = cur
        # Maximum number of rows fetchall() may return; None means unlimited.
        self._limit: int | None = None

    def columns(self) -> list[tuple[str, str]]:
        """Return (header, short type label) pairs for the result set.

        The label is derived from the Python type pyodbc reports for each
        column; unknown types are shown as "?".
        """
        # todo: use getTypeInfo
        labels = {
            "bool": "t/f",
            "int": "##",
            "float": "#.#",
            "Decimal": "#.#",
            "str": "s",
            "bytes": "0b",
            "date": "d",
            "time": "t",
            "datetime": "dt",
            "UUID": "uid",
        }
        headers: list[tuple[str, str]] = []
        for name, py_type, *_ in self.cur.description:
            display_name = name if name else "(No column name)"
            headers.append((display_name, labels.get(py_type.__name__, "?")))
        return headers

    def set_limit(self, limit: int) -> HarlequinOdbcCursor:
        """Cap the number of rows fetchall() returns; fluent (returns self)."""
        self._limit = limit
        return self

    def fetchall(self) -> AutoBackendType:
        """Fetch rows from the underlying cursor, honoring any set limit.

        Raises HarlequinQueryError on any driver-level failure.
        """
        try:
            if self._limit is not None:
                return self.cur.fetchmany(self._limit)
            return self.cur.fetchall()
        except Exception as e:
            raise HarlequinQueryError(
                msg=str(e),
                title="Harlequin encountered an error while executing your query.",
            ) from e
|
||||
|
||||
|
||||
class HarlequinOdbcConnection(HarlequinConnection):
    """A Harlequin connection backed by a pair of pyodbc connections.

    ``conn`` runs user queries; ``aux_conn`` serves catalog introspection so
    that metadata refreshes don't interleave with an in-flight user query.
    """

    def __init__(
        self,
        conn_str: Sequence[str],
        init_message: str = "",
    ) -> None:
        assert len(conn_str) == 1
        self.init_message = init_message
        try:
            # autocommit=True: Harlequin issues standalone statements and
            # does not manage transactions itself.
            self.conn = pyodbc.connect(conn_str[0], autocommit=True)
            self.aux_conn = pyodbc.connect(conn_str[0], autocommit=True)
        except Exception as e:
            raise HarlequinConnectionError(
                msg=str(e), title="Harlequin could not connect to your database."
            ) from e

    def execute(self, query: str) -> HarlequinOdbcCursor | None:
        """Execute ``query``; return a cursor if it produced a result set.

        Returns None for statements with no result set (e.g. DDL/DML).
        Raises HarlequinQueryError on any driver error.
        """
        try:
            cur = self.conn.cursor()
            cur.execute(query)
        except Exception as e:
            raise HarlequinQueryError(
                msg=str(e),
                title="Harlequin encountered an error while executing your query.",
            ) from e
        else:
            # cur.description is None when the statement returned no columns.
            if cur.description is not None:
                return HarlequinOdbcCursor(cur)
            else:
                return None

    def get_catalog(self) -> Catalog:
        """Build the database > schema > relation > column tree for the UI."""
        raw_catalog = self._list_tables()
        db_items: list[CatalogItem] = []
        for db, schemas in raw_catalog.items():
            schema_items: list[CatalogItem] = []
            for schema, relations in schemas.items():
                rel_items: list[CatalogItem] = []
                for rel, rel_type in relations:
                    cols = self._list_columns_in_relation(
                        catalog_name=db, schema_name=schema, rel_name=rel
                    )
                    col_items = [
                        CatalogItem(
                            qualified_identifier=f'"{db}"."{schema}"."{rel}"."{col}"',
                            query_name=f'"{col}"',
                            label=col,
                            type_label=col_type,
                        )
                        for col, col_type in cols
                    ]
                    rel_items.append(
                        CatalogItem(
                            qualified_identifier=f'"{db}"."{schema}"."{rel}"',
                            query_name=f'"{db}"."{schema}"."{rel}"',
                            label=rel,
                            type_label=rel_type,
                            children=col_items,
                        )
                    )
                schema_items.append(
                    CatalogItem(
                        qualified_identifier=f'"{db}"."{schema}"',
                        query_name=f'"{db}"."{schema}"',
                        label=schema,
                        type_label="s",
                        children=rel_items,
                    )
                )
            db_items.append(
                CatalogItem(
                    qualified_identifier=f'"{db}"',
                    query_name=f'"{db}"',
                    label=db,
                    type_label="db",
                    children=schema_items,
                )
            )
        return Catalog(items=db_items)

    def _list_tables(self) -> dict[str, dict[str, list[tuple[str, str]]]]:
        """Return ``{db: {schema: [(relation, type_label), ...]}}``.

        Fix: the original applied the lowercased fallback label only on the
        branch that created a new db entry and used the raw driver string on
        the other two branches; the fallback is now applied uniformly.
        """
        cur = self.aux_conn.cursor()
        rel_type_map = {
            "TABLE": "t",
            "VIEW": "v",
            "SYSTEM TABLE": "st",
            "GLOBAL TEMPORARY": "tmp",
            "LOCAL TEMPORARY": "tmp",
        }
        catalog: dict[str, dict[str, list[tuple[str, str]]]] = {}
        for db_name, schema_name, rel_name, rel_type, *_ in cur.tables():
            label = rel_type_map.get(rel_type, str(rel_type).lower())
            catalog.setdefault(db_name, {}).setdefault(schema_name, []).append(
                (rel_name, label)
            )
        return catalog

    def _list_columns_in_relation(
        self, catalog_name: str, schema_name: str, rel_name: str
    ) -> list[tuple[str, str]]:
        """Return (column_name, type_name) pairs for one relation."""
        cur = self.aux_conn.cursor()
        raw_cols = cur.columns(table=rel_name, catalog=catalog_name, schema=schema_name)
        # ODBC SQLColumns layout: index 3 is COLUMN_NAME, index 5 is TYPE_NAME.
        return [(col[3], col[5]) for col in raw_cols]

    def get_completions(self) -> list[HarlequinCompletion]:
        """No adapter-specific completions; Harlequin's defaults apply."""
        return []
|
||||
|
||||
|
||||
class HarlequinOdbcAdapter(HarlequinAdapter):
    """Harlequin adapter that connects to a database via an ODBC driver.

    Expects exactly one positional connection string; all other CLI
    options are ignored.
    """

    ADAPTER_OPTIONS = ODBC_OPTIONS

    def __init__(self, conn_str: Sequence[str], **_: Any) -> None:
        self.conn_str = conn_str
        if len(conn_str) == 1:
            return
        message = (
            "The ODBC adapter expects exactly one connection string. "
            f"It received:\n{conn_str}"
        )
        raise HarlequinConfigError(
            title="Harlequin could not initialize the ODBC adapter.",
            msg=message,
        )

    def connect(self) -> HarlequinOdbcConnection:
        """Open and return a new connection using the stored conn string."""
        return HarlequinOdbcConnection(self.conn_str)
|
5
src/harlequin_odbc/cli_options.py
Normal file
5
src/harlequin_odbc/cli_options.py
Normal file
|
@ -0,0 +1,5 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from harlequin import HarlequinAdapterOption
|
||||
|
||||
# The ODBC adapter takes no CLI options beyond the connection string itself.
ODBC_OPTIONS: list[HarlequinAdapterOption] = []
|
0
src/harlequin_odbc/py.typed
Normal file
0
src/harlequin_odbc/py.typed
Normal file
103
tests/test_adapter.py
Normal file
103
tests/test_adapter.py
Normal file
|
@ -0,0 +1,103 @@
|
|||
import os
|
||||
import sys
|
||||
from typing import Generator
|
||||
|
||||
import pytest
|
||||
from harlequin.adapter import HarlequinAdapter, HarlequinConnection, HarlequinCursor
|
||||
from harlequin.catalog import Catalog, CatalogItem
|
||||
from harlequin.exception import HarlequinConnectionError, HarlequinQueryError
|
||||
from textual_fastdatatable.backend import create_backend
|
||||
|
||||
from harlequin_odbc.adapter import (
|
||||
HarlequinOdbcAdapter,
|
||||
HarlequinOdbcConnection,
|
||||
HarlequinOdbcCursor,
|
||||
)
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
from importlib_metadata import entry_points
|
||||
else:
|
||||
from importlib.metadata import entry_points
|
||||
|
||||
# Connection string for the test database, supplied by the environment;
# a KeyError here fails the whole test session fast if it is unset.
CONN_STR = os.environ["ODBC_CONN_STR"]
|
||||
|
||||
|
||||
def test_plugin_discovery() -> None:
    """The adapter must be discoverable via the harlequin.adapter entry point."""
    PLUGIN_NAME = "odbc"
    eps = entry_points(group="harlequin.adapter")
    assert eps[PLUGIN_NAME]
    adapter_cls = eps[PLUGIN_NAME].load()
    assert issubclass(adapter_cls, HarlequinAdapter)
    assert adapter_cls == HarlequinOdbcAdapter
|
||||
|
||||
|
||||
def test_connect() -> None:
    """Adapter.connect() should yield a HarlequinConnection instance."""
    adapter = HarlequinOdbcAdapter(conn_str=(CONN_STR,))
    assert isinstance(adapter.connect(), HarlequinConnection)
|
||||
|
||||
|
||||
def test_init_extra_kwargs() -> None:
    """Unknown keyword options must be ignored rather than raise."""
    adapter = HarlequinOdbcAdapter(conn_str=(CONN_STR,), foo=1, bar="baz")
    assert adapter.connect()
|
||||
|
||||
|
||||
def test_connect_raises_connection_error() -> None:
    """A bogus connection string should surface as HarlequinConnectionError."""
    adapter = HarlequinOdbcAdapter(conn_str=("foo",))
    with pytest.raises(HarlequinConnectionError):
        adapter.connect()
|
||||
|
||||
|
||||
@pytest.fixture
def connection() -> Generator[HarlequinOdbcConnection, None, None]:
    """Yield a live connection with a fresh `test` schema; drop it on teardown."""
    conn = HarlequinOdbcAdapter(conn_str=(CONN_STR,)).connect()
    conn.execute("drop schema if exists test;")
    conn.execute("create schema test;")
    yield conn
    # Teardown: remove any table created by a test, then the schema itself.
    conn.execute("drop table if exists test.foo;")
    conn.execute("drop schema if exists test;")
|
||||
|
||||
|
||||
def test_get_catalog(connection: HarlequinOdbcConnection) -> None:
    """get_catalog() returns a non-empty Catalog of CatalogItem nodes."""
    catalog = connection.get_catalog()
    assert isinstance(catalog, Catalog)
    assert catalog.items
    assert isinstance(catalog.items[0], CatalogItem)
|
||||
|
||||
|
||||
def test_execute_ddl(connection: HarlequinOdbcConnection) -> None:
    """DDL statements produce no result set, so execute() returns None."""
    cur = connection.execute("create table test.foo (a int)")
    assert cur is None
|
||||
|
||||
|
||||
def test_execute_select(connection: HarlequinOdbcConnection) -> None:
    """A simple SELECT returns a cursor whose data builds a 1x1 backend."""
    cursor = connection.execute("select 1 as a")
    assert isinstance(cursor, HarlequinOdbcCursor)
    # assert cursor.columns() == [("a", "##")]
    backend = create_backend(cursor.fetchall())
    assert backend.column_count == 1
    assert backend.row_count == 1
|
||||
|
||||
|
||||
def test_execute_select_dupe_cols(connection: HarlequinOdbcConnection) -> None:
    """Duplicate column names must all be preserved in the result."""
    cur = connection.execute("select 1 as a, 2 as a, 3 as a")
    assert isinstance(cur, HarlequinCursor)
    assert len(cur.columns()) == 3
    data = cur.fetchall()
    backend = create_backend(data)
    assert backend.column_count == 3
    assert backend.row_count == 1
|
||||
|
||||
|
||||
def test_set_limit(connection: HarlequinOdbcConnection) -> None:
    """set_limit(2) caps a 3-row result at 2 rows."""
    cursor = connection.execute("select 1 as a union all select 2 union all select 3")
    assert isinstance(cursor, HarlequinCursor)
    limited = cursor.set_limit(2)
    assert isinstance(limited, HarlequinCursor)
    backend = create_backend(limited.fetchall())
    assert backend.column_count == 1
    assert backend.row_count == 2
|
||||
|
||||
|
||||
def test_execute_raises_query_error(connection: HarlequinOdbcConnection) -> None:
    """Invalid SQL must surface as HarlequinQueryError."""
    with pytest.raises(HarlequinQueryError):
        _ = connection.execute("selec;")
|
Loading…
Add table
Reference in a new issue