Adding upstream version 0.12.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent
d887bee5ca
commit
148efc9122
69 changed files with 12923 additions and 0 deletions
94
tests/unit_tests/test_arrow_backend.py
Normal file
94
tests/unit_tests/test_arrow_backend.py
Normal file
|
@ -0,0 +1,94 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from pathlib import Path
|
||||
from typing import Sequence
|
||||
|
||||
import pyarrow as pa
|
||||
from textual_fastdatatable import ArrowBackend
|
||||
|
||||
|
||||
def test_from_records(records: list[tuple[str | int, ...]]) -> None:
    """With has_header=True, the first record supplies the column labels."""
    built = ArrowBackend.from_records(records, has_header=True)
    assert built.column_count == 3
    assert built.row_count == 5
    assert tuple(built.columns) == records[0]
|
||||
|
||||
|
||||
def test_from_records_no_header(records: list[tuple[str | int, ...]]) -> None:
    """Without a header row, columns get auto-generated f0/f1/... labels."""
    data_rows = records[1:]
    built = ArrowBackend.from_records(data_rows, has_header=False)
    assert built.column_count == 3
    assert built.row_count == 5
    assert tuple(built.columns) == ("f0", "f1", "f2")
|
||||
|
||||
|
||||
def test_from_pydict(pydict: dict[str, Sequence[str | int]]) -> None:
    """A dict of columns maps keys to labels and value sequences to rows."""
    built = ArrowBackend.from_pydict(pydict)
    assert built.column_count == 3
    assert built.row_count == 5
    assert built.source_row_count == 5
    assert tuple(built.columns) == tuple(pydict.keys())
|
||||
|
||||
|
||||
def test_from_pydict_with_limit(pydict: dict[str, Sequence[str | int]]) -> None:
    """max_rows caps the visible rows; source_row_count keeps the full total."""
    limited = ArrowBackend.from_pydict(pydict, max_rows=2)
    assert limited.column_count == 3
    assert limited.row_count == 2
    assert limited.source_row_count == 5
    assert tuple(limited.columns) == tuple(pydict.keys())
|
||||
|
||||
|
||||
def test_from_parquet(pydict: dict[str, Sequence[str | int]], tmp_path: Path) -> None:
    """Round-trip: a table written to a parquet file is read back equal.

    Uses pytest's tmp_path fixture so the file is isolated per test run.
    """
    # `import pyarrow as pa` does NOT load the `pyarrow.parquet` submodule, so
    # the original `pa.parquet.write_table(...)` only worked if something else
    # had already imported it. Import it explicitly to remove that fragility.
    import pyarrow.parquet as pq

    tbl = pa.Table.from_pydict(pydict)
    p = tmp_path / "test.parquet"
    pq.write_table(tbl, str(p))

    backend = ArrowBackend.from_parquet(p)
    assert backend.data.equals(tbl)
|
||||
|
||||
|
||||
def test_empty_query() -> None:
    """A column with zero rows reports a content width of zero."""
    empty_column: dict[str, list] = {"a": []}
    built = ArrowBackend.from_pydict(empty_column)
    assert built.column_content_widths == [0]
|
||||
|
||||
|
||||
def test_dupe_column_labels() -> None:
    """Duplicate column names must not break column/row addressing."""
    values = pa.array([0, 1, 2, 3])
    duped = pa.table([values, values, values], names=["a", "a", "a"])
    built = ArrowBackend(data=duped)
    assert built.column_count == 3
    assert built.row_count == 4
    assert built.get_row_at(2) == [2, 2, 2]
|
||||
|
||||
|
||||
def test_timestamp_with_tz() -> None:
    """
    Ensure datetimes with offsets but no names do not crash the data table
    when casting to string.
    """
    five_behind = timezone(offset=timedelta(hours=-5))
    stamp = datetime(2024, 1, 1, hour=15, tzinfo=five_behind)
    table = pa.table([pa.array([stamp, stamp, stamp])], names=["created_at"])
    built = ArrowBackend(data=table)
    # 29 = rendered width of a value like "2024-01-01 15:00:00-05:00" as
    # stringified by the backend — presumably; confirmed by the assertion.
    assert built.column_content_widths == [29]
|
||||
|
||||
|
||||
def test_mixed_types() -> None:
    """Records whose column mixes int and str fall back to string values."""
    rows = [(1000,), ("hi",)]
    built = ArrowBackend.from_records(records=rows)
    assert built
    assert built.row_count == 2
    assert built.get_row_at(0) == ["1000"]
    assert built.get_row_at(1) == ["hi"]
|
||||
|
||||
|
||||
def test_negative_timestamps() -> None:
    """Timestamps before the epoch (here: year 1) clamp to ``datetime.min``
    instead of crashing or wrapping around."""
    dt = datetime(1, 1, 1, tzinfo=timezone.utc)
    arr = pa.array([dt, dt, dt], type=pa.timestamp("s", tz="America/New_York"))
    tab = pa.table([arr], names=["created_at"])
    backend = ArrowBackend(data=tab)
    assert backend.column_content_widths == [26]
    assert backend.get_column_at(0) == [datetime.min, datetime.min, datetime.min]
    assert backend.get_row_at(0) == [datetime.min]
    # Compare by value, not identity: the backend's contract is to return an
    # equal datetime, not the exact `datetime.min` singleton object, so `is`
    # made the test brittle against implementation details.
    assert backend.get_cell_at(0, 0) == datetime.min
|
109
tests/unit_tests/test_backends.py
Normal file
109
tests/unit_tests/test_backends.py
Normal file
|
@ -0,0 +1,109 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from textual_fastdatatable.backend import DataTableBackend
|
||||
|
||||
|
||||
def test_column_content_widths(backend: DataTableBackend) -> None:
    """Widths reflect the longest rendered value in each of the 3 columns."""
    expected_widths = [1, 8, 6]
    assert backend.column_content_widths == expected_widths
|
||||
|
||||
|
||||
def test_get_row_at(backend: DataTableBackend) -> None:
    """Rows are fetched by zero-based index; anything out of range raises."""
    first_row = backend.get_row_at(0)
    assert first_row == [1, "a", "foo"]
    last_row = backend.get_row_at(4)
    assert last_row == [5, "asdfasdf", "foofoo"]
    # Both past-the-end and negative indices are rejected.
    for bad_index in (10, -1):
        with pytest.raises(IndexError):
            backend.get_row_at(bad_index)
|
||||
|
||||
|
||||
def test_get_column_at(backend: DataTableBackend) -> None:
    """Columns are fetched by zero-based index; out-of-range raises."""
    first_col = backend.get_column_at(0)
    assert first_col == [1, 2, 3, 4, 5]
    last_col = backend.get_column_at(2)
    assert last_col == ["foo", "bar", "baz", "qux", "foofoo"]

    with pytest.raises(IndexError):
        backend.get_column_at(10)
|
||||
|
||||
|
||||
def test_get_cell_at(backend: DataTableBackend) -> None:
    """Cells are addressed (row, column); either index out of range raises."""
    assert backend.get_cell_at(0, 0) == 1
    assert backend.get_cell_at(4, 1) == "asdfasdf"
    # Row out of range first, then column out of range.
    for bad_row, bad_col in ((10, 0), (0, 10)):
        with pytest.raises(IndexError):
            backend.get_cell_at(bad_row, bad_col)
|
||||
|
||||
|
||||
def test_append_column(backend: DataTableBackend) -> None:
    """Appending a column grows the width, fills with the default value, and
    leaves the pre-existing columns untouched."""
    before = backend.data

    backend.append_column("new")
    assert backend.column_count == 4
    assert backend.row_count == 5
    # No default supplied: the new column is all nulls.
    assert backend.get_column_at(3) == [None] * backend.row_count

    backend.append_column("def", default="zzz")
    assert backend.column_count == 5
    assert backend.row_count == 5
    assert backend.get_column_at(4) == ["zzz"] * backend.row_count

    # The original three columns are unchanged by both appends.
    assert backend.data.select(["first column", "two", "three"]).equals(before)
|
||||
|
||||
|
||||
def test_append_rows(backend: DataTableBackend) -> None:
    """Appending rows grows the row count and widens columns as needed."""
    before = backend.data

    backend.append_rows([(6, "w", "x"), (7, "y", "z")])
    assert backend.column_count == 3
    assert backend.row_count == 7
    # New values are all narrower than existing ones: widths unchanged.
    assert backend.column_content_widths == [1, 8, 6]

    backend.append_rows([(999, "w" * 12, "x" * 15)])
    assert backend.column_count == 3
    assert backend.row_count == 8
    # The wide row pushes every column's content width up.
    assert backend.column_content_widths == [3, 12, 15]

    # The first five rows are still exactly the original data.
    assert backend.data.slice(0, 5).equals(before)
|
||||
|
||||
|
||||
def test_drop_row(backend: DataTableBackend) -> None:
    """Dropping rows shrinks the table; an out-of-range index raises."""
    backend.drop_row(0)
    assert backend.row_count == 4
    assert backend.column_count == 3
    assert backend.column_content_widths == [1, 8, 6]

    backend.drop_row(3)
    assert backend.row_count == 3
    assert backend.column_count == 3
    # Widths recompute once the widest values have been dropped.
    assert backend.column_content_widths == [1, 1, 3]

    # Only 3 rows remain, so index 3 is now out of range.
    with pytest.raises(IndexError):
        backend.drop_row(3)
|
||||
|
||||
|
||||
def test_update_cell(backend: DataTableBackend) -> None:
    """Updating a cell changes only that value; widths track new content."""
    backend.update_cell(0, 0, 0)
    assert backend.get_column_at(0) == [0, 2, 3, 4, 5]
    assert backend.row_count == 5
    assert backend.column_count == 3
    # Same-width replacement: content widths do not move.
    assert backend.column_content_widths == [1, 8, 6]

    wide_value = "z" * 50
    backend.update_cell(3, 1, wide_value)
    assert backend.get_row_at(3) == [4, wide_value, "qux"]
    assert backend.row_count == 5
    assert backend.column_count == 3
    # The oversized value widens its column only.
    assert backend.column_content_widths == [1, 50, 6]
|
||||
|
||||
|
||||
def test_sort(backend: DataTableBackend) -> None:
    """Sorting by column name and by (name, direction) pairs reorders rows,
    and sorting back by the unique first column restores the original table."""
    original_table = backend.data
    # list() already returns a fresh list; the original `.copy()` on top of it
    # was a redundant second copy.
    original_col_one = list(backend.get_column_at(0))
    original_col_two = list(backend.get_column_at(1))

    backend.sort(by="two")
    assert backend.get_column_at(0) != original_col_one
    assert backend.get_column_at(1) == sorted(original_col_two)

    backend.sort(by=[("two", "descending")])
    assert backend.get_column_at(0) != original_col_one
    assert backend.get_column_at(1) == sorted(original_col_two, reverse=True)

    backend.sort(by=[("first column", "ascending")])
    assert backend.data.equals(original_table)
|
54
tests/unit_tests/test_create_backend.py
Normal file
54
tests/unit_tests/test_create_backend.py
Normal file
|
@ -0,0 +1,54 @@
|
|||
from datetime import date, datetime
|
||||
|
||||
import pyarrow as pa
|
||||
from textual_fastdatatable.backend import create_backend
|
||||
|
||||
# Largest signed 32-bit and 64-bit integers: used below as raw storage values
# to overflow Arrow's date32/date64/timestamp types.
MAX_32BIT_INT = 2**31 - 1
MAX_64BIT_INT = 2**63 - 1
|
||||
|
||||
|
||||
def test_empty_sequence() -> None:
    """An empty data sequence yields a truthy backend with no rows/columns."""
    empty = create_backend(data=[])
    assert empty
    assert empty.row_count == 0
    assert empty.column_count == 0
    assert empty.columns == []
    assert empty.column_content_widths == []
|
||||
|
||||
|
||||
def test_infinity_timestamps() -> None:
    """Out-of-range date/timestamp storage values clamp to the Python
    date/datetime min/max instead of raising on conversion."""
    from_py = create_backend(
        data={"dt": [date.max, date.min], "ts": [datetime.max, datetime.min]}
    )
    assert from_py
    assert from_py.row_count == 2

    def extremes(limit, arrow_type):
        # A [+limit, -limit] pair of scalars in the given Arrow type.
        return [pa.scalar(limit, type=arrow_type), pa.scalar(-limit, type=arrow_type)]

    from_arrow = create_backend(
        data=pa.table(
            {
                "dt32": extremes(MAX_32BIT_INT, pa.date32()),
                "dt64": extremes(MAX_64BIT_INT, pa.date64()),
                "ts": extremes(MAX_64BIT_INT, pa.timestamp("s")),
                "tns": extremes(MAX_64BIT_INT, pa.timestamp("ns")),
            }
        )
    )
    assert from_arrow
    assert from_arrow.row_count == 2
    # Row 0 holds the positive extremes, row 1 the negative ones.
    assert from_arrow.get_row_at(0) == [date.max, date.max, datetime.max, datetime.max]
    assert from_arrow.get_row_at(1) == [date.min, date.min, datetime.min, datetime.min]
    assert from_arrow.get_column_at(0) == [date.max, date.min]
    assert from_arrow.get_column_at(2) == [datetime.max, datetime.min]
    assert from_arrow.get_cell_at(0, 0) == date.max
|
Loading…
Add table
Add a link
Reference in a new issue