Adding upstream version 2.6.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent
8c9bf727ad
commit
ac48298504
43 changed files with 4471 additions and 0 deletions
21
.github/dependabot.yml
vendored
Normal file
21
.github/dependabot.yml
vendored
Normal file
|
@ -0,0 +1,21 @@
|
|||
version: 2
|
||||
|
||||
updates:
|
||||
# GitHub Actions
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
commit-message:
|
||||
prefix: ⬆
|
||||
# Python
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
groups:
|
||||
python-packages:
|
||||
patterns:
|
||||
- "*"
|
||||
commit-message:
|
||||
prefix: ⬆
|
102
.github/workflows/ci.yml
vendored
Normal file
102
.github/workflows/ci.yml
vendored
Normal file
|
@ -0,0 +1,102 @@
|
|||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
tags:
|
||||
- '**'
|
||||
pull_request: {}
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: set up python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
- name: install
|
||||
run: |
|
||||
pip install -r requirements/pyproject.txt && pip install -r requirements/linting.txt
|
||||
|
||||
- uses: pre-commit/action@v3.0.1
|
||||
with:
|
||||
extra_args: --all-files --verbose
|
||||
|
||||
test:
|
||||
name: test py${{ matrix.python-version }} on ${{ matrix.os }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu, macos, windows]
|
||||
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
|
||||
|
||||
runs-on: ${{ matrix.os }}-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: set up python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- run: |
|
||||
pip install -r requirements/pyproject.txt && pip install -r requirements/testing.txt
|
||||
|
||||
- run: pip freeze
|
||||
|
||||
- run: make test
|
||||
env:
|
||||
CONTEXT: ${{ runner.os }}-py${{ matrix.python-version }}-with-deps
|
||||
|
||||
- run: coverage xml
|
||||
- uses: codecov/codecov-action@v4
|
||||
|
||||
# https://github.com/marketplace/actions/alls-green#why used for branch protection checks
|
||||
check:
|
||||
if: always()
|
||||
needs: [lint, test]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Decide whether the needed jobs succeeded or failed
|
||||
uses: re-actors/alls-green@release/v1
|
||||
with:
|
||||
jobs: ${{ toJSON(needs) }}
|
||||
|
||||
release:
|
||||
name: Release
|
||||
needs: [check]
|
||||
if: "success() && startsWith(github.ref, 'refs/tags/')"
|
||||
runs-on: ubuntu-latest
|
||||
environment: release
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: set up python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
- name: install
|
||||
run: pip install -U build
|
||||
|
||||
- name: check GITHUB_REF matches package version
|
||||
uses: samuelcolvin/check-python-version@v4.1
|
||||
with:
|
||||
version_file_path: pydantic_extra_types/__init__.py
|
||||
|
||||
- name: build
|
||||
run: python -m build
|
||||
|
||||
- name: Upload package to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
27
.gitignore
vendored
Normal file
27
.gitignore
vendored
Normal file
|
@ -0,0 +1,27 @@
|
|||
.idea/
|
||||
env/
|
||||
venv/
|
||||
.venv/
|
||||
env3*/
|
||||
Pipfile
|
||||
*.lock
|
||||
*.py[cod]
|
||||
*.egg-info/
|
||||
/build/
|
||||
dist/
|
||||
.cache/
|
||||
.mypy_cache/
|
||||
test.py
|
||||
.coverage
|
||||
.hypothesis
|
||||
/htmlcov/
|
||||
/site/
|
||||
/site.zip
|
||||
.pytest_cache/
|
||||
.vscode/
|
||||
_build/
|
||||
.auto-format
|
||||
/sandbox/
|
||||
/.ghtopdep_cache/
|
||||
/worktrees/
|
||||
.ruff_cache/
|
24
.pre-commit-config.yaml
Normal file
24
.pre-commit-config.yaml
Normal file
|
@ -0,0 +1,24 @@
|
|||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.5.0
|
||||
hooks:
|
||||
- id: check-yaml
|
||||
args: ['--unsafe']
|
||||
- id: check-toml
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: lint
|
||||
name: Lint
|
||||
entry: make lint
|
||||
types: [python]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
- id: mypy
|
||||
name: Mypy
|
||||
entry: make mypy
|
||||
types: [python]
|
||||
language: system
|
||||
pass_filenames: false
|
82
HISTORY.md
Normal file
82
HISTORY.md
Normal file
|
@ -0,0 +1,82 @@
|
|||
# CHANGELOG
|
||||
|
||||
## v2.6.0
|
||||
|
||||
* Allow python-ulid 2.x on Python 3.9 and later by @musicinmybrain in <https://github.com/pydantic/pydantic-extra-types/pull/131>
|
||||
* Do not pin the ”major” version of pycountry by @musicinmybrain in <https://github.com/pydantic/pydantic-extra-types/pull/132>
|
||||
* 🤖 Create dependabot.yml for updating GitHub action by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/134>
|
||||
* :memo: Refactor Documentation for ISBN and MAC address modules by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/124>
|
||||
* Add language code definitions and test by @07pepa in <https://github.com/pydantic/pydantic-extra-types/pull/141>
|
||||
* :memo: Create a `changelog` to match release notes by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/142>
|
||||
* Add currency code ISO 4217 and its subset that includes only currencies by @07pepa in <https://github.com/pydantic/pydantic-extra-types/pull/143>
|
||||
* 🔨 Update code formatting and linting configurations by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/144>
|
||||
* 👷 Add Python checking for dependencies by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/145>
|
||||
* 🐛 fix single quote issue by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/148>
|
||||
|
||||
## v2.5.0
|
||||
|
||||
* Add Pendulum DT support by @theunkn0wn1 in <https://github.com/pydantic/pydantic-extra-types/pull/110>
|
||||
|
||||
## v2.4.1
|
||||
|
||||
* Fix refs blocking docs build by @sydney-runkle in <https://github.com/pydantic/pydantic-extra-types/pull/125>
|
||||
|
||||
## v2.4.0
|
||||
|
||||
* Add: New type ISBN by @lucasmucidas in <https://github.com/pydantic/pydantic-extra-types/pull/116>
|
||||
* fix validate_digits actually allowing non digit characters by @romaincaillon in <https://github.com/pydantic/pydantic-extra-types/pull/120>
|
||||
* ♻️ refactor the `validate_brand` method & add new types by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/56>
|
||||
* ✅ Drop python 3.7 & support 3.12 by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/122>
|
||||
|
||||
## v2.3.0
|
||||
|
||||
* Upgrade pydantic version to >=2.5.2 by @hramezani in <https://github.com/pydantic/pydantic-extra-types/pull/113>
|
||||
|
||||
## v.2.2.0
|
||||
|
||||
* Add `long` and `short` format to `as_hex` by @DJRHails in <https://github.com/pydantic/pydantic-extra-types/pull/93>
|
||||
* Refactor documentation by @Kludex in <https://github.com/pydantic/pydantic-extra-types/pull/98>
|
||||
* ✨ add `ULID` type by @JeanArhancet in <https://github.com/pydantic/pydantic-extra-types/pull/73>
|
||||
* Added `__get_pydantic_json_schema__` method with `format='tel'` by @hasansezertasan in <https://github.com/pydantic/pydantic-extra-types/pull/106>
|
||||
|
||||
## v2.1.0
|
||||
|
||||
* ✨ add `MacAddress` type by @JeanArhancet in <https://github.com/pydantic/pydantic-extra-types/pull/71>
|
||||
* :memo: fix usage of `MAC address` by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/72>
|
||||
* Add docstrings for payment cards by @tpdorsey in <https://github.com/pydantic/pydantic-extra-types/pull/77>
|
||||
* Fix mac adddress validation by @JeanArhancet in <https://github.com/pydantic/pydantic-extra-types/pull/79>
|
||||
* Remove work in progress part from README.md by @hramezani in <https://github.com/pydantic/pydantic-extra-types/pull/81>
|
||||
* Add `Latitude`, `Longitude` and `Coordinate` by @JeanArhancet in <https://github.com/pydantic/pydantic-extra-types/pull/76>
|
||||
* Refactor: use stdlib and remove useless code by @eumiro in <https://github.com/pydantic/pydantic-extra-types/pull/86>
|
||||
* Make Latitude and Longitude evaluated by @Kludex in <https://github.com/pydantic/pydantic-extra-types/pull/90>
|
||||
|
||||
## v2.0.0
|
||||
|
||||
* Migrate `Color` & `Payment Card` by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/2>
|
||||
* add `pydantic` to classifiers by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/13>
|
||||
* remove dependencies caching by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/16>
|
||||
* :bug: deprecate `__modify_schema__` method by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/20>
|
||||
* Fix Color JSON schema generation by @dmontagu in <https://github.com/pydantic/pydantic-extra-types/pull/21>
|
||||
* fix issues of `pydantic_core.core_schema` has no attribute `xxx` by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/23>
|
||||
* Fix Failed tests for `color` type by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/26>
|
||||
* Created Country type by @HomiGrotas in <https://github.com/pydantic/pydantic-extra-types/pull/14>
|
||||
* Add phone number types by @JamesHutchison in <https://github.com/pydantic/pydantic-extra-types/pull/25>
|
||||
* make `phonenumbers` a requirement by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/29>
|
||||
* chore(feat): Add ABARouting number type by @RevinderDev in <https://github.com/pydantic/pydantic-extra-types/pull/30>
|
||||
* add missing countries by @EssaAlshammri in <https://github.com/pydantic/pydantic-extra-types/pull/32>
|
||||
* chore: resolve `pydantic-core` dependency conflict by @hirotasoshu in <https://github.com/pydantic/pydantic-extra-types/pull/45>
|
||||
* Add `MIR` card brand by @hirotasoshu in <https://github.com/pydantic/pydantic-extra-types/pull/46>
|
||||
* fix dependencies version by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/48>
|
||||
* 📝 Add documentation for `Color` and `PaymentCardNumber` by @Kludex in <https://github.com/pydantic/pydantic-extra-types/pull/50>
|
||||
* Add hooky by @Kludex in <https://github.com/pydantic/pydantic-extra-types/pull/51>
|
||||
* ♻️ Simplify project structure by @Kludex in <https://github.com/pydantic/pydantic-extra-types/pull/52>
|
||||
* 👷 Add coverage check on the pipeline by @Kludex in <https://github.com/pydantic/pydantic-extra-types/pull/53>
|
||||
* ♻️ refactor country type using `pycountry` by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/54>
|
||||
* ✅ Add 100% coverage by @Kludex in <https://github.com/pydantic/pydantic-extra-types/pull/57>
|
||||
* Add support for transparent Color by @CollinHeist in <https://github.com/pydantic/pydantic-extra-types/pull/59>
|
||||
* 📝 Add documentation for `PhoneNumber` and `ABARoutingNumber` by @Kludex in <https://github.com/pydantic/pydantic-extra-types/pull/60>
|
||||
* 📝 Refactor README by @Kludex in <https://github.com/pydantic/pydantic-extra-types/pull/61>
|
||||
* 🚚 Rename `routing_number.md` to `routing_numbers.md` by @Kludex in <https://github.com/pydantic/pydantic-extra-types/pull/62>
|
||||
* :memo: fix code in `payment` documentation by @yezz123 in <https://github.com/pydantic/pydantic-extra-types/pull/63>
|
||||
* uprev pydantic to b3 by @samuelcolvin in <https://github.com/pydantic/pydantic-extra-types/pull/69>
|
||||
* Prepare for release 2.0.0 by @hramezani in <https://github.com/pydantic/pydantic-extra-types/pull/70>
|
21
LICENSE
Normal file
21
LICENSE
Normal file
|
@ -0,0 +1,21 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2023 Samuel Colvin and other contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
70
Makefile
Normal file
70
Makefile
Normal file
|
@ -0,0 +1,70 @@
|
|||
.DEFAULT_GOAL := all
|
||||
sources = pydantic_extra_types tests
|
||||
|
||||
.PHONY: install
|
||||
install:
|
||||
python -m pip install -U pip
|
||||
pip install -r requirements/all.txt
|
||||
pip install -e .
|
||||
|
||||
.PHONY: refresh-lockfiles
|
||||
refresh-lockfiles:
|
||||
@echo "Updating requirements/*.txt files using pip-compile"
|
||||
find requirements/ -name '*.txt' ! -name 'all.txt' -type f -delete
|
||||
pip-compile -q --no-emit-index-url --resolver backtracking -o requirements/linting.txt requirements/linting.in
|
||||
pip-compile -q --no-emit-index-url --resolver backtracking -o requirements/testing.txt requirements/testing.in
|
||||
pip-compile -q --no-emit-index-url --resolver backtracking --extra all -o requirements/pyproject.txt pyproject.toml
|
||||
pip install --dry-run -r requirements/all.txt
|
||||
|
||||
.PHONY: format
|
||||
format:
|
||||
ruff --fix $(sources)
|
||||
ruff format $(sources)
|
||||
|
||||
.PHONY: lint
|
||||
lint:
|
||||
ruff $(sources)
|
||||
ruff format --check $(sources)
|
||||
|
||||
.PHONY: mypy
|
||||
mypy:
|
||||
mypy pydantic_extra_types
|
||||
|
||||
.PHONY: test
|
||||
test:
|
||||
coverage run -m pytest --durations=10
|
||||
|
||||
.PHONY: testcov
|
||||
testcov: test
|
||||
@echo "building coverage html"
|
||||
@coverage html
|
||||
|
||||
.PHONY: testcov-compile
|
||||
testcov-compile: build-trace test
|
||||
@echo "building coverage html"
|
||||
@coverage html
|
||||
|
||||
.PHONY: all
|
||||
all: lint mypy testcov
|
||||
|
||||
.PHONY: clean
|
||||
clean:
|
||||
rm -rf `find . -name __pycache__`
|
||||
rm -f `find . -type f -name '*.py[co]'`
|
||||
rm -f `find . -type f -name '*~'`
|
||||
rm -f `find . -type f -name '.*~'`
|
||||
rm -rf .cache
|
||||
rm -rf .pytest_cache
|
||||
rm -rf .mypy_cache
|
||||
rm -rf htmlcov
|
||||
rm -rf *.egg-info
|
||||
rm -f .coverage
|
||||
rm -f .coverage.*
|
||||
rm -rf build
|
||||
rm -rf dist
|
||||
rm -rf coverage.xml
|
||||
rm -rf .ruff_cache
|
||||
|
||||
.PHONY: pre-commit
|
||||
pre-commit:
|
||||
pre-commit run --all-files --show-diff-on-failure
|
10
README.md
Normal file
10
README.md
Normal file
|
@ -0,0 +1,10 @@
|
|||
# Pydantic Extra Types
|
||||
|
||||
[![CI](https://github.com/pydantic/pydantic-extra-types/workflows/CI/badge.svg?event=push)](https://github.com/pydantic/pydantic-extra-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
|
||||
[![Coverage](https://codecov.io/gh/pydantic/pydantic-extra-types/branch/main/graph/badge.svg)](https://codecov.io/gh/pydantic/pydantic-extra-types)
|
||||
[![pypi](https://img.shields.io/pypi/v/pydantic-extra-types.svg)](https://pypi.python.org/pypi/pydantic-extra-types)
|
||||
[![license](https://img.shields.io/github/license/pydantic/pydantic-extra-types.svg)](https://github.com/pydantic/pydantic-extra-types/blob/main/LICENSE)
|
||||
|
||||
A place for pydantic types that probably shouldn't exist in the main pydantic lib.
|
||||
|
||||
See [pydantic/pydantic#5012](https://github.com/pydantic/pydantic/issues/5012) for more info.
|
1
pydantic_extra_types/__init__.py
Normal file
1
pydantic_extra_types/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
__version__ = '2.6.0'
|
636
pydantic_extra_types/color.py
Normal file
636
pydantic_extra_types/color.py
Normal file
|
@ -0,0 +1,636 @@
|
|||
"""
|
||||
Color definitions are used as per the CSS3
|
||||
[CSS Color Module Level 3](http://www.w3.org/TR/css3-color/#svg-color) specification.
|
||||
|
||||
A few colors have multiple names referring to the sames colors, eg. `grey` and `gray` or `aqua` and `cyan`.
|
||||
|
||||
In these cases the _last_ color when sorted alphabetically takes preferences,
|
||||
eg. `Color((0, 255, 255)).as_named() == 'cyan'` because "cyan" comes after "aqua".
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import math
|
||||
import re
|
||||
from colorsys import hls_to_rgb, rgb_to_hls
|
||||
from typing import Any, Callable, Literal, Tuple, Union, cast
|
||||
|
||||
from pydantic import GetJsonSchemaHandler
|
||||
from pydantic._internal import _repr
|
||||
from pydantic.json_schema import JsonSchemaValue
|
||||
from pydantic_core import CoreSchema, PydanticCustomError, core_schema
|
||||
|
||||
ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]]
|
||||
ColorType = Union[ColorTuple, str, 'Color']
|
||||
HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]]
|
||||
|
||||
|
||||
class RGBA:
|
||||
"""
|
||||
Internal use only as a representation of a color.
|
||||
"""
|
||||
|
||||
__slots__ = 'r', 'g', 'b', 'alpha', '_tuple'
|
||||
|
||||
def __init__(self, r: float, g: float, b: float, alpha: float | None):
|
||||
self.r = r
|
||||
self.g = g
|
||||
self.b = b
|
||||
self.alpha = alpha
|
||||
|
||||
self._tuple: tuple[float, float, float, float | None] = (r, g, b, alpha)
|
||||
|
||||
def __getitem__(self, item: Any) -> Any:
|
||||
return self._tuple[item]
|
||||
|
||||
|
||||
# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached
|
||||
_r_255 = r'(\d{1,3}(?:\.\d+)?)'
|
||||
_r_comma = r'\s*,\s*'
|
||||
_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)'
|
||||
_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?'
|
||||
_r_sl = r'(\d{1,3}(?:\.\d+)?)%'
|
||||
r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*'
|
||||
r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*'
|
||||
# CSS3 RGB examples: rgb(0, 0, 0), rgba(0, 0, 0, 0.5), rgba(0, 0, 0, 50%)
|
||||
r_rgb = rf'\s*rgba?\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}(?:{_r_comma}{_r_alpha})?\s*\)\s*'
|
||||
# CSS3 HSL examples: hsl(270, 60%, 50%), hsla(270, 60%, 50%, 0.5), hsla(270, 60%, 50%, 50%)
|
||||
r_hsl = rf'\s*hsla?\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}(?:{_r_comma}{_r_alpha})?\s*\)\s*'
|
||||
# CSS4 RGB examples: rgb(0 0 0), rgb(0 0 0 / 0.5), rgb(0 0 0 / 50%), rgba(0 0 0 / 50%)
|
||||
r_rgb_v4_style = rf'\s*rgba?\(\s*{_r_255}\s+{_r_255}\s+{_r_255}(?:\s*/\s*{_r_alpha})?\s*\)\s*'
|
||||
# CSS4 HSL examples: hsl(270 60% 50%), hsl(270 60% 50% / 0.5), hsl(270 60% 50% / 50%), hsla(270 60% 50% / 50%)
|
||||
r_hsl_v4_style = rf'\s*hsla?\(\s*{_r_h}\s+{_r_sl}\s+{_r_sl}(?:\s*/\s*{_r_alpha})?\s*\)\s*'
|
||||
|
||||
# colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used
|
||||
repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'}
|
||||
rads = 2 * math.pi
|
||||
|
||||
|
||||
class Color(_repr.Representation):
|
||||
"""
|
||||
Represents a color.
|
||||
"""
|
||||
|
||||
__slots__ = '_original', '_rgba'
|
||||
|
||||
def __init__(self, value: ColorType) -> None:
|
||||
self._rgba: RGBA
|
||||
self._original: ColorType
|
||||
if isinstance(value, (tuple, list)):
|
||||
self._rgba = parse_tuple(value)
|
||||
elif isinstance(value, str):
|
||||
self._rgba = parse_str(value)
|
||||
elif isinstance(value, Color):
|
||||
self._rgba = value._rgba
|
||||
value = value._original
|
||||
else:
|
||||
raise PydanticCustomError(
|
||||
'color_error',
|
||||
'value is not a valid color: value must be a tuple, list or string',
|
||||
)
|
||||
|
||||
# if we've got here value must be a valid color
|
||||
self._original = value
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_json_schema__(
|
||||
cls, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
|
||||
) -> JsonSchemaValue:
|
||||
field_schema: dict[str, Any] = {}
|
||||
field_schema.update(type='string', format='color')
|
||||
return field_schema
|
||||
|
||||
def original(self) -> ColorType:
|
||||
"""
|
||||
Original value passed to `Color`.
|
||||
"""
|
||||
return self._original
|
||||
|
||||
def as_named(self, *, fallback: bool = False) -> str:
|
||||
"""
|
||||
Returns the name of the color if it can be found in `COLORS_BY_VALUE` dictionary,
|
||||
otherwise returns the hexadecimal representation of the color or raises `ValueError`.
|
||||
|
||||
Args:
|
||||
fallback: If True, falls back to returning the hexadecimal representation of
|
||||
the color instead of raising a ValueError when no named color is found.
|
||||
|
||||
Returns:
|
||||
The name of the color, or the hexadecimal representation of the color.
|
||||
|
||||
Raises:
|
||||
ValueError: When no named color is found and fallback is `False`.
|
||||
"""
|
||||
if self._rgba.alpha is None:
|
||||
rgb = cast(Tuple[int, int, int], self.as_rgb_tuple())
|
||||
try:
|
||||
return COLORS_BY_VALUE[rgb]
|
||||
except KeyError as e:
|
||||
if fallback:
|
||||
return self.as_hex()
|
||||
else:
|
||||
raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e
|
||||
else:
|
||||
return self.as_hex()
|
||||
|
||||
def as_hex(self, format: Literal['short', 'long'] = 'short') -> str:
|
||||
"""Returns the hexadecimal representation of the color.
|
||||
|
||||
Hex string representing the color can be 3, 4, 6, or 8 characters depending on whether the string
|
||||
a "short" representation of the color is possible and whether there's an alpha channel.
|
||||
|
||||
Returns:
|
||||
The hexadecimal representation of the color.
|
||||
"""
|
||||
values = [float_to_255(c) for c in self._rgba[:3]]
|
||||
if self._rgba.alpha is not None:
|
||||
values.append(float_to_255(self._rgba.alpha))
|
||||
|
||||
as_hex = ''.join(f'{v:02x}' for v in values)
|
||||
if format == 'short' and all(c in repeat_colors for c in values):
|
||||
as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2))
|
||||
return '#' + as_hex
|
||||
|
||||
def as_rgb(self) -> str:
|
||||
"""
|
||||
Color as an `rgb(<r>, <g>, <b>)` or `rgba(<r>, <g>, <b>, <a>)` string.
|
||||
"""
|
||||
if self._rgba.alpha is None:
|
||||
return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})'
|
||||
else:
|
||||
return (
|
||||
f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, '
|
||||
f'{round(self._alpha_float(), 2)})'
|
||||
)
|
||||
|
||||
def as_rgb_tuple(self, *, alpha: bool | None = None) -> ColorTuple:
|
||||
"""
|
||||
Returns the color as an RGB or RGBA tuple.
|
||||
|
||||
Args:
|
||||
alpha: Whether to include the alpha channel. There are three options for this input:
|
||||
|
||||
- `None` (default): Include alpha only if it's set. (e.g. not `None`)
|
||||
- `True`: Always include alpha.
|
||||
- `False`: Always omit alpha.
|
||||
|
||||
Returns:
|
||||
A tuple that contains the values of the red, green, and blue channels in the range 0 to 255.
|
||||
If alpha is included, it is in the range 0 to 1.
|
||||
"""
|
||||
r, g, b = (float_to_255(c) for c in self._rgba[:3])
|
||||
if alpha is None:
|
||||
if self._rgba.alpha is None:
|
||||
return r, g, b
|
||||
else:
|
||||
return r, g, b, self._alpha_float()
|
||||
elif alpha:
|
||||
return r, g, b, self._alpha_float()
|
||||
else:
|
||||
# alpha is False
|
||||
return r, g, b
|
||||
|
||||
def as_hsl(self) -> str:
|
||||
"""
|
||||
Color as an `hsl(<h>, <s>, <l>)` or `hsl(<h>, <s>, <l>, <a>)` string.
|
||||
"""
|
||||
if self._rgba.alpha is None:
|
||||
h, s, li = self.as_hsl_tuple(alpha=False) # type: ignore
|
||||
return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})'
|
||||
else:
|
||||
h, s, li, a = self.as_hsl_tuple(alpha=True) # type: ignore
|
||||
return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})'
|
||||
|
||||
def as_hsl_tuple(self, *, alpha: bool | None = None) -> HslColorTuple:
|
||||
"""
|
||||
Returns the color as an HSL or HSLA tuple.
|
||||
|
||||
Args:
|
||||
alpha: Whether to include the alpha channel.
|
||||
|
||||
- `None` (default): Include the alpha channel only if it's set (e.g. not `None`).
|
||||
- `True`: Always include alpha.
|
||||
- `False`: Always omit alpha.
|
||||
|
||||
Returns:
|
||||
The color as a tuple of hue, saturation, lightness, and alpha (if included).
|
||||
All elements are in the range 0 to 1.
|
||||
|
||||
Note:
|
||||
This is HSL as used in HTML and most other places, not HLS as used in Python's `colorsys`.
|
||||
"""
|
||||
h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b)
|
||||
if alpha is None:
|
||||
if self._rgba.alpha is None:
|
||||
return h, s, l
|
||||
else:
|
||||
return h, s, l, self._alpha_float()
|
||||
if alpha:
|
||||
return h, s, l, self._alpha_float()
|
||||
else:
|
||||
# alpha is False
|
||||
return h, s, l
|
||||
|
||||
def _alpha_float(self) -> float:
|
||||
return 1 if self._rgba.alpha is None else self._rgba.alpha
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(
|
||||
cls, source: type[Any], handler: Callable[[Any], CoreSchema]
|
||||
) -> core_schema.CoreSchema:
|
||||
return core_schema.with_info_plain_validator_function(
|
||||
cls._validate, serialization=core_schema.to_string_ser_schema()
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: Any, _: Any) -> Color:
|
||||
return cls(__input_value)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.as_named(fallback=True)
|
||||
|
||||
def __repr_args__(self) -> _repr.ReprArgs:
|
||||
return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())]
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple()
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(self.as_rgb_tuple())
|
||||
|
||||
|
||||
def parse_tuple(value: tuple[Any, ...]) -> RGBA:
|
||||
"""Parse a tuple or list to get RGBA values.
|
||||
|
||||
Args:
|
||||
value: A tuple or list.
|
||||
|
||||
Returns:
|
||||
An `RGBA` tuple parsed from the input tuple.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If tuple is not valid.
|
||||
"""
|
||||
if len(value) == 3:
|
||||
r, g, b = (parse_color_value(v) for v in value)
|
||||
return RGBA(r, g, b, None)
|
||||
elif len(value) == 4:
|
||||
r, g, b = (parse_color_value(v) for v in value[:3])
|
||||
return RGBA(r, g, b, parse_float_alpha(value[3]))
|
||||
else:
|
||||
raise PydanticCustomError('color_error', 'value is not a valid color: tuples must have length 3 or 4')
|
||||
|
||||
|
||||
def parse_str(value: str) -> RGBA:
|
||||
"""
|
||||
Parse a string representing a color to an RGBA tuple.
|
||||
|
||||
Possible formats for the input string include:
|
||||
|
||||
* named color, see `COLORS_BY_NAME`
|
||||
* hex short eg. `<prefix>fff` (prefix can be `#`, `0x` or nothing)
|
||||
* hex long eg. `<prefix>ffffff` (prefix can be `#`, `0x` or nothing)
|
||||
* `rgb(<r>, <g>, <b>)`
|
||||
* `rgba(<r>, <g>, <b>, <a>)`
|
||||
* `transparent`
|
||||
|
||||
Args:
|
||||
value: A string representing a color.
|
||||
|
||||
Returns:
|
||||
An `RGBA` tuple parsed from the input string.
|
||||
|
||||
Raises:
|
||||
ValueError: If the input string cannot be parsed to an RGBA tuple.
|
||||
"""
|
||||
value_lower = value.lower()
|
||||
try:
|
||||
r, g, b = COLORS_BY_NAME[value_lower]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
return ints_to_rgba(r, g, b, None)
|
||||
|
||||
m = re.fullmatch(r_hex_short, value_lower)
|
||||
if m:
|
||||
*rgb, a = m.groups()
|
||||
r, g, b = (int(v * 2, 16) for v in rgb)
|
||||
if a:
|
||||
alpha: float | None = int(a * 2, 16) / 255
|
||||
else:
|
||||
alpha = None
|
||||
return ints_to_rgba(r, g, b, alpha)
|
||||
|
||||
m = re.fullmatch(r_hex_long, value_lower)
|
||||
if m:
|
||||
*rgb, a = m.groups()
|
||||
r, g, b = (int(v, 16) for v in rgb)
|
||||
if a:
|
||||
alpha = int(a, 16) / 255
|
||||
else:
|
||||
alpha = None
|
||||
return ints_to_rgba(r, g, b, alpha)
|
||||
|
||||
m = re.fullmatch(r_rgb, value_lower) or re.fullmatch(r_rgb_v4_style, value_lower)
|
||||
if m:
|
||||
return ints_to_rgba(*m.groups()) # type: ignore
|
||||
|
||||
m = re.fullmatch(r_hsl, value_lower) or re.fullmatch(r_hsl_v4_style, value_lower)
|
||||
if m:
|
||||
return parse_hsl(*m.groups()) # type: ignore
|
||||
|
||||
if value_lower == 'transparent':
|
||||
return RGBA(0, 0, 0, 0)
|
||||
|
||||
raise PydanticCustomError(
|
||||
'color_error',
|
||||
'value is not a valid color: string not recognised as a valid color',
|
||||
)
|
||||
|
||||
|
||||
def ints_to_rgba(
|
||||
r: int | str,
|
||||
g: int | str,
|
||||
b: int | str,
|
||||
alpha: float | None = None,
|
||||
) -> RGBA:
|
||||
"""
|
||||
Converts integer or string values for RGB color and an optional alpha value to an `RGBA` object.
|
||||
|
||||
Args:
|
||||
r: An integer or string representing the red color value.
|
||||
g: An integer or string representing the green color value.
|
||||
b: An integer or string representing the blue color value.
|
||||
alpha: A float representing the alpha value. Defaults to None.
|
||||
|
||||
Returns:
|
||||
An instance of the `RGBA` class with the corresponding color and alpha values.
|
||||
"""
|
||||
return RGBA(
|
||||
parse_color_value(r),
|
||||
parse_color_value(g),
|
||||
parse_color_value(b),
|
||||
parse_float_alpha(alpha),
|
||||
)
|
||||
|
||||
|
||||
def parse_color_value(value: int | str, max_val: int = 255) -> float:
|
||||
"""
|
||||
Parse the color value provided and return a number between 0 and 1.
|
||||
|
||||
Args:
|
||||
value: An integer or string color value.
|
||||
max_val: Maximum range value. Defaults to 255.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the value is not a valid color.
|
||||
|
||||
Returns:
|
||||
A number between 0 and 1.
|
||||
"""
|
||||
try:
|
||||
color = float(value)
|
||||
except ValueError:
|
||||
raise PydanticCustomError(
|
||||
'color_error',
|
||||
'value is not a valid color: color values must be a valid number',
|
||||
)
|
||||
if 0 <= color <= max_val:
|
||||
return color / max_val
|
||||
else:
|
||||
raise PydanticCustomError(
|
||||
'color_error',
|
||||
'value is not a valid color: color values must be in the range 0 to {max_val}',
|
||||
{'max_val': max_val},
|
||||
)
|
||||
|
||||
|
||||
def parse_float_alpha(value: None | str | float | int) -> float | None:
|
||||
"""
|
||||
Parse an alpha value checking it's a valid float in the range 0 to 1.
|
||||
|
||||
Args:
|
||||
value: The input value to parse.
|
||||
|
||||
Returns:
|
||||
The parsed value as a float, or `None` if the value was None or equal 1.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the input value cannot be successfully parsed as a float in the expected range.
|
||||
"""
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
if isinstance(value, str) and value.endswith('%'):
|
||||
alpha = float(value[:-1]) / 100
|
||||
else:
|
||||
alpha = float(value)
|
||||
except ValueError:
|
||||
raise PydanticCustomError(
|
||||
'color_error',
|
||||
'value is not a valid color: alpha values must be a valid float',
|
||||
)
|
||||
|
||||
if math.isclose(alpha, 1):
|
||||
return None
|
||||
elif 0 <= alpha <= 1:
|
||||
return alpha
|
||||
else:
|
||||
raise PydanticCustomError(
|
||||
'color_error',
|
||||
'value is not a valid color: alpha values must be in the range 0 to 1',
|
||||
)
|
||||
|
||||
|
||||
def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: float | None = None) -> RGBA:
|
||||
"""
|
||||
Parse raw hue, saturation, lightness, and alpha values and convert to RGBA.
|
||||
|
||||
Args:
|
||||
h: The hue value.
|
||||
h_units: The unit for hue value.
|
||||
sat: The saturation value.
|
||||
light: The lightness value.
|
||||
alpha: Alpha value.
|
||||
|
||||
Returns:
|
||||
An instance of `RGBA`.
|
||||
"""
|
||||
s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100)
|
||||
|
||||
h_value = float(h)
|
||||
if h_units in {None, 'deg'}:
|
||||
h_value = h_value % 360 / 360
|
||||
elif h_units == 'rad':
|
||||
h_value = h_value % rads / rads
|
||||
else:
|
||||
# turns
|
||||
h_value = h_value % 1
|
||||
|
||||
r, g, b = hls_to_rgb(h_value, l_value, s_value)
|
||||
return RGBA(r, g, b, parse_float_alpha(alpha))
|
||||
|
||||
|
||||
def float_to_255(c: float) -> int:
|
||||
"""
|
||||
Converts a float value between 0 and 1 (inclusive) to an integer between 0 and 255 (inclusive).
|
||||
|
||||
Args:
|
||||
c: The float value to be converted. Must be between 0 and 1 (inclusive).
|
||||
|
||||
Returns:
|
||||
The integer equivalent of the given float value rounded to the nearest whole number.
|
||||
"""
|
||||
return round(c * 255)
|
||||
|
||||
|
||||
COLORS_BY_NAME = {
|
||||
'aliceblue': (240, 248, 255),
|
||||
'antiquewhite': (250, 235, 215),
|
||||
'aqua': (0, 255, 255),
|
||||
'aquamarine': (127, 255, 212),
|
||||
'azure': (240, 255, 255),
|
||||
'beige': (245, 245, 220),
|
||||
'bisque': (255, 228, 196),
|
||||
'black': (0, 0, 0),
|
||||
'blanchedalmond': (255, 235, 205),
|
||||
'blue': (0, 0, 255),
|
||||
'blueviolet': (138, 43, 226),
|
||||
'brown': (165, 42, 42),
|
||||
'burlywood': (222, 184, 135),
|
||||
'cadetblue': (95, 158, 160),
|
||||
'chartreuse': (127, 255, 0),
|
||||
'chocolate': (210, 105, 30),
|
||||
'coral': (255, 127, 80),
|
||||
'cornflowerblue': (100, 149, 237),
|
||||
'cornsilk': (255, 248, 220),
|
||||
'crimson': (220, 20, 60),
|
||||
'cyan': (0, 255, 255),
|
||||
'darkblue': (0, 0, 139),
|
||||
'darkcyan': (0, 139, 139),
|
||||
'darkgoldenrod': (184, 134, 11),
|
||||
'darkgray': (169, 169, 169),
|
||||
'darkgreen': (0, 100, 0),
|
||||
'darkgrey': (169, 169, 169),
|
||||
'darkkhaki': (189, 183, 107),
|
||||
'darkmagenta': (139, 0, 139),
|
||||
'darkolivegreen': (85, 107, 47),
|
||||
'darkorange': (255, 140, 0),
|
||||
'darkorchid': (153, 50, 204),
|
||||
'darkred': (139, 0, 0),
|
||||
'darksalmon': (233, 150, 122),
|
||||
'darkseagreen': (143, 188, 143),
|
||||
'darkslateblue': (72, 61, 139),
|
||||
'darkslategray': (47, 79, 79),
|
||||
'darkslategrey': (47, 79, 79),
|
||||
'darkturquoise': (0, 206, 209),
|
||||
'darkviolet': (148, 0, 211),
|
||||
'deeppink': (255, 20, 147),
|
||||
'deepskyblue': (0, 191, 255),
|
||||
'dimgray': (105, 105, 105),
|
||||
'dimgrey': (105, 105, 105),
|
||||
'dodgerblue': (30, 144, 255),
|
||||
'firebrick': (178, 34, 34),
|
||||
'floralwhite': (255, 250, 240),
|
||||
'forestgreen': (34, 139, 34),
|
||||
'fuchsia': (255, 0, 255),
|
||||
'gainsboro': (220, 220, 220),
|
||||
'ghostwhite': (248, 248, 255),
|
||||
'gold': (255, 215, 0),
|
||||
'goldenrod': (218, 165, 32),
|
||||
'gray': (128, 128, 128),
|
||||
'green': (0, 128, 0),
|
||||
'greenyellow': (173, 255, 47),
|
||||
'grey': (128, 128, 128),
|
||||
'honeydew': (240, 255, 240),
|
||||
'hotpink': (255, 105, 180),
|
||||
'indianred': (205, 92, 92),
|
||||
'indigo': (75, 0, 130),
|
||||
'ivory': (255, 255, 240),
|
||||
'khaki': (240, 230, 140),
|
||||
'lavender': (230, 230, 250),
|
||||
'lavenderblush': (255, 240, 245),
|
||||
'lawngreen': (124, 252, 0),
|
||||
'lemonchiffon': (255, 250, 205),
|
||||
'lightblue': (173, 216, 230),
|
||||
'lightcoral': (240, 128, 128),
|
||||
'lightcyan': (224, 255, 255),
|
||||
'lightgoldenrodyellow': (250, 250, 210),
|
||||
'lightgray': (211, 211, 211),
|
||||
'lightgreen': (144, 238, 144),
|
||||
'lightgrey': (211, 211, 211),
|
||||
'lightpink': (255, 182, 193),
|
||||
'lightsalmon': (255, 160, 122),
|
||||
'lightseagreen': (32, 178, 170),
|
||||
'lightskyblue': (135, 206, 250),
|
||||
'lightslategray': (119, 136, 153),
|
||||
'lightslategrey': (119, 136, 153),
|
||||
'lightsteelblue': (176, 196, 222),
|
||||
'lightyellow': (255, 255, 224),
|
||||
'lime': (0, 255, 0),
|
||||
'limegreen': (50, 205, 50),
|
||||
'linen': (250, 240, 230),
|
||||
'magenta': (255, 0, 255),
|
||||
'maroon': (128, 0, 0),
|
||||
'mediumaquamarine': (102, 205, 170),
|
||||
'mediumblue': (0, 0, 205),
|
||||
'mediumorchid': (186, 85, 211),
|
||||
'mediumpurple': (147, 112, 219),
|
||||
'mediumseagreen': (60, 179, 113),
|
||||
'mediumslateblue': (123, 104, 238),
|
||||
'mediumspringgreen': (0, 250, 154),
|
||||
'mediumturquoise': (72, 209, 204),
|
||||
'mediumvioletred': (199, 21, 133),
|
||||
'midnightblue': (25, 25, 112),
|
||||
'mintcream': (245, 255, 250),
|
||||
'mistyrose': (255, 228, 225),
|
||||
'moccasin': (255, 228, 181),
|
||||
'navajowhite': (255, 222, 173),
|
||||
'navy': (0, 0, 128),
|
||||
'oldlace': (253, 245, 230),
|
||||
'olive': (128, 128, 0),
|
||||
'olivedrab': (107, 142, 35),
|
||||
'orange': (255, 165, 0),
|
||||
'orangered': (255, 69, 0),
|
||||
'orchid': (218, 112, 214),
|
||||
'palegoldenrod': (238, 232, 170),
|
||||
'palegreen': (152, 251, 152),
|
||||
'paleturquoise': (175, 238, 238),
|
||||
'palevioletred': (219, 112, 147),
|
||||
'papayawhip': (255, 239, 213),
|
||||
'peachpuff': (255, 218, 185),
|
||||
'peru': (205, 133, 63),
|
||||
'pink': (255, 192, 203),
|
||||
'plum': (221, 160, 221),
|
||||
'powderblue': (176, 224, 230),
|
||||
'purple': (128, 0, 128),
|
||||
'red': (255, 0, 0),
|
||||
'rosybrown': (188, 143, 143),
|
||||
'royalblue': (65, 105, 225),
|
||||
'saddlebrown': (139, 69, 19),
|
||||
'salmon': (250, 128, 114),
|
||||
'sandybrown': (244, 164, 96),
|
||||
'seagreen': (46, 139, 87),
|
||||
'seashell': (255, 245, 238),
|
||||
'sienna': (160, 82, 45),
|
||||
'silver': (192, 192, 192),
|
||||
'skyblue': (135, 206, 235),
|
||||
'slateblue': (106, 90, 205),
|
||||
'slategray': (112, 128, 144),
|
||||
'slategrey': (112, 128, 144),
|
||||
'snow': (255, 250, 250),
|
||||
'springgreen': (0, 255, 127),
|
||||
'steelblue': (70, 130, 180),
|
||||
'tan': (210, 180, 140),
|
||||
'teal': (0, 128, 128),
|
||||
'thistle': (216, 191, 216),
|
||||
'tomato': (255, 99, 71),
|
||||
'turquoise': (64, 224, 208),
|
||||
'violet': (238, 130, 238),
|
||||
'wheat': (245, 222, 179),
|
||||
'white': (255, 255, 255),
|
||||
'whitesmoke': (245, 245, 245),
|
||||
'yellow': (255, 255, 0),
|
||||
'yellowgreen': (154, 205, 50),
|
||||
}
|
||||
|
||||
COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()}
|
145
pydantic_extra_types/coordinate.py
Normal file
145
pydantic_extra_types/coordinate.py
Normal file
|
@ -0,0 +1,145 @@
|
|||
"""
|
||||
The `pydantic_extra_types.coordinate` module provides the [`Latitude`][pydantic_extra_types.coordinate.Latitude],
|
||||
[`Longitude`][pydantic_extra_types.coordinate.Longitude], and
|
||||
[`Coordinate`][pydantic_extra_types.coordinate.Coordinate] data types.
|
||||
"""
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, ClassVar, Tuple, Type
|
||||
|
||||
from pydantic import GetCoreSchemaHandler
|
||||
from pydantic._internal import _repr
|
||||
from pydantic_core import ArgsKwargs, PydanticCustomError, core_schema
|
||||
|
||||
|
||||
class Latitude(float):
|
||||
"""Latitude value should be between -90 and 90, inclusive.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
from pydantic_extra_types.coordinate import Latitude
|
||||
|
||||
class Location(BaseModel):
|
||||
latitude: Latitude
|
||||
|
||||
location = Location(latitude=41.40338)
|
||||
print(location)
|
||||
#> latitude=41.40338
|
||||
```
|
||||
"""
|
||||
|
||||
min: ClassVar[float] = -90.00
|
||||
max: ClassVar[float] = 90.00
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(cls, source: Type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
|
||||
return core_schema.float_schema(ge=cls.min, le=cls.max)
|
||||
|
||||
|
||||
class Longitude(float):
|
||||
"""Longitude value should be between -180 and 180, inclusive.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.coordinate import Longitude
|
||||
|
||||
class Location(BaseModel):
|
||||
longitude: Longitude
|
||||
|
||||
location = Location(longitude=2.17403)
|
||||
print(location)
|
||||
#> longitude=2.17403
|
||||
```
|
||||
"""
|
||||
|
||||
min: ClassVar[float] = -180.00
|
||||
max: ClassVar[float] = 180.00
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(cls, source: Type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
|
||||
return core_schema.float_schema(ge=cls.min, le=cls.max)
|
||||
|
||||
|
||||
@dataclass
|
||||
class Coordinate(_repr.Representation):
|
||||
"""Coordinate parses Latitude and Longitude.
|
||||
|
||||
You can use the `Coordinate` data type for storing coordinates. Coordinates can be
|
||||
defined using one of the following formats:
|
||||
|
||||
1. Tuple: `(Latitude, Longitude)`. For example: `(41.40338, 2.17403)`.
|
||||
2. `Coordinate` instance: `Coordinate(latitude=Latitude, longitude=Longitude)`.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.coordinate import Coordinate
|
||||
|
||||
class Location(BaseModel):
|
||||
coordinate: Coordinate
|
||||
|
||||
location = Location(coordinate=(41.40338, 2.17403))
|
||||
#> coordinate=Coordinate(latitude=41.40338, longitude=2.17403)
|
||||
```
|
||||
"""
|
||||
|
||||
_NULL_ISLAND: ClassVar[Tuple[float, float]] = (0.0, 0.0)
|
||||
|
||||
latitude: Latitude
|
||||
longitude: Longitude
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(cls, source: Type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
|
||||
schema_chain = [
|
||||
core_schema.no_info_wrap_validator_function(cls._parse_str, core_schema.str_schema()),
|
||||
core_schema.no_info_wrap_validator_function(
|
||||
cls._parse_tuple,
|
||||
handler.generate_schema(Tuple[float, float]),
|
||||
),
|
||||
handler(source),
|
||||
]
|
||||
|
||||
chain_length = len(schema_chain)
|
||||
chain_schemas = [core_schema.chain_schema(schema_chain[x:]) for x in range(chain_length - 1, -1, -1)]
|
||||
return core_schema.no_info_wrap_validator_function(
|
||||
cls._parse_args,
|
||||
core_schema.union_schema(chain_schemas), # type: ignore[arg-type]
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _parse_args(cls, value: Any, handler: core_schema.ValidatorFunctionWrapHandler) -> Any:
|
||||
if isinstance(value, ArgsKwargs) and not value.kwargs:
|
||||
n_args = len(value.args)
|
||||
if n_args == 0:
|
||||
value = cls._NULL_ISLAND
|
||||
elif n_args == 1:
|
||||
value = value.args[0]
|
||||
return handler(value)
|
||||
|
||||
@classmethod
|
||||
def _parse_str(cls, value: Any, handler: core_schema.ValidatorFunctionWrapHandler) -> Any:
|
||||
if not isinstance(value, str):
|
||||
return value
|
||||
try:
|
||||
value = tuple(float(x) for x in value.split(','))
|
||||
except ValueError:
|
||||
raise PydanticCustomError(
|
||||
'coordinate_error',
|
||||
'value is not a valid coordinate: string is not recognized as a valid coordinate',
|
||||
)
|
||||
return ArgsKwargs(args=value)
|
||||
|
||||
@classmethod
|
||||
def _parse_tuple(cls, value: Any, handler: core_schema.ValidatorFunctionWrapHandler) -> Any:
|
||||
if not isinstance(value, tuple):
|
||||
return value
|
||||
return ArgsKwargs(args=handler(value))
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f'{self.latitude},{self.longitude}'
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
return isinstance(other, Coordinate) and self.latitude == other.latitude and self.longitude == other.longitude
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash((self.latitude, self.longitude))
|
281
pydantic_extra_types/country.py
Normal file
281
pydantic_extra_types/country.py
Normal file
|
@ -0,0 +1,281 @@
|
|||
"""
|
||||
Country definitions that are based on the [ISO 3166](https://en.wikipedia.org/wiki/List_of_ISO_3166_country_codes).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from functools import lru_cache
|
||||
from typing import Any
|
||||
|
||||
from pydantic import GetCoreSchemaHandler, GetJsonSchemaHandler
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
|
||||
try:
|
||||
import pycountry
|
||||
except ModuleNotFoundError: # pragma: no cover
|
||||
raise RuntimeError(
|
||||
'The `country` module requires "pycountry" to be installed. You can install it with "pip install pycountry".'
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class CountryInfo:
|
||||
alpha2: str
|
||||
alpha3: str
|
||||
numeric_code: str
|
||||
short_name: str
|
||||
|
||||
|
||||
@lru_cache
|
||||
def _countries() -> list[CountryInfo]:
|
||||
return [
|
||||
CountryInfo(
|
||||
alpha2=country.alpha_2,
|
||||
alpha3=country.alpha_3,
|
||||
numeric_code=country.numeric,
|
||||
short_name=country.name,
|
||||
)
|
||||
for country in pycountry.countries
|
||||
]
|
||||
|
||||
|
||||
@lru_cache
|
||||
def _index_by_alpha2() -> dict[str, CountryInfo]:
|
||||
return {country.alpha2: country for country in _countries()}
|
||||
|
||||
|
||||
@lru_cache
|
||||
def _index_by_alpha3() -> dict[str, CountryInfo]:
|
||||
return {country.alpha3: country for country in _countries()}
|
||||
|
||||
|
||||
@lru_cache
|
||||
def _index_by_numeric_code() -> dict[str, CountryInfo]:
|
||||
return {country.numeric_code: country for country in _countries()}
|
||||
|
||||
|
||||
@lru_cache
|
||||
def _index_by_short_name() -> dict[str, CountryInfo]:
|
||||
return {country.short_name: country for country in _countries()}
|
||||
|
||||
|
||||
class CountryAlpha2(str):
|
||||
"""CountryAlpha2 parses country codes in the [ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2)
|
||||
format.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.country import CountryAlpha2
|
||||
|
||||
class Product(BaseModel):
|
||||
made_in: CountryAlpha2
|
||||
|
||||
product = Product(made_in='ES')
|
||||
print(product)
|
||||
#> made_in='ES'
|
||||
```
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: str, _: core_schema.ValidationInfo) -> CountryAlpha2:
|
||||
if __input_value not in _index_by_alpha2():
|
||||
raise PydanticCustomError('country_alpha2', 'Invalid country alpha2 code')
|
||||
return cls(__input_value)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(
|
||||
cls, source: type[Any], handler: GetCoreSchemaHandler
|
||||
) -> core_schema.AfterValidatorFunctionSchema:
|
||||
return core_schema.with_info_after_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(to_upper=True),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_json_schema__(
|
||||
cls, schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
|
||||
) -> dict[str, Any]:
|
||||
json_schema = handler(schema)
|
||||
json_schema.update({'pattern': r'^\w{2}$'})
|
||||
return json_schema
|
||||
|
||||
@property
|
||||
def alpha3(self) -> str:
|
||||
"""The country code in the [ISO 3166-1 alpha-3](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-3) format."""
|
||||
return _index_by_alpha2()[self].alpha3
|
||||
|
||||
@property
|
||||
def numeric_code(self) -> str:
|
||||
"""The country code in the [ISO 3166-1 numeric](https://en.wikipedia.org/wiki/ISO_3166-1_numeric) format."""
|
||||
return _index_by_alpha2()[self].numeric_code
|
||||
|
||||
@property
|
||||
def short_name(self) -> str:
|
||||
"""The country short name."""
|
||||
return _index_by_alpha2()[self].short_name
|
||||
|
||||
|
||||
class CountryAlpha3(str):
|
||||
"""CountryAlpha3 parses country codes in the [ISO 3166-1 alpha-3](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-3)
|
||||
format.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.country import CountryAlpha3
|
||||
|
||||
class Product(BaseModel):
|
||||
made_in: CountryAlpha3
|
||||
|
||||
product = Product(made_in="USA")
|
||||
print(product)
|
||||
#> made_in='USA'
|
||||
```
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: str, _: core_schema.ValidationInfo) -> CountryAlpha3:
|
||||
if __input_value not in _index_by_alpha3():
|
||||
raise PydanticCustomError('country_alpha3', 'Invalid country alpha3 code')
|
||||
return cls(__input_value)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(
|
||||
cls, source: type[Any], handler: GetCoreSchemaHandler
|
||||
) -> core_schema.AfterValidatorFunctionSchema:
|
||||
return core_schema.with_info_after_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(to_upper=True),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_json_schema__(
|
||||
cls, schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
|
||||
) -> dict[str, Any]:
|
||||
json_schema = handler(schema)
|
||||
json_schema.update({'pattern': r'^\w{3}$'})
|
||||
return json_schema
|
||||
|
||||
@property
|
||||
def alpha2(self) -> str:
|
||||
"""The country code in the [ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2) format."""
|
||||
return _index_by_alpha3()[self].alpha2
|
||||
|
||||
@property
|
||||
def numeric_code(self) -> str:
|
||||
"""The country code in the [ISO 3166-1 numeric](https://en.wikipedia.org/wiki/ISO_3166-1_numeric) format."""
|
||||
return _index_by_alpha3()[self].numeric_code
|
||||
|
||||
@property
|
||||
def short_name(self) -> str:
|
||||
"""The country short name."""
|
||||
return _index_by_alpha3()[self].short_name
|
||||
|
||||
|
||||
class CountryNumericCode(str):
|
||||
"""CountryNumericCode parses country codes in the
|
||||
[ISO 3166-1 numeric](https://en.wikipedia.org/wiki/ISO_3166-1_numeric) format.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.country import CountryNumericCode
|
||||
|
||||
class Product(BaseModel):
|
||||
made_in: CountryNumericCode
|
||||
|
||||
product = Product(made_in="840")
|
||||
print(product)
|
||||
#> made_in='840'
|
||||
```
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: str, _: core_schema.ValidationInfo) -> CountryNumericCode:
|
||||
if __input_value not in _index_by_numeric_code():
|
||||
raise PydanticCustomError('country_numeric_code', 'Invalid country numeric code')
|
||||
return cls(__input_value)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(
|
||||
cls, source: type[Any], handler: GetCoreSchemaHandler
|
||||
) -> core_schema.AfterValidatorFunctionSchema:
|
||||
return core_schema.with_info_after_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(to_upper=True),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_json_schema__(
|
||||
cls, schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
|
||||
) -> dict[str, Any]:
|
||||
json_schema = handler(schema)
|
||||
json_schema.update({'pattern': r'^[0-9]{3}$'})
|
||||
return json_schema
|
||||
|
||||
@property
|
||||
def alpha2(self) -> str:
|
||||
"""The country code in the [ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2) format."""
|
||||
return _index_by_numeric_code()[self].alpha2
|
||||
|
||||
@property
|
||||
def alpha3(self) -> str:
|
||||
"""The country code in the [ISO 3166-1 alpha-3](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-3) format."""
|
||||
return _index_by_numeric_code()[self].alpha3
|
||||
|
||||
@property
|
||||
def short_name(self) -> str:
|
||||
"""The country short name."""
|
||||
return _index_by_numeric_code()[self].short_name
|
||||
|
||||
|
||||
class CountryShortName(str):
|
||||
"""CountryShortName parses country codes in the short name format.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.country import CountryShortName
|
||||
|
||||
class Product(BaseModel):
|
||||
made_in: CountryShortName
|
||||
|
||||
product = Product(made_in="United States")
|
||||
print(product)
|
||||
#> made_in='United States'
|
||||
```
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: str, _: core_schema.ValidationInfo) -> CountryShortName:
|
||||
if __input_value not in _index_by_short_name():
|
||||
raise PydanticCustomError('country_short_name', 'Invalid country short name')
|
||||
return cls(__input_value)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(
|
||||
cls, source: type[Any], handler: GetCoreSchemaHandler
|
||||
) -> core_schema.AfterValidatorFunctionSchema:
|
||||
return core_schema.with_info_after_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(),
|
||||
serialization=core_schema.to_string_ser_schema(),
|
||||
)
|
||||
|
||||
@property
|
||||
def alpha2(self) -> str:
|
||||
"""The country code in the [ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2) format."""
|
||||
return _index_by_short_name()[self].alpha2
|
||||
|
||||
@property
|
||||
def alpha3(self) -> str:
|
||||
"""The country code in the [ISO 3166-1 alpha-3](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-3) format."""
|
||||
return _index_by_short_name()[self].alpha3
|
||||
|
||||
@property
|
||||
def numeric_code(self) -> str:
|
||||
"""The country code in the [ISO 3166-1 numeric](https://en.wikipedia.org/wiki/ISO_3166-1_numeric) format."""
|
||||
return _index_by_short_name()[self].numeric_code
|
179
pydantic_extra_types/currency_code.py
Normal file
179
pydantic_extra_types/currency_code.py
Normal file
|
@ -0,0 +1,179 @@
|
|||
"""
|
||||
Currency definitions that are based on the [ISO4217](https://en.wikipedia.org/wiki/ISO_4217).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pydantic import GetCoreSchemaHandler, GetJsonSchemaHandler
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
|
||||
try:
|
||||
import pycountry
|
||||
except ModuleNotFoundError: # pragma: no cover
|
||||
raise RuntimeError(
|
||||
'The `currency_code` module requires "pycountry" to be installed. You can install it with "pip install '
|
||||
'pycountry".'
|
||||
)
|
||||
|
||||
# List of codes that should not be usually used within regular transactions
|
||||
_CODES_FOR_BONDS_METAL_TESTING = {
|
||||
'XTS', # testing
|
||||
'XAU', # gold
|
||||
'XAG', # silver
|
||||
'XPD', # palladium
|
||||
'XPT', # platinum
|
||||
'XBA', # Bond Markets Unit European Composite Unit (EURCO)
|
||||
'XBB', # Bond Markets Unit European Monetary Unit (E.M.U.-6)
|
||||
'XBC', # Bond Markets Unit European Unit of Account 9 (E.U.A.-9)
|
||||
'XBD', # Bond Markets Unit European Unit of Account 17 (E.U.A.-17)
|
||||
'XXX', # no currency
|
||||
'XDR', # SDR (Special Drawing Right)
|
||||
}
|
||||
|
||||
|
||||
class ISO4217(str):
|
||||
"""ISO4217 parses Currency in the [ISO 4217](https://en.wikipedia.org/wiki/ISO_4217) format.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.currency_code import ISO4217
|
||||
|
||||
class Currency(BaseModel):
|
||||
alpha_3: ISO4217
|
||||
|
||||
currency = Currency(alpha_3='AED')
|
||||
print(currency)
|
||||
# > alpha_3='AED'
|
||||
```
|
||||
"""
|
||||
|
||||
allowed_countries_list = [country.alpha_3 for country in pycountry.currencies]
|
||||
allowed_currencies = set(allowed_countries_list)
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, currency_code: str, _: core_schema.ValidationInfo) -> str:
|
||||
"""
|
||||
Validate a ISO 4217 language code from the provided str value.
|
||||
|
||||
Args:
|
||||
currency_code: The str value to be validated.
|
||||
_: The Pydantic ValidationInfo.
|
||||
|
||||
Returns:
|
||||
The validated ISO 4217 currency code.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the ISO 4217 currency code is not valid.
|
||||
"""
|
||||
if currency_code not in cls.allowed_currencies:
|
||||
raise PydanticCustomError(
|
||||
'ISO4217', 'Invalid ISO 4217 currency code. See https://en.wikipedia.org/wiki/ISO_4217'
|
||||
)
|
||||
return currency_code
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(cls, _: type[Any], __: GetCoreSchemaHandler) -> core_schema.CoreSchema:
|
||||
return core_schema.with_info_after_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(min_length=3, max_length=3),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_json_schema__(
|
||||
cls, schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
|
||||
) -> dict[str, Any]:
|
||||
json_schema = handler(schema)
|
||||
json_schema.update({'enum': cls.allowed_countries_list})
|
||||
return json_schema
|
||||
|
||||
|
||||
class Currency(str):
|
||||
"""Currency parses currency subset of the [ISO 4217](https://en.wikipedia.org/wiki/ISO_4217) format.
|
||||
It excludes bonds testing codes and precious metals.
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.currency_code import Currency
|
||||
|
||||
class currency(BaseModel):
|
||||
alpha_3: Currency
|
||||
|
||||
cur = currency(alpha_3='AED')
|
||||
print(cur)
|
||||
# > alpha_3='AED'
|
||||
```
|
||||
"""
|
||||
|
||||
allowed_countries_list = list(
|
||||
filter(lambda x: x not in _CODES_FOR_BONDS_METAL_TESTING, ISO4217.allowed_countries_list)
|
||||
)
|
||||
allowed_currencies = set(allowed_countries_list)
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, currency_symbol: str, _: core_schema.ValidationInfo) -> str:
|
||||
"""
|
||||
        Validate a currency code against the subset of the [ISO 4217](https://en.wikipedia.org/wiki/ISO_4217) format
        that excludes bond, testing, and precious-metal codes.
|
||||
|
||||
Args:
|
||||
currency_symbol: The str value to be validated.
|
||||
_: The Pydantic ValidationInfo.
|
||||
|
||||
Returns:
|
||||
The validated ISO 4217 currency code.
|
||||
|
||||
Raises:
|
||||
            PydanticCustomError: If the ISO 4217 currency code is not valid or is a bond, precious-metal or testing code.
|
||||
"""
|
||||
if currency_symbol not in cls.allowed_currencies:
|
||||
raise PydanticCustomError(
|
||||
'InvalidCurrency',
|
||||
'Invalid currency code.'
|
||||
' See https://en.wikipedia.org/wiki/ISO_4217. '
|
||||
'Bonds, testing and precious metals codes are not allowed.',
|
||||
)
|
||||
return currency_symbol
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(cls, _: type[Any], __: GetCoreSchemaHandler) -> core_schema.CoreSchema:
|
||||
"""
|
||||
Return a Pydantic CoreSchema with the currency subset of the
|
||||
[ISO4217](https://en.wikipedia.org/wiki/ISO_4217) format.
|
||||
It excludes bonds testing codes and precious metals.
|
||||
|
||||
Args:
|
||||
_: The source type.
|
||||
__: The handler to get the CoreSchema.
|
||||
|
||||
Returns:
|
||||
            A Pydantic CoreSchema limited to the currency subset of the
            [ISO 4217](https://en.wikipedia.org/wiki/ISO_4217) format, excluding bond, testing, and precious-metal codes.
|
||||
"""
|
||||
return core_schema.with_info_after_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(min_length=3, max_length=3),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_json_schema__(
|
||||
cls, schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
|
||||
) -> dict[str, Any]:
|
||||
"""
|
||||
        Return a Pydantic JSON Schema for the subset of the [ISO 4217](https://en.wikipedia.org/wiki/ISO_4217) format
        that excludes bond, testing, and precious-metal codes.
|
||||
|
||||
Args:
|
||||
schema: The Pydantic CoreSchema.
|
||||
handler: The handler to get the JSON Schema.
|
||||
|
||||
Returns:
|
||||
            A Pydantic JSON Schema with the ISO 4217 currency code validation, excluding bond, testing,
            and precious-metal codes.
|
||||
|
||||
"""
|
||||
json_schema = handler(schema)
|
||||
json_schema.update({'enum': cls.allowed_countries_list})
|
||||
return json_schema
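A short sketch of how the two types differ, assuming the model below (illustrative, not part of the module): `Currency` rejects the codes listed in `_CODES_FOR_BONDS_METAL_TESTING`, while `ISO4217` accepts them.

```py
from pydantic import BaseModel, ValidationError

from pydantic_extra_types.currency_code import ISO4217, Currency


class Price(BaseModel):
    code: Currency     # everyday currencies only
    raw_code: ISO4217  # any ISO 4217 code


Price(code='EUR', raw_code='XAU')  # ok: gold (XAU) is allowed only on the ISO4217 field

try:
    Price(code='XAU', raw_code='XAU')
except ValidationError as exc:
    print(exc.errors()[0]['type'])
    # expected: InvalidCurrency
```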
152
pydantic_extra_types/isbn.py
Normal file
@@ -0,0 +1,152 @@
"""
|
||||
The `pydantic_extra_types.isbn` module provides functionality to receive and validate ISBNs.

ISBN (International Standard Book Number) is a numeric commercial book identifier which is intended to be unique. This module provides an ISBN type for Pydantic models.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pydantic import GetCoreSchemaHandler
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
|
||||
|
||||
def isbn10_digit_calc(isbn: str) -> str:
|
||||
"""Calc a ISBN-10 last digit from the provided str value. More information of validation algorithm on [Wikipedia](https://en.wikipedia.org/wiki/ISBN#Check_digits)
|
||||
|
||||
Args:
|
||||
isbn: The str value representing the ISBN in 10 digits.
|
||||
|
||||
Returns:
|
||||
The calculated last digit of the ISBN-10 value.
|
||||
"""
|
||||
total = sum(int(digit) * (10 - idx) for idx, digit in enumerate(isbn[:9]))
|
||||
|
||||
for check_digit in range(1, 11):
|
||||
if (total + check_digit) % 11 == 0:
|
||||
valid_check_digit = 'X' if check_digit == 10 else str(check_digit)
|
||||
|
||||
return valid_check_digit
|
||||
|
||||
|
||||
def isbn13_digit_calc(isbn: str) -> str:
|
||||
"""Calc a ISBN-13 last digit from the provided str value. More information of validation algorithm on [Wikipedia](https://en.wikipedia.org/wiki/ISBN#Check_digits)
|
||||
|
||||
Args:
|
||||
isbn: The str value representing the ISBN in 13 digits.
|
||||
|
||||
Returns:
|
||||
The calculated last digit of the ISBN-13 value.
|
||||
"""
|
||||
total = sum(int(digit) * (1 if idx % 2 == 0 else 3) for idx, digit in enumerate(isbn[:12]))
|
||||
|
||||
check_digit = (10 - (total % 10)) % 10
|
||||
|
||||
return str(check_digit)
|
||||
|
||||
|
||||
class ISBN(str):
|
||||
"""Represents a ISBN and provides methods for conversion, validation, and serialization.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.isbn import ISBN
|
||||
|
||||
|
||||
class Book(BaseModel):
|
||||
isbn: ISBN
|
||||
|
||||
book = Book(isbn="8537809667")
|
||||
print(book)
|
||||
#> isbn='9788537809662'
|
||||
```
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
|
||||
"""
|
||||
Return a Pydantic CoreSchema with the ISBN validation.
|
||||
|
||||
Args:
|
||||
source: The source type to be converted.
|
||||
handler: The handler to get the CoreSchema.
|
||||
|
||||
Returns:
|
||||
A Pydantic CoreSchema with the ISBN validation.
|
||||
|
||||
"""
|
||||
return core_schema.with_info_before_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: str, _: Any) -> str:
|
||||
"""
|
||||
        Validate an ISBN from the provided str value.
|
||||
|
||||
Args:
|
||||
__input_value: The str value to be validated.
|
||||
_: The source type to be converted.
|
||||
|
||||
Returns:
|
||||
The validated ISBN.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the ISBN is not valid.
|
||||
"""
|
||||
cls.validate_isbn_format(__input_value)
|
||||
|
||||
return cls.convert_isbn10_to_isbn13(__input_value)
|
||||
|
||||
@staticmethod
|
||||
def validate_isbn_format(value: str) -> None:
|
||||
"""Validate a ISBN format from the provided str value.
|
||||
|
||||
Args:
|
||||
value: The str value representing the ISBN in 10 or 13 digits.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the ISBN is not valid.
|
||||
"""
|
||||
|
||||
isbn_length = len(value)
|
||||
|
||||
if isbn_length not in (10, 13):
|
||||
raise PydanticCustomError('isbn_length', f'Length for ISBN must be 10 or 13 digits, not {isbn_length}')
|
||||
|
||||
if isbn_length == 10:
|
||||
if not value[:-1].isdigit() or ((value[-1] != 'X') and (not value[-1].isdigit())):
|
||||
raise PydanticCustomError('isbn10_invalid_characters', 'First 9 digits of ISBN-10 must be integers')
|
||||
if isbn10_digit_calc(value) != value[-1]:
|
||||
raise PydanticCustomError('isbn_invalid_digit_check_isbn10', 'Provided digit is invalid for given ISBN')
|
||||
|
||||
if isbn_length == 13:
|
||||
if not value.isdigit():
|
||||
raise PydanticCustomError('isbn13_invalid_characters', 'All digits of ISBN-13 must be integers')
|
||||
if value[:3] not in ('978', '979'):
|
||||
raise PydanticCustomError(
|
||||
'isbn_invalid_early_characters', 'The first 3 digits of ISBN-13 must be 978 or 979'
|
||||
)
|
||||
if isbn13_digit_calc(value) != value[-1]:
|
||||
raise PydanticCustomError('isbn_invalid_digit_check_isbn13', 'Provided digit is invalid for given ISBN')
|
||||
|
||||
@staticmethod
|
||||
def convert_isbn10_to_isbn13(value: str) -> str:
|
||||
"""Convert an ISBN-10 to ISBN-13.
|
||||
|
||||
Args:
|
||||
value: The ISBN-10 value to be converted.
|
||||
|
||||
Returns:
|
||||
The converted ISBN or the original value if no conversion is necessary.
|
||||
"""
|
||||
|
||||
if len(value) == 10:
|
||||
base_isbn = f'978{value[:-1]}'
|
||||
isbn13_digit = isbn13_digit_calc(base_isbn)
|
||||
return ISBN(f'{base_isbn}{isbn13_digit}')
|
||||
|
||||
return ISBN(value)
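Because `_validate` always runs this conversion, a model field normalises ISBN-10 input to ISBN-13. A minimal sketch reusing the value from the class docstring above:

```py
from pydantic import BaseModel

from pydantic_extra_types.isbn import ISBN


class Book(BaseModel):
    isbn: ISBN


print(Book(isbn='8537809667').isbn)  # ISBN-10 input
# expected: 9788537809662 (the normalised ISBN-13 form)
```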
182
pydantic_extra_types/language_code.py
Normal file
@@ -0,0 +1,182 @@
"""
|
||||
Language definitions that are based on the [ISO 639-3](https://en.wikipedia.org/wiki/ISO_639-3) & [ISO 639-5](https://en.wikipedia.org/wiki/ISO_639-5).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pydantic import GetCoreSchemaHandler, GetJsonSchemaHandler
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
|
||||
try:
|
||||
import pycountry
|
||||
except ModuleNotFoundError: # pragma: no cover
|
||||
raise RuntimeError(
|
||||
'The `language_code` module requires "pycountry" to be installed.'
|
||||
' You can install it with "pip install pycountry".'
|
||||
)
|
||||
|
||||
|
||||
class ISO639_3(str):
|
||||
"""ISO639_3 parses Language in the [ISO 639-3 alpha-3](https://en.wikipedia.org/wiki/ISO_639-3_alpha-3)
|
||||
format.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.language_code import ISO639_3
|
||||
|
||||
class Language(BaseModel):
|
||||
alpha_3: ISO639_3
|
||||
|
||||
lang = Language(alpha_3='ssr')
|
||||
print(lang)
|
||||
# > alpha_3='ssr'
|
||||
```
|
||||
"""
|
||||
|
||||
allowed_values_list = [lang.alpha_3 for lang in pycountry.languages]
|
||||
allowed_values = set(allowed_values_list)
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: str, _: core_schema.ValidationInfo) -> ISO639_3:
|
||||
"""
|
||||
        Validate an ISO 639-3 language code from the provided str value.
|
||||
|
||||
Args:
|
||||
__input_value: The str value to be validated.
|
||||
_: The Pydantic ValidationInfo.
|
||||
|
||||
Returns:
|
||||
The validated ISO 639-3 language code.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the ISO 639-3 language code is not valid.
|
||||
"""
|
||||
if __input_value not in cls.allowed_values:
|
||||
raise PydanticCustomError(
|
||||
'ISO649_3', 'Invalid ISO 639-3 language code. See https://en.wikipedia.org/wiki/ISO_639-3'
|
||||
)
|
||||
return cls(__input_value)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(
|
||||
cls, _: type[Any], __: GetCoreSchemaHandler
|
||||
) -> core_schema.AfterValidatorFunctionSchema:
|
||||
"""
|
||||
Return a Pydantic CoreSchema with the ISO 639-3 language code validation.
|
||||
|
||||
Args:
|
||||
_: The source type.
|
||||
__: The handler to get the CoreSchema.
|
||||
|
||||
Returns:
|
||||
A Pydantic CoreSchema with the ISO 639-3 language code validation.
|
||||
|
||||
"""
|
||||
return core_schema.with_info_after_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(min_length=3, max_length=3),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_json_schema__(
|
||||
cls, schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
|
||||
) -> dict[str, Any]:
|
||||
"""
|
||||
Return a Pydantic JSON Schema with the ISO 639-3 language code validation.
|
||||
|
||||
Args:
|
||||
schema: The Pydantic CoreSchema.
|
||||
handler: The handler to get the JSON Schema.
|
||||
|
||||
Returns:
|
||||
A Pydantic JSON Schema with the ISO 639-3 language code validation.
|
||||
|
||||
"""
|
||||
json_schema = handler(schema)
|
||||
json_schema.update({'enum': cls.allowed_values_list})
|
||||
return json_schema
|
||||
|
||||
|
||||
class ISO639_5(str):
|
||||
"""ISO639_5 parses Language in the [ISO 639-5 alpha-3](https://en.wikipedia.org/wiki/ISO_639-5_alpha-3)
|
||||
format.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.language_code import ISO639_5
|
||||
|
||||
class Language(BaseModel):
|
||||
alpha_3: ISO639_5
|
||||
|
||||
lang = Language(alpha_3='gem')
|
||||
print(lang)
|
||||
# > alpha_3='gem'
|
||||
```
|
||||
"""
|
||||
|
||||
allowed_values_list = [lang.alpha_3 for lang in pycountry.language_families]
|
||||
allowed_values_list.sort()
|
||||
allowed_values = set(allowed_values_list)
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: str, _: core_schema.ValidationInfo) -> ISO639_5:
|
||||
"""
|
||||
        Validate an ISO 639-5 language code from the provided str value.
|
||||
|
||||
Args:
|
||||
__input_value: The str value to be validated.
|
||||
_: The Pydantic ValidationInfo.
|
||||
|
||||
Returns:
|
||||
            The validated ISO 639-5 language code.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the ISO 639-5 language code is not valid.
|
||||
"""
|
||||
if __input_value not in cls.allowed_values:
|
||||
raise PydanticCustomError(
|
||||
'ISO649_5', 'Invalid ISO 639-5 language code. See https://en.wikipedia.org/wiki/ISO_639-5'
|
||||
)
|
||||
return cls(__input_value)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(
|
||||
cls, _: type[Any], __: GetCoreSchemaHandler
|
||||
) -> core_schema.AfterValidatorFunctionSchema:
|
||||
"""
|
||||
Return a Pydantic CoreSchema with the ISO 639-5 language code validation.
|
||||
|
||||
Args:
|
||||
_: The source type.
|
||||
__: The handler to get the CoreSchema.
|
||||
|
||||
Returns:
|
||||
A Pydantic CoreSchema with the ISO 639-5 language code validation.
|
||||
|
||||
"""
|
||||
return core_schema.with_info_after_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(min_length=3, max_length=3),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_json_schema__(
|
||||
cls, schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
|
||||
) -> dict[str, Any]:
|
||||
"""
|
||||
Return a Pydantic JSON Schema with the ISO 639-5 language code validation.
|
||||
|
||||
Args:
|
||||
schema: The Pydantic CoreSchema.
|
||||
handler: The handler to get the JSON Schema.
|
||||
|
||||
Returns:
|
||||
A Pydantic JSON Schema with the ISO 639-5 language code validation.
|
||||
|
||||
"""
|
||||
json_schema = handler(schema)
|
||||
json_schema.update({'enum': cls.allowed_values_list})
|
||||
return json_schema
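A short sketch combining both language types; the model and field names are illustrative:

```py
from pydantic import BaseModel, ValidationError

from pydantic_extra_types.language_code import ISO639_3, ISO639_5


class Movie(BaseModel):
    audio_lang: ISO639_3       # an individual language, e.g. 'deu'
    subtitle_family: ISO639_5  # a language family, e.g. 'gem'


Movie(audio_lang='deu', subtitle_family='gem')

try:
    Movie(audio_lang='xx', subtitle_family='gem')  # too short: min_length=3 fails first
except ValidationError as exc:
    print(exc.errors()[0]['loc'])
    # expected: ('audio_lang',)
```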
125
pydantic_extra_types/mac_address.py
Normal file
@@ -0,0 +1,125 @@
"""
|
||||
The MAC address module provides functionality to parse and validate MAC addresses in different
|
||||
formats, such as IEEE 802 MAC-48, EUI-48, EUI-64, or a 20-octet format.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pydantic import GetCoreSchemaHandler
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
|
||||
|
||||
class MacAddress(str):
|
||||
"""Represents a MAC address and provides methods for conversion, validation, and serialization.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.mac_address import MacAddress
|
||||
|
||||
|
||||
class Network(BaseModel):
|
||||
mac_address: MacAddress
|
||||
|
||||
|
||||
network = Network(mac_address="00:00:5e:00:53:01")
|
||||
print(network)
|
||||
#> mac_address='00:00:5e:00:53:01'
|
||||
```
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
|
||||
"""
|
||||
Return a Pydantic CoreSchema with the MAC address validation.
|
||||
|
||||
Args:
|
||||
source: The source type to be converted.
|
||||
handler: The handler to get the CoreSchema.
|
||||
|
||||
Returns:
|
||||
A Pydantic CoreSchema with the MAC address validation.
|
||||
|
||||
"""
|
||||
return core_schema.with_info_before_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: str, _: Any) -> str:
|
||||
"""
|
||||
Validate a MAC Address from the provided str value.
|
||||
|
||||
Args:
|
||||
__input_value: The str value to be validated.
|
||||
_: The source type to be converted.
|
||||
|
||||
Returns:
|
||||
str: The parsed MAC address.
|
||||
|
||||
"""
|
||||
return cls.validate_mac_address(__input_value.encode())
|
||||
|
||||
@staticmethod
|
||||
def validate_mac_address(value: bytes) -> str:
|
||||
"""
|
||||
Validate a MAC Address from the provided byte value.
|
||||
"""
|
||||
if len(value) < 14:
|
||||
raise PydanticCustomError(
|
||||
'mac_address_len',
|
||||
'Length for a {mac_address} MAC address must be {required_length}',
|
||||
{'mac_address': value.decode(), 'required_length': 14},
|
||||
)
|
||||
|
||||
if value[2] in [ord(':'), ord('-')]:
|
||||
if (len(value) + 1) % 3 != 0:
|
||||
raise PydanticCustomError(
|
||||
'mac_address_format', 'Must have the format xx:xx:xx:xx:xx:xx or xx-xx-xx-xx-xx-xx'
|
||||
)
|
||||
n = (len(value) + 1) // 3
|
||||
if n not in (6, 8, 20):
|
||||
raise PydanticCustomError(
|
||||
'mac_address_format',
|
||||
'Length for a {mac_address} MAC address must be {required_length}',
|
||||
{'mac_address': value.decode(), 'required_length': (6, 8, 20)},
|
||||
)
|
||||
mac_address = bytearray(n)
|
||||
x = 0
|
||||
for i in range(n):
|
||||
try:
|
||||
byte_value = int(value[x : x + 2], 16)
|
||||
mac_address[i] = byte_value
|
||||
x += 3
|
||||
except ValueError as e:
|
||||
raise PydanticCustomError('mac_address_format', 'Unrecognized format') from e
|
||||
|
||||
elif value[4] == ord('.'):
|
||||
if (len(value) + 1) % 5 != 0:
|
||||
raise PydanticCustomError('mac_address_format', 'Must have the format xx.xx.xx.xx.xx.xx')
|
||||
n = 2 * (len(value) + 1) // 5
|
||||
if n not in (6, 8, 20):
|
||||
raise PydanticCustomError(
|
||||
'mac_address_format',
|
||||
'Length for a {mac_address} MAC address must be {required_length}',
|
||||
{'mac_address': value.decode(), 'required_length': (6, 8, 20)},
|
||||
)
|
||||
mac_address = bytearray(n)
|
||||
x = 0
|
||||
for i in range(0, n, 2):
|
||||
try:
|
||||
byte_value = int(value[x : x + 2], 16)
|
||||
mac_address[i] = byte_value
|
||||
byte_value = int(value[x + 2 : x + 4], 16)
|
||||
mac_address[i + 1] = byte_value
|
||||
x += 5
|
||||
except ValueError as e:
|
||||
raise PydanticCustomError('mac_address_format', 'Unrecognized format') from e
|
||||
|
||||
else:
|
||||
raise PydanticCustomError('mac_address_format', 'Unrecognized format')
|
||||
|
||||
return ':'.join(f'{b:02x}' for b in mac_address)
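The validator accepts colon-, hyphen- and dot-separated input of 6, 8 or 20 octets and always serialises to the lower-case, colon-separated form. A minimal sketch; the addresses are illustrative:

```py
from pydantic import BaseModel

from pydantic_extra_types.mac_address import MacAddress


class Network(BaseModel):
    mac_address: MacAddress


# Hyphenated and dotted inputs are normalised to the same colon-separated form.
print(Network(mac_address='00-00-5E-00-53-01').mac_address)
# expected: 00:00:5e:00:53:01
print(Network(mac_address='0000.5e00.5301').mac_address)
# expected: 00:00:5e:00:53:01
```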
199
pydantic_extra_types/payment.py
Normal file
@@ -0,0 +1,199 @@
"""
|
||||
The `pydantic_extra_types.payment` module provides the
|
||||
[`PaymentCardNumber`][pydantic_extra_types.payment.PaymentCardNumber] data type.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import Enum
|
||||
from typing import Any, ClassVar
|
||||
|
||||
from pydantic import GetCoreSchemaHandler
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
|
||||
|
||||
class PaymentCardBrand(str, Enum):
|
||||
"""Payment card brands supported by the [`PaymentCardNumber`][pydantic_extra_types.payment.PaymentCardNumber]."""
|
||||
|
||||
amex = 'American Express'
|
||||
mastercard = 'Mastercard'
|
||||
visa = 'Visa'
|
||||
mir = 'Mir'
|
||||
maestro = 'Maestro'
|
||||
discover = 'Discover'
|
||||
verve = 'Verve'
|
||||
dankort = 'Dankort'
|
||||
troy = 'Troy'
|
||||
unionpay = 'UnionPay'
|
||||
jcb = 'JCB'
|
||||
other = 'other'
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.value
|
||||
|
||||
|
||||
class PaymentCardNumber(str):
|
||||
"""A [payment card number](https://en.wikipedia.org/wiki/Payment_card_number)."""
|
||||
|
||||
strip_whitespace: ClassVar[bool] = True
|
||||
"""Whether to strip whitespace from the input value."""
|
||||
min_length: ClassVar[int] = 12
|
||||
"""The minimum length of the card number."""
|
||||
max_length: ClassVar[int] = 19
|
||||
"""The maximum length of the card number."""
|
||||
bin: str
|
||||
"""The first 6 digits of the card number."""
|
||||
last4: str
|
||||
"""The last 4 digits of the card number."""
|
||||
brand: PaymentCardBrand
|
||||
"""The brand of the card."""
|
||||
|
||||
def __init__(self, card_number: str):
|
||||
self.validate_digits(card_number)
|
||||
|
||||
card_number = self.validate_luhn_check_digit(card_number)
|
||||
|
||||
self.bin = card_number[:6]
|
||||
self.last4 = card_number[-4:]
|
||||
self.brand = self.validate_brand(card_number)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
|
||||
return core_schema.with_info_after_validator_function(
|
||||
cls.validate,
|
||||
core_schema.str_schema(
|
||||
min_length=cls.min_length, max_length=cls.max_length, strip_whitespace=cls.strip_whitespace
|
||||
),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(cls, __input_value: str, _: core_schema.ValidationInfo) -> PaymentCardNumber:
|
||||
"""Validate the `PaymentCardNumber` instance.
|
||||
|
||||
Args:
|
||||
__input_value: The input value to validate.
|
||||
_: The validation info.
|
||||
|
||||
Returns:
|
||||
The validated `PaymentCardNumber` instance.
|
||||
"""
|
||||
return cls(__input_value)
|
||||
|
||||
@property
|
||||
def masked(self) -> str:
|
||||
"""The masked card number."""
|
||||
num_masked = len(self) - 10 # len(bin) + len(last4) == 10
|
||||
return f'{self.bin}{"*" * num_masked}{self.last4}'
|
||||
|
||||
@classmethod
|
||||
def validate_digits(cls, card_number: str) -> None:
|
||||
"""Validate that the card number is all digits.
|
||||
|
||||
Args:
|
||||
card_number: The card number to validate.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the card number is not all digits.
|
||||
"""
|
||||
if not card_number or not all('0' <= c <= '9' for c in card_number):
|
||||
raise PydanticCustomError('payment_card_number_digits', 'Card number is not all digits')
|
||||
|
||||
@classmethod
|
||||
def validate_luhn_check_digit(cls, card_number: str) -> str:
|
||||
"""Validate the payment card number.
|
||||
Based on the [Luhn algorithm](https://en.wikipedia.org/wiki/Luhn_algorithm).
|
||||
|
||||
Args:
|
||||
card_number: The card number to validate.
|
||||
|
||||
Returns:
|
||||
The validated card number.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the card number is not valid.
|
||||
"""
|
||||
sum_ = int(card_number[-1])
|
||||
length = len(card_number)
|
||||
parity = length % 2
|
||||
for i in range(length - 1):
|
||||
digit = int(card_number[i])
|
||||
if i % 2 == parity:
|
||||
digit *= 2
|
||||
if digit > 9:
|
||||
digit -= 9
|
||||
sum_ += digit
|
||||
valid = sum_ % 10 == 0
|
||||
if not valid:
|
||||
raise PydanticCustomError('payment_card_number_luhn', 'Card number is not luhn valid')
|
||||
return card_number
|
||||
|
||||
@staticmethod
|
||||
def validate_brand(card_number: str) -> PaymentCardBrand:
|
||||
"""Validate length based on
|
||||
[BIN](https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN))
|
||||
for major brands.
|
||||
|
||||
Args:
|
||||
card_number: The card number to validate.
|
||||
|
||||
Returns:
|
||||
The validated card brand.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the card number is not valid.
|
||||
"""
|
||||
brand = PaymentCardBrand.other
|
||||
|
||||
if card_number[0] == '4':
|
||||
brand = PaymentCardBrand.visa
|
||||
required_length = [13, 16, 19]
|
||||
elif 51 <= int(card_number[:2]) <= 55:
|
||||
brand = PaymentCardBrand.mastercard
|
||||
required_length = [16]
|
||||
elif card_number[:2] in {'34', '37'}:
|
||||
brand = PaymentCardBrand.amex
|
||||
required_length = [15]
|
||||
elif 2200 <= int(card_number[:4]) <= 2204:
|
||||
brand = PaymentCardBrand.mir
|
||||
required_length = list(range(16, 20))
|
||||
elif card_number[:4] in {'5018', '5020', '5038', '5893', '6304', '6759', '6761', '6762', '6763'} or card_number[
|
||||
:6
|
||||
] in (
|
||||
'676770',
|
||||
'676774',
|
||||
):
|
||||
brand = PaymentCardBrand.maestro
|
||||
required_length = list(range(12, 20))
|
||||
elif card_number.startswith('65') or 644 <= int(card_number[:3]) <= 649 or card_number.startswith('6011'):
|
||||
brand = PaymentCardBrand.discover
|
||||
required_length = list(range(16, 20))
|
||||
elif (
|
||||
506099 <= int(card_number[:6]) <= 506198
|
||||
or 650002 <= int(card_number[:6]) <= 650027
|
||||
or 507865 <= int(card_number[:6]) <= 507964
|
||||
):
|
||||
brand = PaymentCardBrand.verve
|
||||
required_length = [16, 18, 19]
|
||||
elif card_number[:4] in {'5019', '4571'}:
|
||||
brand = PaymentCardBrand.dankort
|
||||
required_length = [16]
|
||||
elif card_number.startswith('9792'):
|
||||
brand = PaymentCardBrand.troy
|
||||
required_length = [16]
|
||||
elif card_number[:2] in {'62', '81'}:
|
||||
brand = PaymentCardBrand.unionpay
|
||||
required_length = [16, 19]
|
||||
elif 3528 <= int(card_number[:4]) <= 3589:
|
||||
brand = PaymentCardBrand.jcb
|
||||
required_length = [16, 19]
|
||||
|
||||
valid = len(card_number) in required_length if brand != PaymentCardBrand.other else True
|
||||
|
||||
if not valid:
|
||||
raise PydanticCustomError(
|
||||
'payment_card_number_brand',
|
||||
f'Length for a {brand} card must be {" or ".join(map(str, required_length))}',
|
||||
{'brand': brand, 'required_length': required_length},
|
||||
)
|
||||
|
||||
return brand
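A minimal usage sketch with a widely used Luhn-valid Visa test number (illustrative, not taken from this repository):

```py
from pydantic import BaseModel

from pydantic_extra_types.payment import PaymentCardNumber


class Card(BaseModel):
    number: PaymentCardNumber


card = Card(number='4242424242424242')
print(card.number.brand, card.number.masked)
# expected: Visa 424242******4242
```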
74
pydantic_extra_types/pendulum_dt.py
Normal file
@@ -0,0 +1,74 @@
"""
|
||||
Native Pendulum DateTime object implementation. This is a copy of the Pendulum DateTime object, but with a Pydantic
|
||||
CoreSchema implementation. This allows Pydantic to validate the DateTime object.
|
||||
"""
|
||||
|
||||
try:
|
||||
from pendulum import DateTime as _DateTime
|
||||
from pendulum import parse
|
||||
except ModuleNotFoundError: # pragma: no cover
|
||||
raise RuntimeError(
|
||||
'The `pendulum_dt` module requires "pendulum" to be installed. You can install it with "pip install pendulum".'
|
||||
)
|
||||
from typing import Any, List, Type
|
||||
|
||||
from pydantic import GetCoreSchemaHandler
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
|
||||
|
||||
class DateTime(_DateTime):
|
||||
"""
|
||||
A `pendulum.DateTime` object. At runtime, this type decomposes into pendulum.DateTime automatically.
|
||||
This type exists because Pydantic throws a fit on unknown types.
|
||||
|
||||
```python
|
||||
from pydantic import BaseModel
|
||||
from pydantic_extra_types.pendulum_dt import DateTime
|
||||
|
||||
class test_model(BaseModel):
|
||||
dt: DateTime
|
||||
|
||||
print(test_model(dt='2021-01-01T00:00:00+00:00'))
|
||||
|
||||
#> test_model(dt=DateTime(2021, 1, 1, 0, 0, 0, tzinfo=FixedTimezone(0, name="+00:00")))
|
||||
```
|
||||
"""
|
||||
|
||||
__slots__: List[str] = []
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(cls, source: Type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
|
||||
"""
|
||||
Return a Pydantic CoreSchema with the Datetime validation
|
||||
|
||||
Args:
|
||||
source: The source type to be converted.
|
||||
handler: The handler to get the CoreSchema.
|
||||
|
||||
Returns:
|
||||
A Pydantic CoreSchema with the Datetime validation.
|
||||
"""
|
||||
return core_schema.no_info_wrap_validator_function(cls._validate, core_schema.datetime_schema())
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, value: Any, handler: core_schema.ValidatorFunctionWrapHandler) -> Any:
|
||||
"""
|
||||
Validate the datetime object and return it.
|
||||
|
||||
Args:
|
||||
value: The value to validate.
|
||||
handler: The handler to get the CoreSchema.
|
||||
|
||||
Returns:
|
||||
The validated value or raises a PydanticCustomError.
|
||||
"""
|
||||
# if we are passed an existing instance, pass it straight through.
|
||||
if isinstance(value, _DateTime):
|
||||
return handler(value)
|
||||
|
||||
# otherwise, parse it.
|
||||
try:
|
||||
data = parse(value)
|
||||
except Exception as exc:
|
||||
raise PydanticCustomError('value_error', 'value is not a valid timestamp') from exc
|
||||
return handler(data)
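A short sketch of the two accepted input forms: an existing `pendulum.DateTime` is handed straight to the schema, while strings go through `pendulum.parse`. The model name is illustrative:

```py
import pendulum
from pydantic import BaseModel

from pydantic_extra_types.pendulum_dt import DateTime


class Meeting(BaseModel):
    start: DateTime


Meeting(start=pendulum.datetime(2024, 1, 1, tz='UTC'))  # passes through unparsed
Meeting(start='2024-01-01T09:00:00+00:00')              # parsed by pendulum.parse
```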
68
pydantic_extra_types/phone_numbers.py
Normal file
@@ -0,0 +1,68 @@
"""
|
||||
The `pydantic_extra_types.phone_numbers` module provides the
|
||||
[`PhoneNumber`][pydantic_extra_types.phone_numbers.PhoneNumber] data type.
|
||||
|
||||
This class depends on the [phonenumbers] package, which is a Python port of Google's [libphonenumber].
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Callable, ClassVar, Generator
|
||||
|
||||
from pydantic import GetCoreSchemaHandler, GetJsonSchemaHandler
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
|
||||
try:
|
||||
import phonenumbers
|
||||
except ModuleNotFoundError: # pragma: no cover
|
||||
raise RuntimeError(
|
||||
'`PhoneNumber` requires "phonenumbers" to be installed. You can install it with "pip install phonenumbers"'
|
||||
)
|
||||
|
||||
GeneratorCallableStr = Generator[Callable[..., str], None, None]
|
||||
|
||||
|
||||
class PhoneNumber(str):
|
||||
"""
|
||||
A wrapper around [phonenumbers](https://pypi.org/project/phonenumbers/) package, which
|
||||
is a Python port of Google's [libphonenumber](https://github.com/google/libphonenumber/).
|
||||
"""
|
||||
|
||||
supported_regions: list[str] = sorted(phonenumbers.SUPPORTED_REGIONS)
|
||||
"""The supported regions."""
|
||||
supported_formats: list[str] = sorted([f for f in phonenumbers.PhoneNumberFormat.__dict__.keys() if f.isupper()])
|
||||
"""The supported phone number formats."""
|
||||
|
||||
default_region_code: ClassVar[str | None] = None
|
||||
"""The default region code to use when parsing phone numbers without an international prefix."""
|
||||
phone_format: str = 'RFC3966'
|
||||
"""The format of the phone number."""
|
||||
min_length: int = 7
|
||||
"""The minimum length of the phone number."""
|
||||
max_length: int = 64
|
||||
"""The maximum length of the phone number."""
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_json_schema__(
|
||||
cls, schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
|
||||
) -> dict[str, Any]:
|
||||
json_schema = handler(schema)
|
||||
json_schema.update({'format': 'phone'})
|
||||
return json_schema
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
|
||||
return core_schema.with_info_after_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(min_length=cls.min_length, max_length=cls.max_length),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, phone_number: str, _: core_schema.ValidationInfo) -> str:
|
||||
try:
|
||||
parsed_number = phonenumbers.parse(phone_number, cls.default_region_code)
|
||||
except phonenumbers.phonenumberutil.NumberParseException as exc:
|
||||
raise PydanticCustomError('value_error', 'value is not a valid phone number') from exc
|
||||
if not phonenumbers.is_valid_number(parsed_number):
|
||||
raise PydanticCustomError('value_error', 'value is not a valid phone number')
|
||||
|
||||
return phonenumbers.format_number(parsed_number, getattr(phonenumbers.PhoneNumberFormat, cls.phone_format))
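A minimal sketch; with the class defaults above the stored value is in RFC3966 form, and `default_region_code` can be overridden on a subclass to accept national numbers. The phone number is illustrative; any number `phonenumbers` considers valid works:

```py
from pydantic import BaseModel

from pydantic_extra_types.phone_numbers import PhoneNumber


class Contact(BaseModel):
    phone: PhoneNumber


contact = Contact(phone='+1 650 253 0000')
print(contact.phone)
# expected (approximately): tel:+1-650-253-0000
```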
0
pydantic_extra_types/py.typed
Normal file
89
pydantic_extra_types/routing_number.py
Normal file
@@ -0,0 +1,89 @@
"""
|
||||
The `pydantic_extra_types.routing_number` module provides the
|
||||
[`ABARoutingNumber`][pydantic_extra_types.routing_number.ABARoutingNumber] data type.
|
||||
"""
|
||||
from typing import Any, ClassVar, Type
|
||||
|
||||
from pydantic import GetCoreSchemaHandler
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
|
||||
|
||||
class ABARoutingNumber(str):
|
||||
"""The `ABARoutingNumber` data type is a string of 9 digits representing an ABA routing transit number.
|
||||
|
||||
The algorithm used to validate the routing number is described in the
|
||||
[ABA routing transit number](https://en.wikipedia.org/wiki/ABA_routing_transit_number#Check_digit)
|
||||
Wikipedia article.
|
||||
|
||||
```py
|
||||
from pydantic import BaseModel
|
||||
|
||||
from pydantic_extra_types.routing_number import ABARoutingNumber
|
||||
|
||||
class BankAccount(BaseModel):
|
||||
routing_number: ABARoutingNumber
|
||||
|
||||
account = BankAccount(routing_number='122105155')
|
||||
print(account)
|
||||
#> routing_number='122105155'
|
||||
```
|
||||
"""
|
||||
|
||||
strip_whitespace: ClassVar[bool] = True
|
||||
min_length: ClassVar[int] = 9
|
||||
max_length: ClassVar[int] = 9
|
||||
|
||||
def __init__(self, routing_number: str):
|
||||
self._validate_digits(routing_number)
|
||||
self._routing_number = self._validate_routing_number(routing_number)
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(
|
||||
cls, source: Type[Any], handler: GetCoreSchemaHandler
|
||||
) -> core_schema.AfterValidatorFunctionSchema:
|
||||
return core_schema.with_info_after_validator_function(
|
||||
cls._validate,
|
||||
core_schema.str_schema(
|
||||
min_length=cls.min_length,
|
||||
max_length=cls.max_length,
|
||||
strip_whitespace=cls.strip_whitespace,
|
||||
strict=False,
|
||||
),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _validate(cls, __input_value: str, _: core_schema.ValidationInfo) -> 'ABARoutingNumber':
|
||||
return cls(__input_value)
|
||||
|
||||
@classmethod
|
||||
def _validate_digits(cls, routing_number: str) -> None:
|
||||
"""Check that the routing number is all digits.
|
||||
|
||||
Args:
|
||||
routing_number: The routing number to validate.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the routing number is not all digits.
|
||||
"""
|
||||
if not routing_number.isdigit():
|
||||
raise PydanticCustomError('aba_routing_number', 'routing number is not all digits')
|
||||
|
||||
@classmethod
|
||||
def _validate_routing_number(cls, routing_number: str) -> str:
|
||||
"""Check [digit algorithm](https://en.wikipedia.org/wiki/ABA_routing_transit_number#Check_digit) for
|
||||
[ABA routing transit number](https://www.routingnumber.com/).
|
||||
|
||||
Args:
|
||||
routing_number: The routing number to validate.
|
||||
|
||||
Raises:
|
||||
PydanticCustomError: If the routing number is incorrect.
|
||||
"""
|
||||
checksum = (
|
||||
3 * (sum(map(int, [routing_number[0], routing_number[3], routing_number[6]])))
|
||||
+ 7 * (sum(map(int, [routing_number[1], routing_number[4], routing_number[7]])))
|
||||
+ sum(map(int, [routing_number[2], routing_number[5], routing_number[8]]))
|
||||
)
|
||||
if checksum % 10 != 0:
|
||||
raise PydanticCustomError('aba_routing_number', 'Incorrect ABA routing transit number')
|
||||
return routing_number
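As a concrete check of the 3-7-1 weighting above, the routing number from the class docstring works out as follows (a standalone sketch, not part of the module):

```py
# '122105155' -> 3*(1 + 1 + 1) + 7*(2 + 0 + 5) + (2 + 5 + 5) = 9 + 49 + 12 = 70
routing_number = '122105155'
checksum = (
    3 * sum(map(int, routing_number[0::3]))
    + 7 * sum(map(int, routing_number[1::3]))
    + sum(map(int, routing_number[2::3]))
)
assert checksum % 10 == 0  # 70 is divisible by 10, so the number passes
```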
62
pydantic_extra_types/ulid.py
Normal file
@@ -0,0 +1,62 @@
"""
|
||||
The `pydantic_extra_types.ulid` module provides the [`ULID`][pydantic_extra_types.ulid.ULID] data type.

This class depends on the [python-ulid] package, which validates ULIDs against the [ULID spec](https://github.com/ulid/spec#implementations-in-other-languages).
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Union
|
||||
|
||||
from pydantic import GetCoreSchemaHandler
|
||||
from pydantic._internal import _repr
|
||||
from pydantic_core import PydanticCustomError, core_schema
|
||||
|
||||
try:
|
||||
from ulid import ULID as _ULID
|
||||
except ModuleNotFoundError: # pragma: no cover
|
||||
raise RuntimeError(
|
||||
'The `ulid` module requires "python-ulid" to be installed. You can install it with "pip install python-ulid".'
|
||||
)
|
||||
|
||||
UlidType = Union[str, bytes, int]
|
||||
|
||||
|
||||
@dataclass
|
||||
class ULID(_repr.Representation):
|
||||
"""
|
||||
    A wrapper around the [python-ulid](https://pypi.org/project/python-ulid/) package, which
    validates ULIDs against the [ULID spec](https://github.com/ulid/spec#implementations-in-other-languages).
|
||||
"""
|
||||
|
||||
ulid: _ULID
|
||||
|
||||
@classmethod
|
||||
def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
|
||||
return core_schema.no_info_wrap_validator_function(
|
||||
cls._validate_ulid,
|
||||
core_schema.union_schema(
|
||||
[
|
||||
core_schema.is_instance_schema(_ULID),
|
||||
core_schema.int_schema(),
|
||||
core_schema.bytes_schema(),
|
||||
core_schema.str_schema(),
|
||||
]
|
||||
),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _validate_ulid(cls, value: Any, handler: core_schema.ValidatorFunctionWrapHandler) -> Any:
|
||||
ulid: _ULID
|
||||
try:
|
||||
if isinstance(value, int):
|
||||
ulid = _ULID.from_int(value)
|
||||
elif isinstance(value, str):
|
||||
ulid = _ULID.from_str(value)
|
||||
elif isinstance(value, _ULID):
|
||||
ulid = value
|
||||
else:
|
||||
ulid = _ULID.from_bytes(value)
|
||||
except ValueError:
|
||||
raise PydanticCustomError('ulid_format', 'Unrecognized format')
|
||||
return handler(ulid)
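A short usage sketch; the ULID below is the canonical example string from the spec, and the event model is illustrative. Note that `_validate_ulid` converts str, int and bytes input to a `ulid.ULID` via the python-ulid constructors before handing it back to the schema:

```py
from pydantic import BaseModel

from pydantic_extra_types.ulid import ULID


class Event(BaseModel):
    id: ULID


event = Event(id='01BTGNYV6HRNK8K8VKZASZCFPE')
print(event.id)
```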
120
pyproject.toml
Normal file
@@ -0,0 +1,120 @@
[build-system]
|
||||
requires = ['hatchling']
|
||||
build-backend = 'hatchling.build'
|
||||
|
||||
[tool.hatch.version]
|
||||
path = 'pydantic_extra_types/__init__.py'
|
||||
|
||||
[project]
|
||||
name = 'pydantic-extra-types'
|
||||
description = 'Extra Pydantic types.'
|
||||
authors = [
|
||||
{name = 'Samuel Colvin', email = 's@muelcolvin.com'},
|
||||
{name = 'Yasser Tahiri', email = 'hello@yezz.me'},
|
||||
]
|
||||
license = 'MIT'
|
||||
readme = 'README.md'
|
||||
classifiers = [
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3 :: Only',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Intended Audience :: Developers',
|
||||
'Intended Audience :: Information Technology',
|
||||
'Intended Audience :: System Administrators',
|
||||
'License :: OSI Approved :: MIT License',
|
||||
'Operating System :: Unix',
|
||||
'Operating System :: POSIX :: Linux',
|
||||
'Environment :: Console',
|
||||
'Environment :: MacOS X',
|
||||
'Framework :: Pydantic',
|
||||
'Framework :: Pydantic :: 2',
|
||||
'Topic :: Software Development :: Libraries :: Python Modules',
|
||||
'Topic :: Internet',
|
||||
]
|
||||
requires-python = '>=3.8'
|
||||
dependencies = [
|
||||
'pydantic>=2.5.2',
|
||||
]
|
||||
dynamic = ['version']
|
||||
|
||||
[project.optional-dependencies]
|
||||
all = [
|
||||
'phonenumbers>=8,<9',
|
||||
'pycountry>=23',
|
||||
'python-ulid>=1,<2; python_version<"3.9"',
|
||||
'python-ulid>=1,<3; python_version>="3.9"',
|
||||
'pendulum>=3.0.0,<4.0.0'
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Homepage = 'https://github.com/pydantic/pydantic-extra-types'
|
||||
Source = 'https://github.com/pydantic/pydantic-extra-types'
|
||||
Changelog = 'https://github.com/pydantic/pydantic-extra-types/releases'
|
||||
Documentation = 'https://docs.pydantic.dev/latest/'
|
||||
|
||||
[tool.ruff.lint.pyupgrade]
|
||||
keep-runtime-typing = true
|
||||
|
||||
[tool.ruff]
|
||||
line-length = 120
|
||||
target-version = 'py38'
|
||||
|
||||
[tool.ruff.lint]
|
||||
extend-select = ['Q', 'RUF100', 'C90', 'UP', 'I']
|
||||
flake8-quotes = {inline-quotes = 'single', multiline-quotes = 'double'}
|
||||
isort = { known-first-party = ['pydantic_extra_types', 'tests'] }
|
||||
mccabe = { max-complexity = 14 }
|
||||
pydocstyle = { convention = 'google' }
|
||||
|
||||
[tool.ruff.format]
|
||||
quote-style = 'single'
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
'pydantic_extra_types/color.py' = ['E741']
|
||||
|
||||
[tool.coverage.run]
|
||||
source = ['pydantic_extra_types']
|
||||
branch = true
|
||||
context = '${CONTEXT}'
|
||||
|
||||
[tool.coverage.paths]
|
||||
source = [
|
||||
'pydantic_extra_types/',
|
||||
'/Users/runner/work/pydantic-extra-types/pydantic-extra-types/pydantic_extra_types/',
|
||||
'D:\a\pydantic-extra-types\pydantic-extra-types\pydantic_extra_types',
|
||||
]
|
||||
|
||||
[tool.coverage.report]
|
||||
precision = 2
|
||||
fail_under = 100
|
||||
show_missing = true
|
||||
skip_covered = true
|
||||
exclude_lines = [
|
||||
'pragma: no cover',
|
||||
'raise NotImplementedError',
|
||||
'if TYPE_CHECKING:',
|
||||
'@overload',
|
||||
]
|
||||
|
||||
|
||||
[tool.mypy]
|
||||
strict = true
|
||||
plugins = 'pydantic.mypy'
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
filterwarnings = [
|
||||
'error',
|
||||
    # This ignore can be removed once pycountry drops py36 and supports py311
|
||||
'ignore:::pkg_resources',
|
||||
]
|
||||
|
||||
# configuring https://github.com/pydantic/hooky
|
||||
[tool.hooky]
|
||||
reviewers = ['yezz123', 'Kludex']
|
||||
require_change_file = false
3
requirements/all.txt
Normal file
@@ -0,0 +1,3 @@
-r ./pyproject.txt
-r ./linting.txt
-r ./testing.txt
4
requirements/linting.in
Normal file
@@ -0,0 +1,4 @@
pre-commit
mypy
annotated-types
ruff
37
requirements/linting.txt
Normal file
@@ -0,0 +1,37 @@
#
|
||||
# This file is autogenerated by pip-compile with Python 3.11
|
||||
# by the following command:
|
||||
#
|
||||
# pip-compile --no-emit-index-url --output-file=requirements/linting.txt requirements/linting.in
|
||||
#
|
||||
annotated-types==0.6.0
|
||||
# via -r requirements/linting.in
|
||||
cfgv==3.4.0
|
||||
# via pre-commit
|
||||
distlib==0.3.8
|
||||
# via virtualenv
|
||||
filelock==3.13.1
|
||||
# via virtualenv
|
||||
identify==2.5.35
|
||||
# via pre-commit
|
||||
mypy==1.8.0
|
||||
# via -r requirements/linting.in
|
||||
mypy-extensions==1.0.0
|
||||
# via mypy
|
||||
nodeenv==1.8.0
|
||||
# via pre-commit
|
||||
platformdirs==4.2.0
|
||||
# via virtualenv
|
||||
pre-commit==3.6.2
|
||||
# via -r requirements/linting.in
|
||||
pyyaml==6.0.1
|
||||
# via pre-commit
|
||||
ruff==0.2.2
|
||||
# via -r requirements/linting.in
|
||||
typing-extensions==4.10.0
|
||||
# via mypy
|
||||
virtualenv==20.25.1
|
||||
# via pre-commit
|
||||
|
||||
# The following packages are considered to be unsafe in a requirements file:
|
||||
# setuptools
34
requirements/pyproject.txt
Normal file
@@ -0,0 +1,34 @@
#
|
||||
# This file is autogenerated by pip-compile with Python 3.11
|
||||
# by the following command:
|
||||
#
|
||||
# pip-compile --extra=all --no-emit-index-url --output-file=requirements/pyproject.txt pyproject.toml
|
||||
#
|
||||
annotated-types==0.6.0
|
||||
# via pydantic
|
||||
pendulum==3.0.0
|
||||
# via pydantic-extra-types (pyproject.toml)
|
||||
phonenumbers==8.13.31
|
||||
# via pydantic-extra-types (pyproject.toml)
|
||||
pycountry==23.12.11
|
||||
# via pydantic-extra-types (pyproject.toml)
|
||||
pydantic==2.6.3
|
||||
# via pydantic-extra-types (pyproject.toml)
|
||||
pydantic-core==2.16.3
|
||||
# via pydantic
|
||||
python-dateutil==2.8.2
|
||||
# via
|
||||
# pendulum
|
||||
# time-machine
|
||||
python-ulid==1.1.0
|
||||
# via pydantic-extra-types (pyproject.toml)
|
||||
six==1.16.0
|
||||
# via python-dateutil
|
||||
time-machine==2.13.0
|
||||
# via pendulum
|
||||
typing-extensions==4.10.0
|
||||
# via
|
||||
# pydantic
|
||||
# pydantic-core
|
||||
tzdata==2024.1
|
||||
# via pendulum
6
requirements/testing.in
Normal file
@@ -0,0 +1,6 @@
dirty-equals
coverage[toml]
pytest
codecov
pytest-cov
pytest-pretty
50
requirements/testing.txt
Normal file
@@ -0,0 +1,50 @@
#
|
||||
# This file is autogenerated by pip-compile with Python 3.11
|
||||
# by the following command:
|
||||
#
|
||||
# pip-compile --no-emit-index-url --output-file=requirements/testing.txt requirements/testing.in
|
||||
#
|
||||
certifi==2024.2.2
|
||||
# via requests
|
||||
charset-normalizer==3.3.2
|
||||
# via requests
|
||||
codecov==2.1.13
|
||||
# via -r requirements/testing.in
|
||||
coverage[toml]==7.4.3
|
||||
# via
|
||||
# -r requirements/testing.in
|
||||
# codecov
|
||||
# pytest-cov
|
||||
dirty-equals==0.7.1.post0
|
||||
# via -r requirements/testing.in
|
||||
idna==3.6
|
||||
# via requests
|
||||
iniconfig==2.0.0
|
||||
# via pytest
|
||||
markdown-it-py==3.0.0
|
||||
# via rich
|
||||
mdurl==0.1.2
|
||||
# via markdown-it-py
|
||||
packaging==23.2
|
||||
# via pytest
|
||||
pluggy==1.4.0
|
||||
# via pytest
|
||||
pygments==2.17.2
|
||||
# via rich
|
||||
pytest==8.0.2
|
||||
# via
|
||||
# -r requirements/testing.in
|
||||
# pytest-cov
|
||||
# pytest-pretty
|
||||
pytest-cov==4.1.0
|
||||
# via -r requirements/testing.in
|
||||
pytest-pretty==1.2.0
|
||||
# via -r requirements/testing.in
|
||||
pytz==2024.1
|
||||
# via dirty-equals
|
||||
requests==2.31.0
|
||||
# via codecov
|
||||
rich==13.7.0
|
||||
# via pytest-pretty
|
||||
urllib3==2.2.1
|
||||
# via requests
0
tests/__init__.py
Normal file
196
tests/test_coordinate.py
Normal file
@@ -0,0 +1,196 @@
from re import Pattern
|
||||
from typing import Any, Optional
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
from pydantic_core._pydantic_core import ArgsKwargs
|
||||
|
||||
from pydantic_extra_types.coordinate import Coordinate, Latitude, Longitude
|
||||
|
||||
|
||||
class Coord(BaseModel):
|
||||
coord: Coordinate
|
||||
|
||||
|
||||
class Lat(BaseModel):
|
||||
lat: Latitude
|
||||
|
||||
|
||||
class Lng(BaseModel):
|
||||
lng: Longitude
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'coord, result, error',
|
||||
[
|
||||
# Valid coordinates
|
||||
((20.0, 10.0), (20.0, 10.0), None),
|
||||
((-90.0, 0.0), (-90.0, 0.0), None),
|
||||
(('20.0', 10.0), (20.0, 10.0), None),
|
||||
((20.0, '10.0'), (20.0, 10.0), None),
|
||||
((45.678, -123.456), (45.678, -123.456), None),
|
||||
(('45.678, -123.456'), (45.678, -123.456), None),
|
||||
(Coordinate(20.0, 10.0), (20.0, 10.0), None),
|
||||
(Coordinate(latitude=0, longitude=0), (0, 0), None),
|
||||
(ArgsKwargs(args=()), (0, 0), None),
|
||||
(ArgsKwargs(args=(1, 0.0)), (1.0, 0), None),
|
||||
        # Invalid coordinates
|
||||
((), None, 'Field required'), # Empty tuple
|
||||
((10.0,), None, 'Field required'), # Tuple with only one value
|
||||
(('ten, '), None, 'string is not recognized as a valid coordinate'),
|
||||
((20.0, 10.0, 30.0), None, 'Tuple should have at most 2 items'), # Tuple with more than 2 values
|
||||
(ArgsKwargs(args=(1.0,)), None, 'Input should be a dictionary or an instance of Coordinate'),
|
||||
(
|
||||
'20.0, 10.0, 30.0',
|
||||
None,
|
||||
'Input should be a dictionary or an instance of Coordinate ',
|
||||
), # Str with more than 2 values
|
||||
('20.0, 10.0, 30.0', None, 'Unexpected positional argument'), # Str with more than 2 values
|
||||
(2, None, 'Input should be a dictionary or an instance of Coordinate'), # Wrong type
|
||||
],
|
||||
)
|
||||
def test_format_for_coordinate(coord: (Any, Any), result: (float, float), error: Optional[Pattern]):
|
||||
if error is None:
|
||||
_coord: Coordinate = Coord(coord=coord).coord
|
||||
print('vars(_coord)', vars(_coord))
|
||||
assert _coord.latitude == result[0]
|
||||
assert _coord.longitude == result[1]
|
||||
else:
|
||||
with pytest.raises(ValidationError, match=error):
|
||||
Coord(coord=coord).coord
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'coord, error',
|
||||
[
|
||||
# Valid coordinates
|
||||
((-90.0, 0.0), None),
|
||||
((50.0, 180.0), None),
|
||||
# Invalid coordinates
|
||||
((-91.0, 0.0), 'Input should be greater than or equal to -90'),
|
||||
((50.0, 181.0), 'Input should be less than or equal to 180'),
|
||||
],
|
||||
)
|
||||
def test_limit_for_coordinate(coord: (Any, Any), error: Optional[Pattern]):
|
||||
if error is None:
|
||||
_coord: Coordinate = Coord(coord=coord).coord
|
||||
assert _coord.latitude == coord[0]
|
||||
assert _coord.longitude == coord[1]
|
||||
else:
|
||||
with pytest.raises(ValidationError, match=error):
|
||||
Coord(coord=coord).coord
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'latitude, valid',
|
||||
[
|
||||
# Valid latitude
|
||||
(20.0, True),
|
||||
(3.0000000000000000000000, True),
|
||||
(90.0, True),
|
||||
('90.0', True),
|
||||
(-90.0, True),
|
||||
('-90.0', True),
|
||||
        # Invalid latitude
|
||||
(91.0, False),
|
||||
(-91.0, False),
|
||||
],
|
||||
)
|
||||
def test_format_latitude(latitude: float, valid: bool):
|
||||
if valid:
|
||||
_lat = Lat(lat=latitude).lat
|
||||
assert _lat == float(latitude)
|
||||
else:
|
||||
with pytest.raises(ValidationError, match='1 validation error for Lat'):
|
||||
Lat(lat=latitude)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'longitude, valid',
|
||||
[
|
||||
        # Valid longitude
|
||||
(20.0, True),
|
||||
(3.0000000000000000000000, True),
|
||||
(90.0, True),
|
||||
('90.0', True),
|
||||
(-90.0, True),
|
||||
('-90.0', True),
|
||||
(91.0, True),
|
||||
(-91.0, True),
|
||||
(180.0, True),
|
||||
(-180.0, True),
|
||||
        # Invalid longitude
|
||||
(181.0, False),
|
||||
(-181.0, False),
|
||||
],
|
||||
)
|
||||
def test_format_longitude(longitude: float, valid: bool):
|
||||
if valid:
|
||||
_lng = Lng(lng=longitude).lng
|
||||
assert _lng == float(longitude)
|
||||
else:
|
||||
with pytest.raises(ValidationError, match='1 validation error for Lng'):
|
||||
Lng(lng=longitude)
|
||||
|
||||
|
||||
def test_str_repr():
|
||||
assert str(Coord(coord=(20.0, 10.0)).coord) == '20.0,10.0'
|
||||
assert str(Coord(coord=('20.0, 10.0')).coord) == '20.0,10.0'
|
||||
assert repr(Coord(coord=(20.0, 10.0)).coord) == 'Coordinate(latitude=20.0, longitude=10.0)'
|
||||
|
||||
|
||||
def test_eq():
|
||||
assert Coord(coord=(20.0, 10.0)).coord != Coord(coord='20.0,11.0').coord
|
||||
assert Coord(coord=('20.0, 10.0')).coord != Coord(coord='20.0,11.0').coord
|
||||
assert Coord(coord=('20.0, 10.0')).coord != Coord(coord='20.0,11.0').coord
|
||||
assert Coord(coord=(20.0, 10.0)).coord == Coord(coord='20.0,10.0').coord
|
||||
|
||||
|
||||
def test_hashable():
|
||||
assert hash(Coord(coord=(20.0, 10.0)).coord) == hash(Coord(coord=(20.0, 10.0)).coord)
|
||||
assert hash(Coord(coord=(20.0, 11.0)).coord) != hash(Coord(coord=(20.0, 10.0)).coord)
|
||||
|
||||
|
||||
def test_json_schema():
|
||||
class Model(BaseModel):
|
||||
value: Coordinate
|
||||
|
||||
assert Model.model_json_schema(mode='validation')['$defs']['Coordinate'] == {
|
||||
'properties': {
|
||||
'latitude': {'maximum': 90.0, 'minimum': -90.0, 'title': 'Latitude', 'type': 'number'},
|
||||
'longitude': {'maximum': 180.0, 'minimum': -180.0, 'title': 'Longitude', 'type': 'number'},
|
||||
},
|
||||
'required': ['latitude', 'longitude'],
|
||||
'title': 'Coordinate',
|
||||
'type': 'object',
|
||||
}
|
||||
assert Model.model_json_schema(mode='validation')['properties']['value'] == {
|
||||
'anyOf': [
|
||||
{'$ref': '#/$defs/Coordinate'},
|
||||
{
|
||||
'maxItems': 2,
|
||||
'minItems': 2,
|
||||
'prefixItems': [{'type': 'number'}, {'type': 'number'}],
|
||||
'type': 'array',
|
||||
},
|
||||
{'type': 'string'},
|
||||
],
|
||||
'title': 'Value',
|
||||
}
|
||||
assert Model.model_json_schema(mode='serialization') == {
|
||||
'$defs': {
|
||||
'Coordinate': {
|
||||
'properties': {
|
||||
'latitude': {'maximum': 90.0, 'minimum': -90.0, 'title': 'Latitude', 'type': 'number'},
|
||||
'longitude': {'maximum': 180.0, 'minimum': -180.0, 'title': 'Longitude', 'type': 'number'},
|
||||
},
|
||||
'required': ['latitude', 'longitude'],
|
||||
'title': 'Coordinate',
|
||||
'type': 'object',
|
||||
}
|
||||
},
|
||||
'properties': {'value': {'allOf': [{'$ref': '#/$defs/Coordinate'}], 'title': 'Value'}},
|
||||
'required': ['value'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
}
110
tests/test_country_code.py
Normal file
@@ -0,0 +1,110 @@
from string import printable
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
from pydantic_extra_types.country import (
|
||||
CountryAlpha2,
|
||||
CountryAlpha3,
|
||||
CountryInfo,
|
||||
CountryNumericCode,
|
||||
CountryShortName,
|
||||
_index_by_alpha2,
|
||||
_index_by_alpha3,
|
||||
_index_by_numeric_code,
|
||||
_index_by_short_name,
|
||||
)
|
||||
|
||||
PARAMS_AMOUNT = 20
|
||||
|
||||
|
||||
@pytest.fixture(scope='module', name='ProductAlpha2')
|
||||
def product_alpha2_fixture():
|
||||
class Product(BaseModel):
|
||||
made_in: CountryAlpha2
|
||||
|
||||
return Product
|
||||
|
||||
|
||||
@pytest.fixture(scope='module', name='ProductAlpha3')
|
||||
def product_alpha3_fixture():
|
||||
class Product(BaseModel):
|
||||
made_in: CountryAlpha3
|
||||
|
||||
return Product
|
||||
|
||||
|
||||
@pytest.fixture(scope='module', name='ProductShortName')
|
||||
def product_short_name_fixture():
|
||||
class Product(BaseModel):
|
||||
made_in: CountryShortName
|
||||
|
||||
return Product
|
||||
|
||||
|
||||
@pytest.fixture(scope='module', name='ProductNumericCode')
|
||||
def product_numeric_code_fixture():
|
||||
class Product(BaseModel):
|
||||
made_in: CountryNumericCode
|
||||
|
||||
return Product
|
||||
|
||||
|
||||
@pytest.mark.parametrize('alpha2, country_data', list(_index_by_alpha2().items())[:PARAMS_AMOUNT])
|
||||
def test_valid_alpha2(alpha2: str, country_data: CountryInfo, ProductAlpha2):
|
||||
banana = ProductAlpha2(made_in=alpha2)
|
||||
assert banana.made_in == country_data.alpha2
|
||||
assert banana.made_in.alpha3 == country_data.alpha3
|
||||
assert banana.made_in.numeric_code == country_data.numeric_code
|
||||
assert banana.made_in.short_name == country_data.short_name
|
||||
|
||||
|
||||
@pytest.mark.parametrize('alpha2', list(printable))
|
||||
def test_invalid_alpha2(alpha2: str, ProductAlpha2):
|
||||
with pytest.raises(ValidationError, match='Invalid country alpha2 code'):
|
||||
ProductAlpha2(made_in=alpha2)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('alpha3, country_data', list(_index_by_alpha3().items())[:PARAMS_AMOUNT])
|
||||
def test_valid_alpha3(alpha3: str, country_data: CountryInfo, ProductAlpha3):
|
||||
banana = ProductAlpha3(made_in=alpha3)
|
||||
assert banana.made_in == country_data.alpha3
|
||||
assert banana.made_in.alpha2 == country_data.alpha2
|
||||
assert banana.made_in.numeric_code == country_data.numeric_code
|
||||
assert banana.made_in.short_name == country_data.short_name
|
||||
|
||||
|
||||
@pytest.mark.parametrize('alpha3', list(printable))
|
||||
def test_invalid_alpha3(alpha3: str, ProductAlpha3):
|
||||
with pytest.raises(ValidationError, match='Invalid country alpha3 code'):
|
||||
ProductAlpha3(made_in=alpha3)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('short_name, country_data', list(_index_by_short_name().items())[:PARAMS_AMOUNT])
|
||||
def test_valid_short_name(short_name: str, country_data: CountryInfo, ProductShortName):
|
||||
banana = ProductShortName(made_in=short_name)
|
||||
assert banana.made_in == country_data.short_name
|
||||
assert banana.made_in.alpha2 == country_data.alpha2
|
||||
assert banana.made_in.alpha3 == country_data.alpha3
|
||||
assert banana.made_in.numeric_code == country_data.numeric_code
|
||||
|
||||
|
||||
@pytest.mark.parametrize('short_name', list(printable))
|
||||
def test_invalid_short_name(short_name: str, ProductShortName):
|
||||
with pytest.raises(ValidationError, match='Invalid country short name'):
|
||||
ProductShortName(made_in=short_name)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('numeric_code, country_data', list(_index_by_numeric_code().items())[:PARAMS_AMOUNT])
|
||||
def test_valid_numeric_code(numeric_code: str, country_data: CountryInfo, ProductNumericCode):
|
||||
banana = ProductNumericCode(made_in=numeric_code)
|
||||
assert banana.made_in == country_data.numeric_code
|
||||
assert banana.made_in.alpha2 == country_data.alpha2
|
||||
assert banana.made_in.alpha3 == country_data.alpha3
|
||||
assert banana.made_in.short_name == country_data.short_name
|
||||
|
||||
|
||||
@pytest.mark.parametrize('numeric_code', list(printable))
|
||||
def test_invalid_numeric_code(numeric_code: str, ProductNumericCode):
|
||||
with pytest.raises(ValidationError, match='Invalid country numeric code'):
|
||||
ProductNumericCode(made_in=numeric_code)
|
64
tests/test_currency_code.py
Normal file
64
tests/test_currency_code.py
Normal file
|
@ -0,0 +1,64 @@
|
|||
import re
|
||||
|
||||
import pycountry
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
from pydantic_extra_types import currency_code
|
||||
|
||||
|
||||
class ISO4217CheckingModel(BaseModel):
|
||||
currency: currency_code.ISO4217
|
||||
|
||||
|
||||
class CurrencyCheckingModel(BaseModel):
|
||||
currency: currency_code.Currency
|
||||
|
||||
|
||||
forbidden_currencies = sorted(currency_code._CODES_FOR_BONDS_METAL_TESTING)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('currency', map(lambda code: code.alpha_3, pycountry.currencies))
|
||||
def test_ISO4217_code_ok(currency: str):
|
||||
model = ISO4217CheckingModel(currency=currency)
|
||||
assert model.currency == currency
|
||||
assert model.model_dump() == {'currency': currency} # test serialization
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'currency',
|
||||
filter(
|
||||
lambda code: code not in currency_code._CODES_FOR_BONDS_METAL_TESTING,
|
||||
map(lambda code: code.alpha_3, pycountry.currencies),
|
||||
),
|
||||
)
|
||||
def test_everyday_code_ok(currency: str):
|
||||
model = CurrencyCheckingModel(currency=currency)
|
||||
assert model.currency == currency
|
||||
assert model.model_dump() == {'currency': currency} # test serialization
|
||||
|
||||
|
||||
def test_ISO4217_fails():
|
||||
with pytest.raises(
|
||||
ValidationError,
|
||||
match=re.escape(
|
||||
'1 validation error for ISO4217CheckingModel\ncurrency\n '
|
||||
'Invalid ISO 4217 currency code. See https://en.wikipedia.org/wiki/ISO_4217 '
|
||||
"[type=ISO4217, input_value='OMG', input_type=str]"
|
||||
),
|
||||
):
|
||||
ISO4217CheckingModel(currency='OMG')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('forbidden_currency', forbidden_currencies)
|
||||
def test_forbidden_everyday(forbidden_currency):
|
||||
with pytest.raises(
|
||||
ValidationError,
|
||||
match=re.escape(
|
||||
'1 validation error for CurrencyCheckingModel\ncurrency\n '
|
||||
'Invalid currency code. See https://en.wikipedia.org/wiki/ISO_4217. '
|
||||
'Bonds, testing and precious metals codes are not allowed. '
|
||||
f"[type=InvalidCurrency, input_value='{forbidden_currency}', input_type=str]"
|
||||
),
|
||||
):
|
||||
CurrencyCheckingModel(currency=forbidden_currency)
|
154
tests/test_isbn.py
Normal file
154
tests/test_isbn.py
Normal file
|
@ -0,0 +1,154 @@
|
|||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
from pydantic_extra_types.isbn import ISBN
|
||||
|
||||
|
||||
class Book(BaseModel):
|
||||
isbn: ISBN
|
||||
|
||||
|
||||
isbn_length_test_cases = [
|
||||
# Valid ISBNs
|
||||
('8537809667', '9788537809662', True), # ISBN-10 as input
|
||||
('9788537809662', '9788537809662', True), # ISBN-13 as input
|
||||
('080442957X', '9780804429573', True), # ISBN-10 ending in "X" as input
|
||||
('9788584390670', '9788584390670', True), # ISBN-13 Starting with 978
|
||||
('9790306406156', '9790306406156', True), # ISBN-13 starting with 979
|
||||
# Invalid ISBNs
|
||||
('97885843906701', None, False), # Length: 14 (Higher)
|
||||
('978858439067', None, False), # Length: 12 (In Between)
|
||||
('97885843906', None, False), # Length: 11 (In Between)
|
||||
('978858439', None, False), # Length: 9 (Lower)
|
||||
('', None, False), # Length: 0 (Lower)
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('input_isbn, output_isbn, valid', isbn_length_test_cases)
|
||||
def test_isbn_length(input_isbn: Any, output_isbn: str, valid: bool) -> None:
|
||||
if valid:
|
||||
assert Book(isbn=ISBN(input_isbn)).isbn == output_isbn
|
||||
else:
|
||||
with pytest.raises(ValidationError, match='isbn_length'):
|
||||
Book(isbn=ISBN(input_isbn))
|
||||
|
||||
|
||||
isbn10_digits_test_cases = [
|
||||
# Valid ISBNs
|
||||
('8537809667', '9788537809662', True), # ISBN-10 as input
|
||||
('080442957X', '9780804429573', True), # ISBN-10 ending in "X" as input
|
||||
# Invalid ISBNs
|
||||
('@80442957X', None, False), # Non Integer in [0] position
|
||||
('8@37809667', None, False), # Non Integer in [1] position
|
||||
('85@7809667', None, False), # Non Integer in [2] position
|
||||
('853@809667', None, False), # Non Integer in [3] position
|
||||
('8537@09667', None, False), # Non Integer in [4] position
|
||||
('85378@9667', None, False), # Non Integer in [5] position
|
||||
('853780@667', None, False), # Non Integer in [6] position
|
||||
('8537809@67', None, False), # Non Integer in [7] position
|
||||
('85378096@7', None, False), # Non Integer in [8] position
|
||||
('853780966@', None, False), # Non Integer or X in [9] position
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('input_isbn, output_isbn, valid', isbn10_digits_test_cases)
|
||||
def test_isbn10_digits(input_isbn: Any, output_isbn: str, valid: bool) -> None:
|
||||
if valid:
|
||||
assert Book(isbn=ISBN(input_isbn)).isbn == output_isbn
|
||||
else:
|
||||
with pytest.raises(ValidationError, match='isbn10_invalid_characters'):
|
||||
Book(isbn=ISBN(input_isbn))
|
||||
|
||||
|
||||
isbn13_digits_test_cases = [
|
||||
# Valid ISBNs
|
||||
('9788537809662', '9788537809662', True), # ISBN-13 as input
|
||||
('9780306406157', '9780306406157', True), # ISBN-13 as input
|
||||
('9788584390670', '9788584390670', True), # ISBN-13 Starting with 978
|
||||
('9790306406156', '9790306406156', True), # ISBN-13 starting with 979
|
||||
# Invalid ISBNs
|
||||
('@788537809662', None, False), # Non Integer in [0] position
|
||||
('9@88537809662', None, False), # Non Integer in [1] position
|
||||
('97@8537809662', None, False), # Non Integer in [2] position
|
||||
('978@537809662', None, False), # Non Integer in [3] position
|
||||
('9788@37809662', None, False), # Non Integer in [4] position
|
||||
('97885@7809662', None, False), # Non Integer in [5] position
|
||||
('978853@809662', None, False), # Non Integer in [6] position
|
||||
('9788537@09662', None, False), # Non Integer in [7] position
|
||||
('97885378@9662', None, False), # Non Integer in [8] position
|
||||
('978853780@662', None, False), # Non Integer in [9] position
|
||||
('9788537809@62', None, False), # Non Integer in [10] position
|
||||
('97885378096@2', None, False), # Non Integer in [11] position
|
||||
('978853780966@', None, False), # Non Integer in [12] position
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('input_isbn, output_isbn, valid', isbn13_digits_test_cases)
|
||||
def test_isbn13_digits(input_isbn: Any, output_isbn: str, valid: bool) -> None:
|
||||
if valid:
|
||||
assert Book(isbn=ISBN(input_isbn)).isbn == output_isbn
|
||||
else:
|
||||
with pytest.raises(ValidationError, match='isbn13_invalid_characters'):
|
||||
Book(isbn=ISBN(input_isbn))
|
||||
|
||||
|
||||
isbn13_early_digits_test_cases = [
|
||||
# Valid ISBNs
|
||||
('9780306406157', '9780306406157', True), # ISBN-13 as input
|
||||
('9788584390670', '9788584390670', True), # ISBN-13 Starting with 978
|
||||
('9790306406156', '9790306406156', True), # ISBN-13 starting with 979
|
||||
# Invalid ISBNs
|
||||
('1788584390670', None, False), # Does not start with 978 or 979
|
||||
('9288584390670', None, False), # Does not start with 978 or 979
|
||||
('9738584390670', None, False), # Does not start with 978 or 979
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('input_isbn, output_isbn, valid', isbn13_early_digits_test_cases)
|
||||
def test_isbn13_early_digits(input_isbn: Any, output_isbn: str, valid: bool) -> None:
|
||||
if valid:
|
||||
assert Book(isbn=ISBN(input_isbn)).isbn == output_isbn
|
||||
else:
|
||||
with pytest.raises(ValidationError, match='isbn_invalid_early_characters'):
|
||||
Book(isbn=ISBN(input_isbn))
|
||||
|
||||
|
||||
isbn_last_digit_test_cases = [
|
||||
# Valid ISBNs
|
||||
('8537809667', '9788537809662', True), # ISBN-10 as input
|
||||
('9788537809662', '9788537809662', True), # ISBN-13 as input
|
||||
('080442957X', '9780804429573', True), # ISBN-10 ending in "X" as input
|
||||
('9788584390670', '9788584390670', True), # ISBN-13 Starting with 978
|
||||
('9790306406156', '9790306406156', True), # ISBN-13 starting with 979
|
||||
# Invalid ISBNs
|
||||
('8537809663', None, False), # ISBN-10 as input with wrong last digit
|
||||
('9788537809661', None, False), # ISBN-13 as input with wrong last digit
|
||||
('080442953X', None, False), # ISBN-10 ending in "X" as input with wrong last digit
|
||||
('9788584390671', None, False), # ISBN-13 Starting with 978 with wrong last digit
|
||||
('9790306406155', None, False), # ISBN-13 starting with 979 with wrong last digit
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('input_isbn, output_isbn, valid', isbn_last_digit_test_cases)
|
||||
def test_isbn_last_digit(input_isbn: Any, output_isbn: str, valid: bool) -> None:
|
||||
if valid:
|
||||
assert Book(isbn=ISBN(input_isbn)).isbn == output_isbn
|
||||
else:
|
||||
with pytest.raises(ValidationError, match='isbn_invalid_digit_check_isbn'):
|
||||
Book(isbn=ISBN(input_isbn))
|
||||
|
||||
|
||||
isbn_conversion_test_cases = [
|
||||
# Valid ISBNs
|
||||
('8537809667', '9788537809662'),
|
||||
('080442957X', '9780804429573'),
|
||||
('9788584390670', '9788584390670'),
|
||||
('9790306406156', '9790306406156'),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('input_isbn, output_isbn', isbn_conversion_test_cases)
|
||||
def test_isbn_conversion(input_isbn: Any, output_isbn: str) -> None:
|
||||
assert Book(isbn=ISBN(input_isbn)).isbn == output_isbn
|
296
tests/test_json_schema.py
Normal file
296
tests/test_json_schema.py
Normal file
|
@ -0,0 +1,296 @@
|
|||
import pycountry
|
||||
import pytest
|
||||
from pydantic import BaseModel
|
||||
|
||||
import pydantic_extra_types
|
||||
from pydantic_extra_types.color import Color
|
||||
from pydantic_extra_types.coordinate import Coordinate, Latitude, Longitude
|
||||
from pydantic_extra_types.country import (
|
||||
CountryAlpha2,
|
||||
CountryAlpha3,
|
||||
CountryNumericCode,
|
||||
CountryShortName,
|
||||
)
|
||||
from pydantic_extra_types.currency_code import ISO4217, Currency
|
||||
from pydantic_extra_types.isbn import ISBN
|
||||
from pydantic_extra_types.language_code import ISO639_3, ISO639_5
|
||||
from pydantic_extra_types.mac_address import MacAddress
|
||||
from pydantic_extra_types.payment import PaymentCardNumber
|
||||
from pydantic_extra_types.pendulum_dt import DateTime
|
||||
from pydantic_extra_types.ulid import ULID
|
||||
|
||||
languages = [lang.alpha_3 for lang in pycountry.languages]
|
||||
language_families = [lang.alpha_3 for lang in pycountry.language_families]
|
||||
languages.sort()
|
||||
language_families.sort()
|
||||
|
||||
currencies = [currency.alpha_3 for currency in pycountry.currencies]
|
||||
currencies.sort()
|
||||
everyday_currencies = [
|
||||
currency.alpha_3
|
||||
for currency in pycountry.currencies
|
||||
if currency.alpha_3 not in pydantic_extra_types.currency_code._CODES_FOR_BONDS_METAL_TESTING
|
||||
]
|
||||
|
||||
everyday_currencies.sort()
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'cls,expected',
|
||||
[
|
||||
(
|
||||
Color,
|
||||
{
|
||||
'properties': {'x': {'format': 'color', 'title': 'X', 'type': 'string'}},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
PaymentCardNumber,
|
||||
{
|
||||
'properties': {
|
||||
'x': {
|
||||
'maxLength': 19,
|
||||
'minLength': 12,
|
||||
'title': 'X',
|
||||
'type': 'string',
|
||||
}
|
||||
},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
CountryAlpha2,
|
||||
{
|
||||
'properties': {'x': {'pattern': '^\\w{2}$', 'title': 'X', 'type': 'string'}},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
CountryAlpha3,
|
||||
{
|
||||
'properties': {'x': {'pattern': '^\\w{3}$', 'title': 'X', 'type': 'string'}},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
CountryNumericCode,
|
||||
{
|
||||
'properties': {'x': {'pattern': '^[0-9]{3}$', 'title': 'X', 'type': 'string'}},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
CountryShortName,
|
||||
{
|
||||
'properties': {'x': {'title': 'X', 'type': 'string'}},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
MacAddress,
|
||||
{
|
||||
'properties': {
|
||||
'x': {
|
||||
'title': 'X',
|
||||
'type': 'string',
|
||||
}
|
||||
},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
Latitude,
|
||||
{
|
||||
'properties': {
|
||||
'x': {
|
||||
'maximum': 90.0,
|
||||
'minimum': -90.0,
|
||||
'title': 'X',
|
||||
'type': 'number',
|
||||
}
|
||||
},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
Longitude,
|
||||
{
|
||||
'properties': {
|
||||
'x': {
|
||||
'maximum': 180.0,
|
||||
'minimum': -180.0,
|
||||
'title': 'X',
|
||||
'type': 'number',
|
||||
}
|
||||
},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
Coordinate,
|
||||
{
|
||||
'$defs': {
|
||||
'Coordinate': {
|
||||
'properties': {
|
||||
'latitude': {'maximum': 90.0, 'minimum': -90.0, 'title': 'Latitude', 'type': 'number'},
|
||||
'longitude': {'maximum': 180.0, 'minimum': -180.0, 'title': 'Longitude', 'type': 'number'},
|
||||
},
|
||||
'required': ['latitude', 'longitude'],
|
||||
'title': 'Coordinate',
|
||||
'type': 'object',
|
||||
}
|
||||
},
|
||||
'properties': {
|
||||
'x': {
|
||||
'anyOf': [
|
||||
{'$ref': '#/$defs/Coordinate'},
|
||||
{
|
||||
'maxItems': 2,
|
||||
'minItems': 2,
|
||||
'prefixItems': [
|
||||
{'type': 'number'},
|
||||
{'type': 'number'},
|
||||
],
|
||||
'type': 'array',
|
||||
},
|
||||
{'type': 'string'},
|
||||
],
|
||||
'title': 'X',
|
||||
},
|
||||
},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
ULID,
|
||||
{
|
||||
'properties': {
|
||||
'x': {
|
||||
'anyOf': [{'type': 'integer'}, {'format': 'binary', 'type': 'string'}, {'type': 'string'}],
|
||||
'title': 'X',
|
||||
}
|
||||
},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
ISBN,
|
||||
{
|
||||
'properties': {
|
||||
'x': {
|
||||
'title': 'X',
|
||||
'type': 'string',
|
||||
}
|
||||
},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
DateTime,
|
||||
{
|
||||
'properties': {'x': {'format': 'date-time', 'title': 'X', 'type': 'string'}},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
ISO639_3,
|
||||
{
|
||||
'properties': {
|
||||
'x': {
|
||||
'title': 'X',
|
||||
'type': 'string',
|
||||
'enum': languages,
|
||||
'maxLength': 3,
|
||||
'minLength': 3,
|
||||
}
|
||||
},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
ISO639_5,
|
||||
{
|
||||
'properties': {
|
||||
'x': {
|
||||
'title': 'X',
|
||||
'type': 'string',
|
||||
'enum': language_families,
|
||||
'maxLength': 3,
|
||||
'minLength': 3,
|
||||
}
|
||||
},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
ISO4217,
|
||||
{
|
||||
'properties': {
|
||||
'x': {
|
||||
'title': 'X',
|
||||
'type': 'string',
|
||||
'enum': currencies,
|
||||
'maxLength': 3,
|
||||
'minLength': 3,
|
||||
}
|
||||
},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
(
|
||||
Currency,
|
||||
{
|
||||
'properties': {
|
||||
'x': {
|
||||
'title': 'X',
|
||||
'type': 'string',
|
||||
'enum': everyday_currencies,
|
||||
'maxLength': 3,
|
||||
'minLength': 3,
|
||||
}
|
||||
},
|
||||
'required': ['x'],
|
||||
'title': 'Model',
|
||||
'type': 'object',
|
||||
},
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_json_schema(cls, expected):
|
||||
class Model(BaseModel):
|
||||
x: cls
|
||||
|
||||
assert Model.model_json_schema() == expected
|
53
tests/test_language_codes.py
Normal file
53
tests/test_language_codes.py
Normal file
|
@ -0,0 +1,53 @@
|
|||
import re
|
||||
|
||||
import pycountry
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
from pydantic_extra_types import language_code
|
||||
|
||||
|
||||
class ISO3CheckingModel(BaseModel):
|
||||
lang: language_code.ISO639_3
|
||||
|
||||
|
||||
class ISO5CheckingModel(BaseModel):
|
||||
lang: language_code.ISO639_5
|
||||
|
||||
|
||||
@pytest.mark.parametrize('lang', map(lambda lang: lang.alpha_3, pycountry.languages))
|
||||
def test_iso_ISO639_3_code_ok(lang: str):
|
||||
model = ISO3CheckingModel(lang=lang)
|
||||
assert model.lang == lang
|
||||
assert model.model_dump() == {'lang': lang} # test serialization
|
||||
|
||||
|
||||
@pytest.mark.parametrize('lang', map(lambda lang: lang.alpha_3, pycountry.language_families))
|
||||
def test_iso_639_5_code_ok(lang: str):
|
||||
model = ISO5CheckingModel(lang=lang)
|
||||
assert model.lang == lang
|
||||
assert model.model_dump() == {'lang': lang} # test serialization
|
||||
|
||||
|
||||
def test_iso3_language_fail():
|
||||
with pytest.raises(
|
||||
ValidationError,
|
||||
match=re.escape(
|
||||
'1 validation error for ISO3CheckingModel\nlang\n '
|
||||
'Invalid ISO 639-3 language code. '
|
||||
"See https://en.wikipedia.org/wiki/ISO_639-3 [type=ISO649_3, input_value='LOL', input_type=str]"
|
||||
),
|
||||
):
|
||||
ISO3CheckingModel(lang='LOL')
|
||||
|
||||
|
||||
def test_iso5_language_fail():
|
||||
with pytest.raises(
|
||||
ValidationError,
|
||||
match=re.escape(
|
||||
'1 validation error for ISO5CheckingModel\nlang\n '
|
||||
'Invalid ISO 639-5 language code. '
|
||||
"See https://en.wikipedia.org/wiki/ISO_639-5 [type=ISO649_5, input_value='LOL', input_type=str]"
|
||||
),
|
||||
):
|
||||
ISO5CheckingModel(lang='LOL')
|
144
tests/test_mac_address.py
Normal file
144
tests/test_mac_address.py
Normal file
|
@ -0,0 +1,144 @@
|
|||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
from pydantic_extra_types.mac_address import MacAddress
|
||||
|
||||
|
||||
class Network(BaseModel):
|
||||
mac_address: MacAddress
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'mac_address, result, valid',
|
||||
[
|
||||
# Valid MAC addresses
|
||||
('00:00:5e:00:53:01', '00:00:5e:00:53:01', True),
|
||||
('02:00:5e:10:00:00:00:01', '02:00:5e:10:00:00:00:01', True),
|
||||
(
|
||||
'00:00:00:00:fe:80:00:00:00:00:00:00:02:00:5e:10:00:00:00:01',
|
||||
'00:00:00:00:fe:80:00:00:00:00:00:00:02:00:5e:10:00:00:00:01',
|
||||
True,
|
||||
),
|
||||
('00-00-5e-00-53-01', '00:00:5e:00:53:01', True),
|
||||
('02-00-5e-10-00-00-00-01', '02:00:5e:10:00:00:00:01', True),
|
||||
(
|
||||
'00-00-00-00-fe-80-00-00-00-00-00-00-02-00-5e-10-00-00-00-01',
|
||||
'00:00:00:00:fe:80:00:00:00:00:00:00:02:00:5e:10:00:00:00:01',
|
||||
True,
|
||||
),
|
||||
('0000.5e00.5301', '00:00:5e:00:53:01', True),
|
||||
('0200.5e10.0000.0001', '02:00:5e:10:00:00:00:01', True),
|
||||
(
|
||||
'0000.0000.fe80.0000.0000.0000.0200.5e10.0000.0001',
|
||||
'00:00:00:00:fe:80:00:00:00:00:00:00:02:00:5e:10:00:00:00:01',
|
||||
True,
|
||||
),
|
||||
# Invalid MAC addresses
|
||||
('0200.5e10.0000.001', None, False),
|
||||
('00-00-5e-00-53-0', None, False),
|
||||
('00:00:5e:00:53:1', None, False),
|
||||
('02:00:5e:10:00:00:00:1', None, False),
|
||||
('00:00:00:00:fe:80:00:00:00:00:00:00:02:00:5e:10:00:00:00:1', None, False),
|
||||
('0200.5e10.0000.001', None, False), # Invalid length
|
||||
('00-00-5e-00-53-0', None, False), # Missing character
|
||||
('00:00:5e:00:53:1', None, False), # Missing leading zero
|
||||
('00:00:5g:00:53:01', None, False), # Invalid hex digit 'g'
|
||||
('00.00.5e.0.3.01.0.0.5e.0.53.01', None, False),
|
||||
('00-00-5e-00-53-01:', None, False), # Extra separator at the end
|
||||
('00000.5e000.5301', None, False),
|
||||
('000.5e0.530001', None, False),
|
||||
('0000.5e#0./301', None, False),
|
||||
(b'12.!4.5!.7/.#G.AB......', None, False),
|
||||
('12.!4.5!.7/.#G.AB', None, False),
|
||||
('00-00-5e-00-53-01-', None, False), # Extra separator at the end
|
||||
('00.00.5e.00.53.01.', None, False), # Extra separator at the end
|
||||
('00:00:5e:00:53:', None, False), # Incomplete MAC address
|
||||
(float(12345678910111213), None, False),
|
||||
],
|
||||
)
|
||||
def test_format_for_mac_address(mac_address: Any, result: str, valid: bool):
|
||||
if valid:
|
||||
assert Network(mac_address=MacAddress(mac_address)).mac_address == result
|
||||
else:
|
||||
with pytest.raises(ValidationError, match='format'):
|
||||
Network(mac_address=MacAddress(mac_address))
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'mac_address, result, valid',
|
||||
[
|
||||
# Valid MAC addresses
|
||||
('00:00:5e:00:53:01', '00:00:5e:00:53:01', True),
|
||||
('02:00:5e:10:00:00:00:01', '02:00:5e:10:00:00:00:01', True),
|
||||
(
|
||||
'00:00:00:00:fe:80:00:00:00:00:00:00:02:00:5e:10:00:00:00:01',
|
||||
'00:00:00:00:fe:80:00:00:00:00:00:00:02:00:5e:10:00:00:00:01',
|
||||
True,
|
||||
),
|
||||
('00-00-5e-00-53-01', '00:00:5e:00:53:01', True),
|
||||
('02-00-5e-10-00-00-00-01', '02:00:5e:10:00:00:00:01', True),
|
||||
(
|
||||
'00-00-00-00-fe-80-00-00-00-00-00-00-02-00-5e-10-00-00-00-01',
|
||||
'00:00:00:00:fe:80:00:00:00:00:00:00:02:00:5e:10:00:00:00:01',
|
||||
True,
|
||||
),
|
||||
('0000.5e00.5301', '00:00:5e:00:53:01', True),
|
||||
('0200.5e10.0000.0001', '02:00:5e:10:00:00:00:01', True),
|
||||
(
|
||||
'0000.0000.fe80.0000.0000.0000.0200.5e10.0000.0001',
|
||||
'00:00:00:00:fe:80:00:00:00:00:00:00:02:00:5e:10:00:00:00:01',
|
||||
True,
|
||||
),
|
||||
# Invalid MAC addresses
|
||||
('0', None, False),
|
||||
('00:00:00', None, False),
|
||||
('00-00-5e-00-53-01-01', None, False),
|
||||
('0000.0000.fe80.0000.0000.0000.0200.5e10.0000.0001.0000.0001', None, False),
|
||||
],
|
||||
)
|
||||
def test_length_for_mac_address(mac_address: str, result: str, valid: bool):
|
||||
if valid:
|
||||
assert Network(mac_address=MacAddress(mac_address)).mac_address == result
|
||||
else:
|
||||
with pytest.raises(ValueError, match='Length'):
|
||||
Network(mac_address=MacAddress(mac_address))
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'mac_address, valid',
|
||||
[
|
||||
# Valid MAC addresses
|
||||
('00:00:5e:00:53:01', True),
|
||||
(MacAddress('00:00:5e:00:53:01'), True),
|
||||
# Invalid MAC addresses
|
||||
(0, False),
|
||||
(['00:00:00'], False),
|
||||
],
|
||||
)
|
||||
def test_type_for_mac_address(mac_address: Any, valid: bool):
|
||||
if valid:
|
||||
Network(mac_address=MacAddress(mac_address))
|
||||
else:
|
||||
with pytest.raises(ValidationError, match='MAC address must be 14'):
|
||||
Network(mac_address=MacAddress(mac_address))
|
||||
|
||||
|
||||
def test_model_validation():
|
||||
class Model(BaseModel):
|
||||
mac_address: MacAddress
|
||||
|
||||
assert Model(mac_address='00:00:5e:00:53:01').mac_address == '00:00:5e:00:53:01'
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
Model(mac_address='1234')
|
||||
|
||||
assert exc_info.value.errors() == [
|
||||
{
|
||||
'ctx': {'mac_address': '1234', 'required_length': 14},
|
||||
'input': '1234',
|
||||
'loc': ('mac_address',),
|
||||
'msg': 'Length for a 1234 MAC address must be 14',
|
||||
'type': 'mac_address_len',
|
||||
}
|
||||
]
|
39
tests/test_pendulum_dt.py
Normal file
39
tests/test_pendulum_dt.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
import pendulum
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
from pydantic_extra_types.pendulum_dt import DateTime
|
||||
|
||||
|
||||
class Model(BaseModel):
|
||||
dt: DateTime
|
||||
|
||||
|
||||
def test_pendulum_dt_existing_instance():
|
||||
"""
|
||||
Verifies that constructing a model with an existing pendulum dt doesn't throw.
|
||||
"""
|
||||
now = pendulum.now()
|
||||
model = Model(dt=now)
|
||||
assert model.dt == now
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'dt', [pendulum.now().to_iso8601_string(), pendulum.now().to_w3c_string(), pendulum.now().to_iso8601_string()]
|
||||
)
|
||||
def test_pendulum_dt_from_serialized(dt):
|
||||
"""
|
||||
Verifies that building an instance from serialized, well-formed strings decode properly.
|
||||
"""
|
||||
dt_actual = pendulum.parse(dt)
|
||||
model = Model(dt=dt)
|
||||
assert model.dt == dt_actual
|
||||
|
||||
|
||||
@pytest.mark.parametrize('dt', [None, 'malformed', pendulum.now().to_iso8601_string()[:5], 42])
|
||||
def test_pendulum_dt_malformed(dt):
|
||||
"""
|
||||
Verifies that the instance fails to validate if malformed dt are passed.
|
||||
"""
|
||||
with pytest.raises(ValidationError):
|
||||
Model(dt=dt)
|
69
tests/test_phone_numbers.py
Normal file
69
tests/test_phone_numbers.py
Normal file
|
@ -0,0 +1,69 @@
|
|||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
from pydantic_extra_types.phone_numbers import PhoneNumber
|
||||
|
||||
|
||||
class Something(BaseModel):
|
||||
phone_number: PhoneNumber
|
||||
|
||||
|
||||
# Note: the 555 area code will result in an invalid phone number
|
||||
def test_valid_phone_number() -> None:
|
||||
Something(phone_number='+1 901 555 1212')
|
||||
|
||||
|
||||
def test_when_extension_provided() -> None:
|
||||
Something(phone_number='+1 901 555 1212 ext 12533')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('invalid_number', ['', '123', 12, None, object(), '55 121'])
|
||||
def test_invalid_phone_number(invalid_number: Any) -> None:
|
||||
with pytest.raises(ValidationError):
|
||||
Something(phone_number=invalid_number)
|
||||
|
||||
|
||||
def test_formats_phone_number() -> None:
|
||||
result = Something(phone_number='+1 901 555 1212 ext 12533')
|
||||
assert result.phone_number == 'tel:+1-901-555-1212;ext=12533'
|
||||
|
||||
|
||||
def test_supported_regions() -> None:
|
||||
assert 'US' in PhoneNumber.supported_regions
|
||||
assert 'GB' in PhoneNumber.supported_regions
|
||||
|
||||
|
||||
def test_supported_formats() -> None:
|
||||
assert 'E164' in PhoneNumber.supported_formats
|
||||
assert 'RFC3966' in PhoneNumber.supported_formats
|
||||
assert '__dict__' not in PhoneNumber.supported_formats
|
||||
assert 'to_string' not in PhoneNumber.supported_formats
|
||||
|
||||
|
||||
def test_parse_error() -> None:
|
||||
with pytest.raises(ValidationError, match='value is not a valid phone number'):
|
||||
Something(phone_number='555 1212')
|
||||
|
||||
|
||||
def test_parsed_but_not_a_valid_number() -> None:
|
||||
with pytest.raises(ValidationError, match='value is not a valid phone number'):
|
||||
Something(phone_number='+1 555-1212')
|
||||
|
||||
|
||||
def test_json_schema() -> None:
|
||||
assert Something.model_json_schema() == {
|
||||
'title': 'Something',
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'phone_number': {
|
||||
'title': 'Phone Number',
|
||||
'type': 'string',
|
||||
'format': 'phone',
|
||||
'minLength': 7,
|
||||
'maxLength': 64,
|
||||
}
|
||||
},
|
||||
'required': ['phone_number'],
|
||||
}
|
41
tests/test_routing_number.py
Normal file
41
tests/test_routing_number.py
Normal file
|
@ -0,0 +1,41 @@
|
|||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
from pydantic_extra_types.routing_number import ABARoutingNumber
|
||||
|
||||
|
||||
class Model(BaseModel):
|
||||
routing_number: ABARoutingNumber
|
||||
|
||||
|
||||
@pytest.mark.parametrize('routing_number', [12, None, object(), 123456789])
|
||||
def test_invalid_routing_number_string(routing_number: Any) -> None:
|
||||
with pytest.raises(ValidationError) as validation_error:
|
||||
Model(routing_number=routing_number)
|
||||
assert validation_error.match('Input should be a valid string')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('routing_number', ['', '123', '1234567890'])
|
||||
def test_invalid_routing_number_length(routing_number: Any) -> None:
|
||||
with pytest.raises(ValidationError) as validation_error:
|
||||
Model(routing_number=routing_number)
|
||||
assert validation_error.match(r'String should have at (most|least) 9 characters')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('routing_number', ['122105154', '122235822', '123103723', '074900781'])
|
||||
def test_invalid_routing_number(routing_number: Any) -> None:
|
||||
with pytest.raises(ValidationError) as validation_error:
|
||||
Model(routing_number=routing_number)
|
||||
assert validation_error.match('Incorrect ABA routing transit number')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('routing_number', ['122105155', '122235821', '123103729', '074900783'])
|
||||
def test_valid_routing_number(routing_number: str) -> None:
|
||||
Model(routing_number=routing_number)
|
||||
|
||||
|
||||
def test_raises_error_when_not_a_string() -> None:
|
||||
with pytest.raises(ValidationError, match='routing number is not all digits'):
|
||||
Model(routing_number='A12210515')
|
230
tests/test_types_color.py
Normal file
230
tests/test_types_color.py
Normal file
|
@ -0,0 +1,230 @@
|
|||
from datetime import datetime
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
from pydantic_core import PydanticCustomError
|
||||
|
||||
from pydantic_extra_types.color import Color
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'raw_color, as_tuple',
|
||||
[
|
||||
# named colors
|
||||
('aliceblue', (240, 248, 255)),
|
||||
('Antiquewhite', (250, 235, 215)),
|
||||
('transparent', (0, 0, 0, 0)),
|
||||
('#000000', (0, 0, 0)),
|
||||
('#DAB', (221, 170, 187)),
|
||||
('#dab', (221, 170, 187)),
|
||||
('#000', (0, 0, 0)),
|
||||
('0x797979', (121, 121, 121)),
|
||||
('0x777', (119, 119, 119)),
|
||||
('0x777777', (119, 119, 119)),
|
||||
('0x777777cc', (119, 119, 119, 0.8)),
|
||||
('777', (119, 119, 119)),
|
||||
('777c', (119, 119, 119, 0.8)),
|
||||
(' 777', (119, 119, 119)),
|
||||
('777 ', (119, 119, 119)),
|
||||
(' 777 ', (119, 119, 119)),
|
||||
((0, 0, 128), (0, 0, 128)),
|
||||
([0, 0, 128], (0, 0, 128)),
|
||||
((0, 0, 205, 1.0), (0, 0, 205)),
|
||||
((0, 0, 205, 0.5), (0, 0, 205, 0.5)),
|
||||
('rgb(0, 0, 205)', (0, 0, 205)),
|
||||
('rgb(0, 0, 205.2)', (0, 0, 205)),
|
||||
('rgb(0, 0.2, 205)', (0, 0, 205)),
|
||||
('rgba(0, 0, 128, 0.6)', (0, 0, 128, 0.6)),
|
||||
('rgba(0, 0, 128, .6)', (0, 0, 128, 0.6)),
|
||||
('rgba(0, 0, 128, 60%)', (0, 0, 128, 0.6)),
|
||||
(' rgba(0, 0, 128,0.6) ', (0, 0, 128, 0.6)),
|
||||
('rgba(00,0,128,0.6 )', (0, 0, 128, 0.6)),
|
||||
('rgba(0, 0, 128, 0)', (0, 0, 128, 0)),
|
||||
('rgba(0, 0, 128, 1)', (0, 0, 128)),
|
||||
('rgb(0 0.2 205)', (0, 0, 205)),
|
||||
('rgb(0 0.2 205 / 0.6)', (0, 0, 205, 0.6)),
|
||||
('rgb(0 0.2 205 / 60%)', (0, 0, 205, 0.6)),
|
||||
('rgba(0 0 128)', (0, 0, 128)),
|
||||
('rgba(0 0 128 / 0.6)', (0, 0, 128, 0.6)),
|
||||
('rgba(0 0 128 / 60%)', (0, 0, 128, 0.6)),
|
||||
('hsl(270, 60%, 70%)', (178, 133, 224)),
|
||||
('hsl(180, 100%, 50%)', (0, 255, 255)),
|
||||
('hsl(630, 60%, 70%)', (178, 133, 224)),
|
||||
('hsl(270deg, 60%, 70%)', (178, 133, 224)),
|
||||
('hsl(.75turn, 60%, 70%)', (178, 133, 224)),
|
||||
('hsl(-.25turn, 60%, 70%)', (178, 133, 224)),
|
||||
('hsl(-0.25turn, 60%, 70%)', (178, 133, 224)),
|
||||
('hsl(4.71238rad, 60%, 70%)', (178, 133, 224)),
|
||||
('hsl(10.9955rad, 60%, 70%)', (178, 133, 224)),
|
||||
('hsl(270, 60%, 50%, .15)', (127, 51, 204, 0.15)),
|
||||
('hsl(270.00deg, 60%, 50%, 15%)', (127, 51, 204, 0.15)),
|
||||
('hsl(630 60% 70%)', (178, 133, 224)),
|
||||
('hsl(270 60% 50% / .15)', (127, 51, 204, 0.15)),
|
||||
('hsla(630, 60%, 70%)', (178, 133, 224)),
|
||||
('hsla(630 60% 70%)', (178, 133, 224)),
|
||||
('hsla(270 60% 50% / .15)', (127, 51, 204, 0.15)),
|
||||
],
|
||||
)
|
||||
def test_color_success(raw_color, as_tuple):
|
||||
c = Color(raw_color)
|
||||
assert c.as_rgb_tuple() == as_tuple
|
||||
assert c.original() == raw_color
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'color',
|
||||
[
|
||||
# named colors
|
||||
'nosuchname',
|
||||
'chucknorris',
|
||||
# hex
|
||||
'#0000000',
|
||||
'x000',
|
||||
# rgb/rgba tuples
|
||||
(256, 256, 256),
|
||||
(128, 128, 128, 0.5, 128),
|
||||
(0, 0, 'x'),
|
||||
(0, 0, 0, 1.5),
|
||||
(0, 0, 0, 'x'),
|
||||
(0, 0, 1280),
|
||||
(0, 0, 1205, 0.1),
|
||||
(0, 0, 1128, 0.5),
|
||||
(0, 0, 1128, -0.5),
|
||||
(0, 0, 1128, 1.5),
|
||||
# rgb/rgba strings
|
||||
'rgb(0, 0, 1205)',
|
||||
'rgb(0, 0, 1128)',
|
||||
'rgb(0, 0, 200 / 0.2)',
|
||||
'rgb(72 122 18, 0.3)',
|
||||
'rgba(0, 0, 11205, 0.1)',
|
||||
'rgba(0, 0, 128, 11.5)',
|
||||
'rgba(0, 0, 128 / 11.5)',
|
||||
'rgba(72 122 18 0.3)',
|
||||
# hsl/hsla strings
|
||||
'hsl(180, 101%, 50%)',
|
||||
'hsl(72 122 18 / 0.3)',
|
||||
'hsl(630 60% 70%, 0.3)',
|
||||
'hsla(72 122 18 / 0.3)',
|
||||
# neither a tuple, not a string
|
||||
datetime(2017, 10, 5, 19, 47, 7),
|
||||
object,
|
||||
range(10),
|
||||
],
|
||||
)
|
||||
def test_color_fail(color):
|
||||
with pytest.raises(PydanticCustomError) as exc_info:
|
||||
Color(color)
|
||||
assert exc_info.value.type == 'color_error'
|
||||
|
||||
|
||||
def test_model_validation():
|
||||
class Model(BaseModel):
|
||||
color: Color
|
||||
|
||||
assert Model(color='red').color.as_hex() == '#f00'
|
||||
assert Model(color=Color('red')).color.as_hex() == '#f00'
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
Model(color='snot')
|
||||
# insert_assert(exc_info.value.errors())
|
||||
assert exc_info.value.errors() == [
|
||||
{
|
||||
'type': 'color_error',
|
||||
'loc': ('color',),
|
||||
'msg': 'value is not a valid color: string not recognised as a valid color',
|
||||
'input': 'snot',
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
def test_as_rgb():
|
||||
assert Color('bad').as_rgb() == 'rgb(187, 170, 221)'
|
||||
assert Color((1, 2, 3, 0.123456)).as_rgb() == 'rgba(1, 2, 3, 0.12)'
|
||||
assert Color((1, 2, 3, 0.1)).as_rgb() == 'rgba(1, 2, 3, 0.1)'
|
||||
|
||||
|
||||
def test_as_rgb_tuple():
|
||||
assert Color((1, 2, 3)).as_rgb_tuple(alpha=None) == (1, 2, 3)
|
||||
assert Color((1, 2, 3, 1)).as_rgb_tuple(alpha=None) == (1, 2, 3)
|
||||
assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=None) == (1, 2, 3, 0.3)
|
||||
assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=None) == (1, 2, 3, 0.3)
|
||||
|
||||
assert Color((1, 2, 3)).as_rgb_tuple(alpha=False) == (1, 2, 3)
|
||||
assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=False) == (1, 2, 3)
|
||||
|
||||
assert Color((1, 2, 3)).as_rgb_tuple(alpha=True) == (1, 2, 3, 1)
|
||||
assert Color((1, 2, 3, 0.3)).as_rgb_tuple(alpha=True) == (1, 2, 3, 0.3)
|
||||
|
||||
|
||||
def test_as_hsl():
|
||||
assert Color('bad').as_hsl() == 'hsl(260, 43%, 77%)'
|
||||
assert Color((1, 2, 3, 0.123456)).as_hsl() == 'hsl(210, 50%, 1%, 0.12)'
|
||||
assert Color('hsl(260, 43%, 77%)').as_hsl() == 'hsl(260, 43%, 77%)'
|
||||
|
||||
|
||||
def test_as_hsl_tuple():
|
||||
c = Color('016997')
|
||||
h, s, l_, a = c.as_hsl_tuple(alpha=True)
|
||||
assert h == pytest.approx(0.551, rel=0.01)
|
||||
assert s == pytest.approx(0.986, rel=0.01)
|
||||
assert l_ == pytest.approx(0.298, rel=0.01)
|
||||
assert a == 1
|
||||
|
||||
assert c.as_hsl_tuple(alpha=False) == c.as_hsl_tuple(alpha=None) == (h, s, l_)
|
||||
|
||||
c = Color((3, 40, 50, 0.5))
|
||||
hsla = c.as_hsl_tuple(alpha=None)
|
||||
assert len(hsla) == 4
|
||||
assert hsla[3] == 0.5
|
||||
|
||||
|
||||
def test_as_hex():
|
||||
assert Color((1, 2, 3)).as_hex() == '#010203'
|
||||
assert Color((119, 119, 119)).as_hex() == '#777'
|
||||
assert Color((119, 0, 238)).as_hex() == '#70e'
|
||||
assert Color('B0B').as_hex() == '#b0b'
|
||||
assert Color((1, 2, 3, 0.123456)).as_hex() == '#0102031f'
|
||||
assert Color((1, 2, 3, 0.1)).as_hex() == '#0102031a'
|
||||
|
||||
|
||||
def test_as_hex_long():
|
||||
assert Color((1, 2, 3)).as_hex(format='long') == '#010203'
|
||||
assert Color((119, 119, 119)).as_hex(format='long') == '#777777'
|
||||
assert Color((119, 0, 238)).as_hex(format='long') == '#7700ee'
|
||||
assert Color('B0B').as_hex(format='long') == '#bb00bb'
|
||||
assert Color('#0102031a').as_hex(format='long') == '#0102031a'
|
||||
|
||||
|
||||
def test_as_named():
|
||||
assert Color((0, 255, 255)).as_named() == 'cyan'
|
||||
assert Color('#808000').as_named() == 'olive'
|
||||
assert Color('hsl(180, 100%, 50%)').as_named() == 'cyan'
|
||||
|
||||
assert Color((240, 248, 255)).as_named() == 'aliceblue'
|
||||
with pytest.raises(ValueError) as exc_info:
|
||||
Color((1, 2, 3)).as_named()
|
||||
assert exc_info.value.args[0] == 'no named color found, use fallback=True, as_hex() or as_rgb()'
|
||||
|
||||
assert Color((1, 2, 3)).as_named(fallback=True) == '#010203'
|
||||
assert Color((1, 2, 3, 0.1)).as_named(fallback=True) == '#0102031a'
|
||||
|
||||
|
||||
def test_str_repr():
|
||||
assert str(Color('red')) == 'red'
|
||||
assert repr(Color('red')) == "Color('red', rgb=(255, 0, 0))"
|
||||
assert str(Color((1, 2, 3))) == '#010203'
|
||||
assert repr(Color((1, 2, 3))) == "Color('#010203', rgb=(1, 2, 3))"
|
||||
|
||||
|
||||
def test_eq():
|
||||
assert Color('red') == Color('red')
|
||||
assert Color('red') != Color('blue')
|
||||
assert Color('red') != 'red'
|
||||
|
||||
assert Color('red') == Color((255, 0, 0))
|
||||
assert Color('red') != Color((0, 0, 255))
|
||||
|
||||
|
||||
def test_color_hashable():
|
||||
assert hash(Color('red')) != hash(Color('blue'))
|
||||
assert hash(Color('red')) == hash(Color((255, 0, 0)))
|
||||
assert hash(Color('red')) != hash(Color((255, 0, 0, 0.5)))
|
191
tests/test_types_payment.py
Normal file
191
tests/test_types_payment.py
Normal file
|
@ -0,0 +1,191 @@
|
|||
from collections import namedtuple
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
from pydantic_core._pydantic_core import PydanticCustomError
|
||||
|
||||
from pydantic_extra_types.payment import PaymentCardBrand, PaymentCardNumber
|
||||
|
||||
VALID_AMEX = '370000000000002'
|
||||
VALID_MC = '5100000000000003'
|
||||
VALID_VISA_13 = '4050000000001'
|
||||
VALID_VISA_16 = '4050000000000001'
|
||||
VALID_VISA_19 = '4050000000000000001'
|
||||
VALID_MIR_16 = '2200000000000004'
|
||||
VALID_MIR_17 = '22000000000000004'
|
||||
VALID_MIR_18 = '220000000000000004'
|
||||
VALID_MIR_19 = '2200000000000000004'
|
||||
VALID_DISCOVER = '6011000000000004'
|
||||
VALID_VERVE_16 = '5061000000000001'
|
||||
VALID_VERVE_18 = '506100000000000001'
|
||||
VALID_VERVE_19 = '5061000000000000001'
|
||||
VALID_DANKORT = '5019000000000000'
|
||||
VALID_UNIONPAY_16 = '6200000000000001'
|
||||
VALID_UNIONPAY_19 = '8100000000000000001'
|
||||
VALID_JCB_16 = '3528000000000001'
|
||||
VALID_JCB_19 = '3528000000000000001'
|
||||
VALID_MAESTRO = '6759649826438453'
|
||||
VALID_TROY = '9792000000000001'
|
||||
VALID_OTHER = '2000000000000000008'
|
||||
LUHN_INVALID = '4000000000000000'
|
||||
LEN_INVALID = '40000000000000006'
|
||||
|
||||
|
||||
# Mock PaymentCardNumber
|
||||
PCN = namedtuple('PaymentCardNumber', ['card_number', 'brand'])
|
||||
PCN.__len__ = lambda v: len(v.card_number)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session', name='PaymentCard')
|
||||
def payment_card_model_fixture():
|
||||
class PaymentCard(BaseModel):
|
||||
card_number: PaymentCardNumber
|
||||
|
||||
return PaymentCard
|
||||
|
||||
|
||||
def test_validate_digits():
|
||||
digits = '12345'
|
||||
assert PaymentCardNumber.validate_digits(digits) is None
|
||||
with pytest.raises(PydanticCustomError, match='Card number is not all digits'):
|
||||
PaymentCardNumber.validate_digits('hello')
|
||||
with pytest.raises(PydanticCustomError, match='Card number is not all digits'):
|
||||
PaymentCardNumber.validate_digits('²')
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'card_number, valid',
|
||||
[
|
||||
('0', True),
|
||||
('00', True),
|
||||
('18', True),
|
||||
('0000000000000000', True),
|
||||
('4242424242424240', False),
|
||||
('4242424242424241', False),
|
||||
('4242424242424242', True),
|
||||
('4242424242424243', False),
|
||||
('4242424242424244', False),
|
||||
('4242424242424245', False),
|
||||
('4242424242424246', False),
|
||||
('4242424242424247', False),
|
||||
('4242424242424248', False),
|
||||
('4242424242424249', False),
|
||||
('42424242424242426', True),
|
||||
('424242424242424267', True),
|
||||
('4242424242424242675', True),
|
||||
('5164581347216566', True),
|
||||
('4345351087414150', True),
|
||||
('343728738009846', True),
|
||||
('5164581347216567', False),
|
||||
('4345351087414151', False),
|
||||
('343728738009847', False),
|
||||
('000000018', True),
|
||||
('99999999999999999999', True),
|
||||
('99999999999999999999999999999999999999999999999999999999999999999997', True),
|
||||
],
|
||||
)
|
||||
def test_validate_luhn_check_digit(card_number: str, valid: bool):
|
||||
if valid:
|
||||
assert PaymentCardNumber.validate_luhn_check_digit(card_number) == card_number
|
||||
else:
|
||||
with pytest.raises(PydanticCustomError, match='Card number is not luhn valid'):
|
||||
PaymentCardNumber.validate_luhn_check_digit(card_number)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'card_number, brand, valid',
|
||||
[
|
||||
(VALID_VISA_13, PaymentCardBrand.visa, True),
|
||||
(VALID_VISA_16, PaymentCardBrand.visa, True),
|
||||
(VALID_VISA_19, PaymentCardBrand.visa, True),
|
||||
(VALID_MC, PaymentCardBrand.mastercard, True),
|
||||
(VALID_AMEX, PaymentCardBrand.amex, True),
|
||||
(VALID_MIR_16, PaymentCardBrand.mir, True),
|
||||
(VALID_MIR_17, PaymentCardBrand.mir, True),
|
||||
(VALID_MIR_18, PaymentCardBrand.mir, True),
|
||||
(VALID_MIR_19, PaymentCardBrand.mir, True),
|
||||
(VALID_DISCOVER, PaymentCardBrand.discover, True),
|
||||
(VALID_VERVE_16, PaymentCardBrand.verve, True),
|
||||
(VALID_VERVE_18, PaymentCardBrand.verve, True),
|
||||
(VALID_VERVE_19, PaymentCardBrand.verve, True),
|
||||
(VALID_DANKORT, PaymentCardBrand.dankort, True),
|
||||
(VALID_UNIONPAY_16, PaymentCardBrand.unionpay, True),
|
||||
(VALID_UNIONPAY_19, PaymentCardBrand.unionpay, True),
|
||||
(VALID_JCB_16, PaymentCardBrand.jcb, True),
|
||||
(VALID_JCB_19, PaymentCardBrand.jcb, True),
|
||||
(LEN_INVALID, PaymentCardBrand.visa, False),
|
||||
(VALID_MAESTRO, PaymentCardBrand.maestro, True),
|
||||
(VALID_TROY, PaymentCardBrand.troy, True),
|
||||
(VALID_OTHER, PaymentCardBrand.other, True),
|
||||
],
|
||||
)
|
||||
def test_length_for_brand(card_number: str, brand: PaymentCardBrand, valid: bool):
|
||||
# pcn = PCN(card_number, brand)
|
||||
if valid:
|
||||
assert PaymentCardNumber.validate_brand(card_number) == brand
|
||||
else:
|
||||
with pytest.raises(PydanticCustomError) as exc_info:
|
||||
PaymentCardNumber.validate_brand(card_number)
|
||||
assert exc_info.value.type == 'payment_card_number_brand'
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'card_number, brand',
|
||||
[
|
||||
(VALID_AMEX, PaymentCardBrand.amex),
|
||||
(VALID_MC, PaymentCardBrand.mastercard),
|
||||
(VALID_VISA_16, PaymentCardBrand.visa),
|
||||
(VALID_MIR_16, PaymentCardBrand.mir),
|
||||
(VALID_DISCOVER, PaymentCardBrand.discover),
|
||||
(VALID_VERVE_16, PaymentCardBrand.verve),
|
||||
(VALID_DANKORT, PaymentCardBrand.dankort),
|
||||
(VALID_UNIONPAY_16, PaymentCardBrand.unionpay),
|
||||
(VALID_JCB_16, PaymentCardBrand.jcb),
|
||||
(VALID_OTHER, PaymentCardBrand.other),
|
||||
(VALID_MAESTRO, PaymentCardBrand.maestro),
|
||||
(VALID_TROY, PaymentCardBrand.troy),
|
||||
],
|
||||
)
|
||||
def test_get_brand(card_number: str, brand: PaymentCardBrand):
|
||||
assert PaymentCardNumber.validate_brand(card_number) == brand
|
||||
|
||||
|
||||
def test_valid(PaymentCard):
|
||||
card = PaymentCard(card_number=VALID_VISA_16)
|
||||
assert str(card.card_number) == VALID_VISA_16
|
||||
assert card.card_number.masked == '405000******0001'
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'card_number, error_message',
|
||||
[
|
||||
(None, 'type=string_type'),
|
||||
('1' * 11, 'type=string_too_short,'),
|
||||
('1' * 20, 'type=string_too_long,'),
|
||||
('h' * 16, 'type=payment_card_number_digits'),
|
||||
(LUHN_INVALID, 'type=payment_card_number_luhn,'),
|
||||
(LEN_INVALID, 'type=payment_card_number_brand,'),
|
||||
],
|
||||
)
|
||||
def test_error_types(card_number: Any, error_message: str, PaymentCard):
|
||||
with pytest.raises(ValidationError, match=error_message):
|
||||
PaymentCard(card_number=card_number)
|
||||
|
||||
|
||||
def test_payment_card_brand():
|
||||
b = PaymentCardBrand.visa
|
||||
assert str(b) == 'Visa'
|
||||
assert b is PaymentCardBrand.visa
|
||||
assert b == PaymentCardBrand.visa
|
||||
assert b in {PaymentCardBrand.visa, PaymentCardBrand.mastercard}
|
||||
|
||||
b = 'Visa'
|
||||
assert b is not PaymentCardBrand.visa
|
||||
assert b == PaymentCardBrand.visa
|
||||
assert b in {PaymentCardBrand.visa, PaymentCardBrand.mastercard}
|
||||
|
||||
b = PaymentCardBrand.amex
|
||||
assert b is not PaymentCardBrand.visa
|
||||
assert b != PaymentCardBrand.visa
|
||||
assert b not in {PaymentCardBrand.visa, PaymentCardBrand.mastercard}
|
80
tests/test_ulid.py
Normal file
80
tests/test_ulid.py
Normal file
|
@ -0,0 +1,80 @@
|
|||
from datetime import datetime, timezone
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
from pydantic_extra_types.ulid import ULID
|
||||
|
||||
try:
|
||||
from ulid import ULID as _ULID
|
||||
except ModuleNotFoundError: # pragma: no cover
|
||||
raise RuntimeError(
|
||||
'The `ulid` module requires "python-ulid" to be installed. You can install it with "pip install python-ulid".'
|
||||
)
|
||||
|
||||
|
||||
class Something(BaseModel):
|
||||
ulid: ULID
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'ulid, result, valid',
|
||||
[
|
||||
# Valid ULID for str format
|
||||
('01BTGNYV6HRNK8K8VKZASZCFPE', '01BTGNYV6HRNK8K8VKZASZCFPE', True),
|
||||
('01BTGNYV6HRNK8K8VKZASZCFPF', '01BTGNYV6HRNK8K8VKZASZCFPF', True),
|
||||
# Invalid ULID for str format
|
||||
('01BTGNYV6HRNK8K8VKZASZCFP', None, False), # Invalid ULID (short length)
|
||||
('01BTGNYV6HRNK8K8VKZASZCFPEA', None, False), # Invalid ULID (long length)
|
||||
# Valid ULID for _ULID format
|
||||
(_ULID.from_str('01BTGNYV6HRNK8K8VKZASZCFPE'), '01BTGNYV6HRNK8K8VKZASZCFPE', True),
|
||||
(_ULID.from_str('01BTGNYV6HRNK8K8VKZASZCFPF'), '01BTGNYV6HRNK8K8VKZASZCFPF', True),
|
||||
# Invalid _ULID for bytes format
|
||||
(b'\x01\xBA\x1E\xB2\x8A\x9F\xFAy\x10\xD5\xA5k\xC8', None, False), # Invalid ULID (short length)
|
||||
(b'\x01\xBA\x1E\xB2\x8A\x9F\xFAy\x10\xD5\xA5k\xC8\xB6\x00', None, False), # Invalid ULID (long length)
|
||||
# Valid ULID for int format
|
||||
(109667145845879622871206540411193812282, '2JG4FVY7N8XS4GFVHPXGJZ8S9T', True),
|
||||
(109667145845879622871206540411193812283, '2JG4FVY7N8XS4GFVHPXGJZ8S9V', True),
|
||||
(109667145845879622871206540411193812284, '2JG4FVY7N8XS4GFVHPXGJZ8S9W', True),
|
||||
],
|
||||
)
|
||||
def test_format_for_ulid(ulid: Any, result: Any, valid: bool):
|
||||
if valid:
|
||||
assert str(Something(ulid=ulid).ulid) == result
|
||||
else:
|
||||
with pytest.raises(ValidationError, match='format'):
|
||||
Something(ulid=ulid)
|
||||
|
||||
|
||||
def test_property_for_ulid():
|
||||
ulid = Something(ulid='01BTGNYV6HRNK8K8VKZASZCFPE').ulid
|
||||
assert ulid.hex == '015ea15f6cd1c56689a373fab3f63ece'
|
||||
assert ulid == '01BTGNYV6HRNK8K8VKZASZCFPE'
|
||||
assert ulid.datetime == datetime(2017, 9, 20, 22, 18, 59, 153000, tzinfo=timezone.utc)
|
||||
assert ulid.timestamp == 1505945939.153
|
||||
|
||||
|
||||
def test_json_schema():
|
||||
assert Something.model_json_schema(mode='validation') == {
|
||||
'properties': {
|
||||
'ulid': {
|
||||
'anyOf': [{'type': 'integer'}, {'format': 'binary', 'type': 'string'}, {'type': 'string'}],
|
||||
'title': 'Ulid',
|
||||
}
|
||||
},
|
||||
'required': ['ulid'],
|
||||
'title': 'Something',
|
||||
'type': 'object',
|
||||
}
|
||||
assert Something.model_json_schema(mode='serialization') == {
|
||||
'properties': {
|
||||
'ulid': {
|
||||
'anyOf': [{'type': 'integer'}, {'format': 'binary', 'type': 'string'}, {'type': 'string'}],
|
||||
'title': 'Ulid',
|
||||
}
|
||||
},
|
||||
'required': ['ulid'],
|
||||
'title': 'Something',
|
||||
'type': 'object',
|
||||
}
|
Loading…
Add table
Reference in a new issue