
Adding upstream version 1.2+20240521.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-02-05 14:24:15 +01:00
parent 6b2864e4b9
commit 8512f66c5a
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
229 changed files with 19561 additions and 0 deletions

1
.env.default Normal file

@@ -0,0 +1 @@
PYTHONPATH=.

3
.github/CONTRIBUTING.md vendored Normal file

@@ -0,0 +1,3 @@
# Contributing
There's lots to do, and we're working hard, so any help is welcome!

65
.github/workflows/ci.yml vendored Normal file

@@ -0,0 +1,65 @@
name: CI
on:
push:
branches:
- master
pull_request:
branches:
- '**'
jobs:
tests:
name: ${{ matrix.os }} / ${{ matrix.python-version }}
runs-on: ${{ matrix.os }}-latest
strategy:
fail-fast: false
matrix:
os: [Ubuntu, MacOS, Windows]
python-version: ['3.9', '3.10', '3.11', '3.12']
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Get full Python version
id: full-python-version
shell: bash
run: echo "version=$(python -c "import sys; print('-'.join(str(v) for v in sys.version_info))")" >> "$GITHUB_OUTPUT"
- name: Install poetry
shell: bash
run: |
python -m pip install poetry
echo "$HOME/.poetry/bin" >> $GITHUB_PATH
- name: Configure poetry
shell: bash
run: poetry config virtualenvs.in-project true
- name: Set up cache
uses: actions/cache@v3
id: cache
with:
path: .venv
key: venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('**/poetry.lock') }}
- name: Ensure cache is healthy
if: steps.cache.outputs.cache-hit == 'true'
shell: bash
run: poetry run pip --version >/dev/null 2>&1 || rm -rf .venv
- name: Install dependencies
shell: bash
run: poetry install -E compiler
- name: Generate code from proto files
shell: bash
run: poetry run python -m tests.generate -v
- name: Execute test suite
shell: bash
run: poetry run python -m pytest tests/

18
.github/workflows/code-quality.yml vendored Normal file

@@ -0,0 +1,18 @@
name: Code Quality
on:
push:
branches:
- master
pull_request:
branches:
- '**'
jobs:
check-formatting:
name: Check code/doc formatting
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- uses: pre-commit/action@v2.0.3

20
.gitignore vendored Normal file

@@ -0,0 +1,20 @@
.coverage
.DS_Store
.env
.vscode/settings.json
.mypy_cache
.pytest_cache
.python-version
build/
tests/output_*
**/__pycache__
dist
**/*.egg-info
output
.idea
.DS_Store
.tox
.venv
.asv
venv
.devcontainer

29
.pre-commit-config.yaml Normal file

@@ -0,0 +1,29 @@
ci:
autofix_prs: false
repos:
- repo: https://github.com/pycqa/isort
rev: 5.11.5
hooks:
- id: isort
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black
args: ["--target-version", "py310"]
- repo: https://github.com/PyCQA/doc8
rev: 0.10.1
hooks:
- id: doc8
additional_dependencies:
- toml
# Removed since aristaproto doesn't use the Java code and this hook breaks CI.
# - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
# rev: v2.10.0
# hooks:
# - id: pretty-format-java
# args: [--autofix, --aosp]
# files: ^.*\.java$

23
LICENSE.md Normal file

@@ -0,0 +1,23 @@
MIT License
Copyright (c) 2023 Arista Networks
Copyright (c) 2019-2023 Daniel G. Taylor
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

463
README.md Normal file

@@ -0,0 +1,463 @@
# Arista Protobuf / Python gRPC bindings generator & library
This was originally forked from <https://github.com/danielgtaylor/python-betterproto> @ [b8a091ae7055dd949d193695a06c9536ad51eea8](https://github.com/danielgtaylor/python-betterproto/commit/b8a091ae7055dd949d193695a06c9536ad51eea8).
Afterwards commits up to `1f88b67eeb9871d33da154fd2c859b9d1aed62c1` on `python-betterproto` have been cherry-picked.
Changes in this project compared with the base project:
- Renamed to `aristaproto`.
- Cut support for Python < 3.9.
- Updated various CI actions and dependencies.
- Merged docs from multiple `rst` files into Markdown.
- Kept nanosecond precision for `Timestamp`.
- Subclassed `datetime` to store the original nanosecond value when converting from `Timestamp` to `datetime`.
- On conversion from the `datetime` subclass back to `Timestamp`, the original nanosecond value is restored (see the sketch below).
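As a sketch of the intended round trip (assuming the generated `Timestamp` message from `aristaproto.lib.google.protobuf` and its `to_datetime()`/`from_datetime()` helpers):
```python
from aristaproto.lib.google.protobuf import Timestamp

ts = Timestamp(seconds=1, nanos=123_456_789)
dt = ts.to_datetime()  # a datetime subclass carrying the original nanos
assert Timestamp.from_datetime(dt).nanos == 123_456_789  # precision restored
```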
## Installation
First, install the package. Note that the `[compiler]` feature flag tells it to install extra dependencies only needed by the `protoc` plugin:
```sh
# Install both the library and compiler
pip install "aristaproto[compiler]"
# Install just the library (to use the generated code output)
pip install aristaproto
```
## Getting Started
### Compiling proto files
Given that you have installed the compiler and have a proto file, e.g. `example.proto`:
```protobuf
syntax = "proto3";
package hello;
// Greeting represents a message you can tell a user.
message Greeting {
string message = 1;
}
```
You can run the following to invoke protoc directly:
```sh
mkdir lib
protoc -I . --python_aristaproto_out=lib example.proto
```
or run the following to invoke protoc via grpcio-tools:
```sh
pip install grpcio-tools
python -m grpc_tools.protoc -I . --python_aristaproto_out=lib example.proto
```
This will generate `lib/hello/__init__.py` which looks like:
```python
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: example.proto
# plugin: python-aristaproto
from dataclasses import dataclass
import aristaproto
@dataclass
class Greeting(aristaproto.Message):
"""Greeting represents a message you can tell a user."""
message: str = aristaproto.string_field(1)
```
Now you can use it!
```python
>>> from lib.hello import Greeting
>>> test = Greeting()
>>> test
Greeting(message='')
>>> test.message = "Hey!"
>>> test
Greeting(message="Hey!")
>>> serialized = bytes(test)
>>> serialized
b'\n\x04Hey!'
>>> another = Greeting().parse(serialized)
>>> another
Greeting(message="Hey!")
>>> another.to_dict()
{"message": "Hey!"}
>>> another.to_json(indent=2)
'{\n "message": "Hey!"\n}'
```
### Async gRPC Support
The generated Protobuf `Message` classes are compatible with [grpclib](https://github.com/vmagamedov/grpclib) so you are free to use it if you like. That said, this project also includes support for async gRPC stub generation with better static type checking and code completion support. It is enabled by default.
Given an example service definition:
```protobuf
syntax = "proto3";
package echo;
message EchoRequest {
string value = 1;
// Number of extra times to echo
uint32 extra_times = 2;
}
message EchoResponse {
repeated string values = 1;
}
message EchoStreamResponse {
string value = 1;
}
service Echo {
rpc Echo(EchoRequest) returns (EchoResponse);
rpc EchoStream(EchoRequest) returns (stream EchoStreamResponse);
}
```
Generate the echo proto file:
```sh
python -m grpc_tools.protoc -I . --python_aristaproto_out=. echo.proto
```
A client can be implemented as follows:
```python
import asyncio
import echo
from grpclib.client import Channel
async def main():
channel = Channel(host="127.0.0.1", port=50051)
service = echo.EchoStub(channel)
response = await service.echo(echo.EchoRequest(value="hello", extra_times=1))
print(response)
async for response in service.echo_stream(echo.EchoRequest(value="hello", extra_times=1)):
print(response)
# don't forget to close the channel when done!
channel.close()
if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
```
which would output
```python
EchoResponse(values=['hello', 'hello'])
EchoStreamResponse(value='hello')
EchoStreamResponse(value='hello')
```
This project also produces server-facing stubs that can be used to implement a Python
gRPC server.
To use them, simply subclass the base class in the generated files and override the
service methods:
```python
import asyncio
from echo import EchoBase, EchoRequest, EchoResponse, EchoStreamResponse
from grpclib.server import Server
from typing import AsyncIterator
class EchoService(EchoBase):
async def echo(self, echo_request: "EchoRequest") -> "EchoResponse":
return EchoResponse([echo_request.value for _ in range(echo_request.extra_times)])
async def echo_stream(self, echo_request: "EchoRequest") -> AsyncIterator["EchoStreamResponse"]:
for _ in range(echo_request.extra_times):
yield EchoStreamResponse(echo_request.value)
async def main():
server = Server([EchoService()])
await server.start("127.0.0.1", 50051)
await server.wait_closed()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
```
### JSON
Both serializing and parsing are supported to/from JSON and Python dictionaries using the following methods:
- Dicts: `Message().to_dict()`, `Message().from_dict(...)`
- JSON: `Message().to_json()`, `Message().from_json(...)`
For compatibility, the default is to convert field names to `camelCase`. You can control this behavior by passing a casing value, e.g.:
```python
MyMessage().to_dict(casing=aristaproto.Casing.SNAKE)
```
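For example, a quick sketch reusing the `Greeting` message from above:
```python
>>> from lib.hello import Greeting
>>> Greeting().from_json('{"message": "Hey!"}')
Greeting(message='Hey!')
>>> Greeting(message="Hey!").to_json()
'{"message": "Hey!"}'
```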
### Determining if a message was sent
Sometimes it is useful to be able to determine whether a message has been sent on the wire. This is how the Google wrapper types work to let you know whether a value is unset, set as the default (zero value), or set as something else, for example.
Use `aristaproto.serialized_on_wire(message)` to determine if it was sent. This is a little bit different from the official Google generated Python code, and it lives outside the generated `Message` class to prevent name clashes. Note that it **only** supports Proto 3 and thus can **only** be used to check if `Message` fields are set. You cannot check if a scalar was sent on the wire.
```py
# Old way (official Google Protobuf package)
>>> mymessage.HasField('myfield')
# New way (this project)
>>> aristaproto.serialized_on_wire(mymessage.myfield)
```
### One-of Support
Protobuf supports grouping fields in a `oneof` clause. Only one of the fields in the group may be set at a given time. For example, given the proto:
```protobuf
syntax = "proto3";
message Test {
oneof foo {
bool on = 1;
int32 count = 2;
string name = 3;
}
}
```
On Python 3.10 and later, you can use a `match` statement to access the provided one-of field, which supports type-checking:
```py
test = Test()
match test:
case Test(on=value):
print(value) # value: bool
case Test(count=value):
print(value) # value: int
case Test(name=value):
print(value) # value: str
case _:
print("No value provided")
```
You can also use `aristaproto.which_one_of(message, group_name)` to determine which of the fields was set. It returns a tuple of the field name and value, or a blank string and `None` if unset.
```py
>>> test = Test()
>>> aristaproto.which_one_of(test, "foo")
["", None]
>>> test.on = True
>>> aristaproto.which_one_of(test, "foo")
["on", True]
# Setting one member of the group resets the others.
>>> test.count = 57
>>> aristaproto.which_one_of(test, "foo")
["count", 57]
# Default (zero) values also work.
>>> test.name = ""
>>> aristaproto.which_one_of(test, "foo")
["name", ""]
```
Again, this is a little different from the official Google code generator:
```py
# Old way (official Google protobuf package)
>>> message.WhichOneof("group")
"foo"
# New way (this project)
>>> aristaproto.which_one_of(message, "group")
["foo", "foo's value"]
```
### Well-Known Google Types
Google provides several well-known message types like a timestamp, duration, and several wrappers used to provide optional zero value support. Each of these has a special JSON representation and is handled a little differently from normal messages. The Python mapping for these is as follows:
| Google Message | Python Type | Default |
| --------------------------- | ---------------------------------------- | ---------------------- |
| `google.protobuf.Duration` | [`datetime.timedelta`][td] | `0` |
| `google.protobuf.Timestamp` | Timezone-aware [`datetime.datetime`][dt] | `1970-01-01T00:00:00Z` |
| `google.protobuf.*Value` | `Optional[...]` | `None` |
| `google.protobuf.*` | `aristaproto.lib.google.protobuf.*` | `None` |
[td]: https://docs.python.org/3/library/datetime.html#timedelta-objects
[dt]: https://docs.python.org/3/library/datetime.html#datetime.datetime
For the wrapper types, the Python type corresponds to the wrapped type, e.g. `google.protobuf.BoolValue` becomes `Optional[bool]` while `google.protobuf.Int32Value` becomes `Optional[int]`. All of the optional values default to `None`, so don't forget to check for that possible state. Given:
```protobuf
syntax = "proto3";
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";
message Test {
google.protobuf.BoolValue maybe = 1;
google.protobuf.Timestamp ts = 2;
google.protobuf.Duration duration = 3;
}
```
You can do stuff like:
```py
>>> t = Test().from_dict({"maybe": True, "ts": "2019-01-01T12:00:00Z", "duration": "1.200s"})
>>> t
Test(maybe=True, ts=datetime.datetime(2019, 1, 1, 12, 0, tzinfo=datetime.timezone.utc), duration=datetime.timedelta(seconds=1, microseconds=200000))
>>> t.ts - t.duration
datetime.datetime(2019, 1, 1, 11, 59, 58, 800000, tzinfo=datetime.timezone.utc)
>>> t.ts.isoformat()
'2019-01-01T12:00:00+00:00'
>>> t.maybe = None
>>> t.to_dict()
{'ts': '2019-01-01T12:00:00Z', 'duration': '1.200s'}
```
## Generating Pydantic Models
You can use python-aristaproto to generate pydantic-based models, using
pydantic dataclasses. This means the results of the protobuf unmarshalling will
be type-checked. The usage is the same, but you need to add a custom option
when calling the protobuf compiler:
```sh
protoc -I . --python_aristaproto_opt=pydantic_dataclasses --python_aristaproto_out=lib example.proto
```
The important change is `--python_aristaproto_opt=pydantic_dataclasses`, which
swaps the dataclass implementation from the builtin Python dataclass to the
pydantic dataclass. You must have pydantic as a dependency in your project for
this to work.
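As a hedged sketch of what this buys you, assume the `Greeting` message from the earlier example was generated with `pydantic_dataclasses` (this project pins pydantic v1): invalid field types are then rejected at construction time.
```python
import pydantic

from lib.hello import Greeting  # assumed generated with pydantic_dataclasses

Greeting(message="Hey!")  # validates fine
try:
    Greeting(message=["not", "a", "string"])  # wrong type for a str field
except pydantic.ValidationError as err:
    print(err)  # pydantic reports the offending field
```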
## Development
### Requirements
- Python (3.9 or higher)
- [poetry](https://python-poetry.org/docs/#installation)
*Needed to install dependencies in a virtual environment*
- [poethepoet](https://github.com/nat-n/poethepoet) for running development tasks as defined in pyproject.toml
- Can be installed to your host environment via `pip install poethepoet`, then executed simply as `poe`
- or run from the poetry venv as `poetry run poe`
### Setup
```sh
# Get set up with the virtual env & dependencies
poetry install -E compiler
# Activate the poetry environment
poetry shell
```
### Code style
This project enforces [black](https://github.com/psf/black) python code formatting.
Before committing changes run:
```sh
poe format
```
To avoid merge conflicts later, Python code that is not formatted with black will fail in CI.
### Tests
There are two types of tests:
1. Standard tests
2. Custom tests
#### Standard tests
Adding a standard test case is easy.
- Create a new directory `aristaproto/tests/inputs/<name>`
- Add `<name>.proto` with a message called `Test`
- Add `<name>.json` with some test data (optional)
It will be picked up automatically when you run the tests.
- See also: [Standard Tests Development Guide](tests/README.md)
#### Custom tests
Custom tests are found in `tests/test_*.py` and are run with pytest.
#### Running
Here's how to run the tests.
```sh
# Generate assets from sample .proto files required by the tests
poe generate
# Run the tests
poe test
```
To run the tests as they are run in CI (with tox), run:
```sh
poe full-test
```
### (Re)compiling Google Well-known Types
Aristaproto includes compiled versions of Google's well-known types at [src/aristaproto/lib/google](src/aristaproto/lib/google).
Be sure to regenerate these files when modifying the plugin output format, and validate by running the tests.
Normally, the plugin does not compile any references to `google.protobuf`, since they are pre-compiled. To force compilation of `google.protobuf`, use the option `--custom_opt=INCLUDE_GOOGLE`.
Assuming your `google.protobuf` source files (included with all releases of `protoc`) are located in `/usr/local/include`, you can regenerate them as follows:
```sh
protoc \
--plugin=protoc-gen-custom=src/aristaproto/plugin/main.py \
--custom_opt=INCLUDE_GOOGLE \
--custom_out=src/aristaproto/lib \
-I /usr/local/include/ \
/usr/local/include/google/protobuf/*.proto
```
## License
Copyright 2023 Arista Networks
Copyright 2019-2023 Daniel G. Taylor
This software is free to use under the MIT license. See the [LICENSE](./LICENSE.md) file for license text.

1761
poetry.lock generated Normal file

File diff suppressed because it is too large.

159
pyproject.toml Normal file

@@ -0,0 +1,159 @@
[tool.poetry]
name = "aristaproto"
version = "0.1.2"
description = "Arista Protobuf / Python gRPC bindings generator & library"
authors = ["Arista Networks <ansible@arista.com>"]
readme = "README.md"
repository = "https://github.com/aristanetworks/python-aristaproto"
keywords = ["protobuf", "gRPC", "aristanetworks", "arista"]
license = "MIT"
packages = [
{ include = "aristaproto", from = "src" }
]
[tool.poetry.dependencies]
python = "^3.9"
black = { version = ">=23.1.0", optional = true }
grpclib = "^0.4.1"
jinja2 = { version = ">=3.0.3", optional = true }
python-dateutil = "^2.8"
isort = {version = "^5.11.5", optional = true}
typing-extensions = "^4.7.1"
betterproto-rust-codec = { version = "0.1.1", optional = true }
[tool.poetry.group.dev.dependencies]
asv = "^0.4.2"
bpython = "^0.19"
jinja2 = ">=3.0.3"
mypy = "^0.930"
sphinx = "3.1.2"
sphinx-rtd-theme = "0.5.0"
pre-commit = "^2.17.0"
grpcio-tools = "^1.54.2"
tox = "^4.0.0"
[tool.poetry.group.test.dependencies]
poethepoet = ">=0.9.0"
pytest = "^7.3.2"
pytest-asyncio = "^0.23.0"
pytest-cov = "^2.9.0"
pytest-mock = "^3.12.0"
pydantic = ">=1.8.0,<2"
protobuf = "^4"
cachelib = "^0.10.2"
tomlkit = ">=0.7.0"
[tool.poetry.scripts]
protoc-gen-python_aristaproto = "aristaproto.plugin:main"
[tool.poetry.extras]
compiler = ["black", "isort", "jinja2"]
rust-codec = ["betterproto-rust-codec"]
# Dev workflow tasks
[tool.poe.tasks.generate]
script = "tests.generate:main"
help = "Generate test cases (do this once before running test)"
[tool.poe.tasks.test]
cmd = "pytest"
help = "Run tests"
[tool.poe.tasks.types]
cmd = "mypy src --ignore-missing-imports"
help = "Check types with mypy"
[tool.poe.tasks]
_black = "black . --exclude tests/output_ --target-version py310"
_isort = "isort . --extend-skip-glob 'tests/output_*/**/*'"
[tool.poe.tasks.format]
sequence = ["_black", "_isort"]
help = "Apply black and isort formatting to source code"
[tool.poe.tasks.docs]
cmd = "sphinx-build docs docs/build"
help = "Build the sphinx docs"
[tool.poe.tasks.bench]
shell = "asv run master^! && asv run HEAD^! && asv compare master HEAD"
help = "Benchmark current commit vs. master branch"
[tool.poe.tasks.clean]
cmd = """
rm -rf .asv .coverage .mypy_cache .pytest_cache
dist aristaproto.egg-info **/__pycache__
testsoutput_*
"""
help = "Clean out generated files from the workspace"
[tool.poe.tasks.generate_lib]
cmd = """
protoc
--plugin=protoc-gen-custom=src/aristaproto/plugin/main.py
--custom_opt=INCLUDE_GOOGLE
--custom_out=src/aristaproto/lib/std
-I /usr/local/include/
/usr/local/include/google/protobuf/**/*.proto
"""
help = "Regenerate the types in aristaproto.lib.std.google"
# CI tasks
[tool.poe.tasks.full-test]
shell = "poe generate && tox"
help = "Run tests with multiple pythons"
[tool.poe.tasks.check-style]
cmd = "black . --check --diff"
help = "Check if code style is correct"
[tool.isort]
py_version = 37
profile = "black"
force_single_line = false
combine_as_imports = true
lines_after_imports = 2
include_trailing_comma = true
force_grid_wrap = 2
src_paths = ["src", "tests"]
[tool.black]
target-version = ['py37']
[tool.doc8]
paths = ["docs"]
max_line_length = 88
[tool.doc8.ignore_path_errors]
"docs/migrating.rst" = [
"D001", # contains table which is longer than 88 characters long
]
[tool.coverage.run]
omit = ["aristaproto/tests/*"]
[tool.tox]
legacy_tox_ini = """
[tox]
requires =
tox>=4.2
tox-poetry-installer[poetry]==1.0.0b1
env_list =
py311
py39
[testenv]
commands =
pytest {posargs: --cov aristaproto}
poetry_dep_groups =
test
require_locked_deps = true
require_poetry = true
"""
[build-system]
requires = ["poetry-core>=1.0.0,<2"]
build-backend = "poetry.core.masonry.api"

5
pytest.ini Normal file

@@ -0,0 +1,5 @@
[pytest]
python_files = test_*.py
python_classes =
norecursedirs = **/output_*
addopts = -p no:warnings

2038
src/aristaproto/__init__.py Normal file

File diff suppressed because it is too large.

14
src/aristaproto/_types.py Normal file

@@ -0,0 +1,14 @@
from typing import (
TYPE_CHECKING,
TypeVar,
)
if TYPE_CHECKING:
from grpclib._typing import IProtoMessage
from . import Message
# Bound type variable to allow methods to return `self` of subclasses
T = TypeVar("T", bound="Message")
ST = TypeVar("ST", bound="IProtoMessage")


@@ -0,0 +1,4 @@
from importlib import metadata
__version__ = metadata.version("aristaproto")

143
src/aristaproto/casing.py Normal file

@@ -0,0 +1,143 @@
import keyword
import re
# Word delimiters and symbols that will not be preserved when re-casing.
# language=PythonRegExp
SYMBOLS = "[^a-zA-Z0-9]*"
# Optionally capitalized word.
# language=PythonRegExp
WORD = "[A-Z]*[a-z]*[0-9]*"
# Uppercase word, not followed by lowercase letters.
# language=PythonRegExp
WORD_UPPER = "[A-Z]+(?![a-z])[0-9]*"
def safe_snake_case(value: str) -> str:
"""Snake case a value taking into account Python keywords."""
value = snake_case(value)
value = sanitize_name(value)
return value
def snake_case(value: str, strict: bool = True) -> str:
"""
Join words with an underscore into lowercase and remove symbols.
Parameters
-----------
value: :class:`str`
The value to convert.
strict: :class:`bool`
Whether or not to force single underscores.
Returns
--------
:class:`str`
The value in snake_case.
"""
def substitute_word(symbols: str, word: str, is_start: bool) -> str:
if not word:
return ""
if strict:
delimiter_count = 0 if is_start else 1 # Single underscore if strict.
elif is_start:
delimiter_count = len(symbols)
elif word.isupper() or word.islower():
delimiter_count = max(
1, len(symbols)
) # Preserve all delimiters if not strict.
else:
delimiter_count = len(symbols) + 1 # Extra underscore for leading capital.
return ("_" * delimiter_count) + word.lower()
snake = re.sub(
f"(^)?({SYMBOLS})({WORD_UPPER}|{WORD})",
lambda groups: substitute_word(groups[2], groups[3], groups[1] is not None),
value,
)
return snake
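# For example: snake_case("HTTPResponseCode") == "http_response_code";
# with strict=False, existing delimiters such as "__" are preserved.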
def pascal_case(value: str, strict: bool = True) -> str:
"""
Capitalize each word and remove symbols.
Parameters
-----------
value: :class:`str`
The value to convert.
strict: :class:`bool`
Whether or not to output only alphanumeric characters.
Returns
--------
:class:`str`
The value in PascalCase.
"""
def substitute_word(symbols, word):
if strict:
return word.capitalize() # Remove all delimiters
if word.islower():
delimiter_length = len(symbols[:-1]) # Lose one delimiter
else:
delimiter_length = len(symbols) # Preserve all delimiters
return ("_" * delimiter_length) + word.capitalize()
return re.sub(
f"({SYMBOLS})({WORD_UPPER}|{WORD})",
lambda groups: substitute_word(groups[1], groups[2]),
value,
)
def camel_case(value: str, strict: bool = True) -> str:
"""
Capitalize all words except first and remove symbols.
Parameters
-----------
value: :class:`str`
The value to convert.
strict: :class:`bool`
Whether or not to output only alphanumeric characters.
Returns
--------
:class:`str`
The value in camelCase.
"""
return lowercase_first(pascal_case(value, strict=strict))
def lowercase_first(value: str) -> str:
"""
Lower cases the first character of the value.
Parameters
----------
value: :class:`str`
The value to lower case.
Returns
-------
:class:`str`
The lower cased string.
"""
return value[0:1].lower() + value[1:]
def sanitize_name(value: str) -> str:
# https://www.python.org/dev/peps/pep-0008/#descriptive-naming-styles
if keyword.iskeyword(value):
return f"{value}_"
if not value.isidentifier():
return f"_{value}"
return value


@@ -0,0 +1,176 @@
import os
import re
from typing import (
Dict,
List,
Set,
Tuple,
Type,
)
from ..casing import safe_snake_case
from ..lib.google import protobuf as google_protobuf
from .naming import pythonize_class_name
WRAPPER_TYPES: Dict[str, Type] = {
".google.protobuf.DoubleValue": google_protobuf.DoubleValue,
".google.protobuf.FloatValue": google_protobuf.FloatValue,
".google.protobuf.Int32Value": google_protobuf.Int32Value,
".google.protobuf.Int64Value": google_protobuf.Int64Value,
".google.protobuf.UInt32Value": google_protobuf.UInt32Value,
".google.protobuf.UInt64Value": google_protobuf.UInt64Value,
".google.protobuf.BoolValue": google_protobuf.BoolValue,
".google.protobuf.StringValue": google_protobuf.StringValue,
".google.protobuf.BytesValue": google_protobuf.BytesValue,
}
def parse_source_type_name(field_type_name: str) -> Tuple[str, str]:
"""
Split full source type name into package and type name.
E.g. 'root.package.Message' -> ('root.package', 'Message')
'root.Message.SomeEnum' -> ('root', 'Message.SomeEnum')
"""
package_match = re.match(r"^\.?([^A-Z]+)\.(.+)", field_type_name)
if package_match:
package = package_match.group(1)
name = package_match.group(2)
else:
package = ""
name = field_type_name.lstrip(".")
return package, name
def get_type_reference(
*,
package: str,
imports: set,
source_type: str,
unwrap: bool = True,
pydantic: bool = False,
) -> str:
"""
Return a Python type name for a proto type reference. Adds the import if
necessary. Unwraps well known type if required.
"""
if unwrap:
if source_type in WRAPPER_TYPES:
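# Instantiate the wrapper and inspect the type of its default ``value``
# field to recover the wrapped Python type (e.g. BoolValue -> bool).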
wrapped_type = type(WRAPPER_TYPES[source_type]().value)
return f"Optional[{wrapped_type.__name__}]"
if source_type == ".google.protobuf.Duration":
return "timedelta"
elif source_type == ".google.protobuf.Timestamp":
return "datetime"
source_package, source_type = parse_source_type_name(source_type)
current_package: List[str] = package.split(".") if package else []
py_package: List[str] = source_package.split(".") if source_package else []
py_type: str = pythonize_class_name(source_type)
compiling_google_protobuf = current_package == ["google", "protobuf"]
importing_google_protobuf = py_package == ["google", "protobuf"]
if importing_google_protobuf and not compiling_google_protobuf:
py_package = (
["aristaproto", "lib"] + (["pydantic"] if pydantic else []) + py_package
)
if py_package[:1] == ["aristaproto"]:
return reference_absolute(imports, py_package, py_type)
if py_package == current_package:
return reference_sibling(py_type)
if py_package[: len(current_package)] == current_package:
return reference_descendent(current_package, imports, py_package, py_type)
if current_package[: len(py_package)] == py_package:
return reference_ancestor(current_package, imports, py_package, py_type)
return reference_cousin(current_package, imports, py_package, py_type)
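# A worked example (hypothetical packages): compiling package "a.b" while
# referencing ".a.c.Msg" falls through to reference_cousin, which adds
# "from .. import c as _c__" to imports and returns '"_c__.Msg"'.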
def reference_absolute(imports: Set[str], py_package: List[str], py_type: str) -> str:
"""
Returns a reference to a python type located in the root, i.e. sys.path.
"""
string_import = ".".join(py_package)
string_alias = safe_snake_case(string_import)
imports.add(f"import {string_import} as {string_alias}")
return f'"{string_alias}.{py_type}"'
def reference_sibling(py_type: str) -> str:
"""
Returns a reference to a python type within the same package as the current package.
"""
return f'"{py_type}"'
def reference_descendent(
current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
) -> str:
"""
Returns a reference to a python type in a package that is a descendent of the
current package, and adds the required import that is aliased to avoid name
conflicts.
"""
importing_descendent = py_package[len(current_package) :]
string_from = ".".join(importing_descendent[:-1])
string_import = importing_descendent[-1]
if string_from:
string_alias = "_".join(importing_descendent)
imports.add(f"from .{string_from} import {string_import} as {string_alias}")
return f'"{string_alias}.{py_type}"'
else:
imports.add(f"from . import {string_import}")
return f'"{string_import}.{py_type}"'
def reference_ancestor(
current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
) -> str:
"""
Returns a reference to a python type in a package which is an ancestor to the
current package, and adds the required import that is aliased (if possible) to avoid
name conflicts.
Adds trailing __ to avoid name mangling (python.org/dev/peps/pep-0008/#id34).
"""
distance_up = len(current_package) - len(py_package)
if py_package:
string_import = py_package[-1]
string_alias = f"_{'_' * distance_up}{string_import}__"
string_from = f"..{'.' * distance_up}"
imports.add(f"from {string_from} import {string_import} as {string_alias}")
return f'"{string_alias}.{py_type}"'
else:
string_alias = f"{'_' * distance_up}{py_type}__"
imports.add(f"from .{'.' * distance_up} import {py_type} as {string_alias}")
return f'"{string_alias}"'
def reference_cousin(
current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
) -> str:
"""
Returns a reference to a python type in a package that is not descendent, ancestor
or sibling, and adds the required import that is aliased to avoid name conflicts.
"""
shared_ancestry = os.path.commonprefix([current_package, py_package]) # type: ignore
distance_up = len(current_package) - len(shared_ancestry)
string_from = f".{'.' * distance_up}" + ".".join(
py_package[len(shared_ancestry) : -1]
)
string_import = py_package[-1]
# Add trailing __ to avoid name mangling (python.org/dev/peps/pep-0008/#id34)
string_alias = (
f"{'_' * distance_up}"
+ safe_snake_case(".".join(py_package[len(shared_ancestry) :]))
+ "__"
)
imports.add(f"from {string_from} import {string_import} as {string_alias}")
return f'"{string_alias}.{py_type}"'


@@ -0,0 +1,21 @@
from aristaproto import casing
def pythonize_class_name(name: str) -> str:
return casing.pascal_case(name)
def pythonize_field_name(name: str) -> str:
return casing.safe_snake_case(name)
def pythonize_method_name(name: str) -> str:
return casing.safe_snake_case(name)
def pythonize_enum_member_name(name: str, enum_name: str) -> str:
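# Generated protobuf enums often prefix members with the enum's own name
# (e.g. "MY_ENUM_FOO" inside enum MyEnum); strip that prefix so the Python
# member becomes just "FOO", then guard against Python keywords.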
enum_name = casing.snake_case(enum_name).upper()
find = name.find(enum_name)
if find != -1:
name = name[find + len(enum_name) :].strip("_")
return casing.sanitize_name(name)

195
src/aristaproto/enum.py Normal file

@@ -0,0 +1,195 @@
from __future__ import annotations
import sys
from enum import (
EnumMeta,
IntEnum,
)
from types import MappingProxyType
from typing import (
TYPE_CHECKING,
Any,
Dict,
Optional,
Tuple,
)
if TYPE_CHECKING:
from collections.abc import (
Generator,
Mapping,
)
from typing_extensions import (
Never,
Self,
)
def _is_descriptor(obj: object) -> bool:
return (
hasattr(obj, "__get__") or hasattr(obj, "__set__") or hasattr(obj, "__delete__")
)
class EnumType(EnumMeta if TYPE_CHECKING else type):
_value_map_: Mapping[int, Enum]
_member_map_: Mapping[str, Enum]
def __new__(
mcs, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]
) -> Self:
value_map = {}
member_map = {}
new_mcs = type(
f"{name}Type",
tuple(
dict.fromkeys(
[base.__class__ for base in bases if base.__class__ is not type]
+ [EnumType, type]
)
), # reorder the bases so EnumType and type are last to avoid conflicts
{"_value_map_": value_map, "_member_map_": member_map},
)
members = {
name: value
for name, value in namespace.items()
if not _is_descriptor(value) and not name.startswith("__")
}
cls = type.__new__(
new_mcs,
name,
bases,
{key: value for key, value in namespace.items() if key not in members},
)
# this allows us to disallow member access from other members as
# members become proper class variables
for name, value in members.items():
member = value_map.get(value)
if member is None:
member = cls.__new__(cls, name=name, value=value) # type: ignore
value_map[value] = member
member_map[name] = member
type.__setattr__(new_mcs, name, member)
return cls
if not TYPE_CHECKING:
def __call__(cls, value: int) -> Enum:
try:
return cls._value_map_[value]
except (KeyError, TypeError):
raise ValueError(f"{value!r} is not a valid {cls.__name__}") from None
def __iter__(cls) -> Generator[Enum, None, None]:
yield from cls._member_map_.values()
def __reversed__(cls) -> Generator[Enum, None, None]:
yield from reversed(cls._member_map_.values())
def __getitem__(cls, key: str) -> Enum:
return cls._member_map_[key]
@property
def __members__(cls) -> MappingProxyType[str, Enum]:
return MappingProxyType(cls._member_map_)
def __repr__(cls) -> str:
return f"<enum {cls.__name__!r}>"
def __len__(cls) -> int:
return len(cls._member_map_)
def __setattr__(cls, name: str, value: Any) -> Never:
raise AttributeError(f"{cls.__name__}: cannot reassign Enum members.")
def __delattr__(cls, name: str) -> Never:
raise AttributeError(f"{cls.__name__}: cannot delete Enum members.")
def __contains__(cls, member: object) -> bool:
return isinstance(member, cls) and member.name in cls._member_map_
class Enum(IntEnum if TYPE_CHECKING else int, metaclass=EnumType):
"""
The base class for protobuf enumerations, all generated enumerations will
inherit from this. Emulates `enum.IntEnum`.
"""
name: Optional[str]
value: int
if not TYPE_CHECKING:
def __new__(cls, *, name: Optional[str], value: int) -> Self:
self = super().__new__(cls, value)
super().__setattr__(self, "name", name)
super().__setattr__(self, "value", value)
return self
def __str__(self) -> str:
return self.name or "None"
def __repr__(self) -> str:
return f"{self.__class__.__name__}.{self.name}"
def __setattr__(self, key: str, value: Any) -> Never:
raise AttributeError(
f"{self.__class__.__name__} Cannot reassign a member's attributes."
)
def __delattr__(self, item: Any) -> Never:
raise AttributeError(
f"{self.__class__.__name__} Cannot delete a member's attributes."
)
def __copy__(self) -> Self:
return self
def __deepcopy__(self, memo: Any) -> Self:
return self
@classmethod
def try_value(cls, value: int = 0) -> Self:
"""Return the value which corresponds to the value.
Parameters
-----------
value: :class:`int`
The value of the enum member to get.
Returns
-------
:class:`Enum`
The corresponding member or a new instance of the enum if
``value`` isn't actually a member.
"""
try:
return cls._value_map_[value]
except (KeyError, TypeError):
return cls.__new__(cls, name=None, value=value)
@classmethod
def from_string(cls, name: str) -> Self:
"""Return the value which corresponds to the string name.
Parameters
-----------
name: :class:`str`
The name of the enum member to get.
Raises
-------
:exc:`ValueError`
The member was not found in the Enum.
"""
try:
return cls._member_map_[name]
except KeyError as e:
raise ValueError(f"Unknown value {name} for enum {cls.__name__}") from e


@@ -0,0 +1,177 @@
import asyncio
from abc import ABC
from typing import (
TYPE_CHECKING,
AsyncIterable,
AsyncIterator,
Collection,
Iterable,
Mapping,
Optional,
Tuple,
Type,
Union,
)
import grpclib.const
if TYPE_CHECKING:
from grpclib.client import Channel
from grpclib.metadata import Deadline
from .._types import (
ST,
IProtoMessage,
Message,
T,
)
Value = Union[str, bytes]
MetadataLike = Union[Mapping[str, Value], Collection[Tuple[str, Value]]]
MessageSource = Union[Iterable["IProtoMessage"], AsyncIterable["IProtoMessage"]]
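# Metadata may be a mapping or a collection of key/value pairs, mirroring
# what grpclib accepts; a MessageSource is any sync or async iterable of
# protobuf messages to stream to the server.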
class ServiceStub(ABC):
"""
Base class for async gRPC clients.
"""
def __init__(
self,
channel: "Channel",
*,
timeout: Optional[float] = None,
deadline: Optional["Deadline"] = None,
metadata: Optional[MetadataLike] = None,
) -> None:
self.channel = channel
self.timeout = timeout
self.deadline = deadline
self.metadata = metadata
def __resolve_request_kwargs(
self,
timeout: Optional[float],
deadline: Optional["Deadline"],
metadata: Optional[MetadataLike],
):
return {
"timeout": self.timeout if timeout is None else timeout,
"deadline": self.deadline if deadline is None else deadline,
"metadata": self.metadata if metadata is None else metadata,
}
async def _unary_unary(
self,
route: str,
request: "IProtoMessage",
response_type: Type["T"],
*,
timeout: Optional[float] = None,
deadline: Optional["Deadline"] = None,
metadata: Optional[MetadataLike] = None,
) -> "T":
"""Make a unary request and return the response."""
async with self.channel.request(
route,
grpclib.const.Cardinality.UNARY_UNARY,
type(request),
response_type,
**self.__resolve_request_kwargs(timeout, deadline, metadata),
) as stream:
await stream.send_message(request, end=True)
response = await stream.recv_message()
assert response is not None
return response
async def _unary_stream(
self,
route: str,
request: "IProtoMessage",
response_type: Type["T"],
*,
timeout: Optional[float] = None,
deadline: Optional["Deadline"] = None,
metadata: Optional[MetadataLike] = None,
) -> AsyncIterator["T"]:
"""Make a unary request and return the stream response iterator."""
async with self.channel.request(
route,
grpclib.const.Cardinality.UNARY_STREAM,
type(request),
response_type,
**self.__resolve_request_kwargs(timeout, deadline, metadata),
) as stream:
await stream.send_message(request, end=True)
async for message in stream:
yield message
async def _stream_unary(
self,
route: str,
request_iterator: MessageSource,
request_type: Type["IProtoMessage"],
response_type: Type["T"],
*,
timeout: Optional[float] = None,
deadline: Optional["Deadline"] = None,
metadata: Optional[MetadataLike] = None,
) -> "T":
"""Make a stream request and return the response."""
async with self.channel.request(
route,
grpclib.const.Cardinality.STREAM_UNARY,
request_type,
response_type,
**self.__resolve_request_kwargs(timeout, deadline, metadata),
) as stream:
await stream.send_request()
await self._send_messages(stream, request_iterator)
response = await stream.recv_message()
assert response is not None
return response
async def _stream_stream(
self,
route: str,
request_iterator: MessageSource,
request_type: Type["IProtoMessage"],
response_type: Type["T"],
*,
timeout: Optional[float] = None,
deadline: Optional["Deadline"] = None,
metadata: Optional[MetadataLike] = None,
) -> AsyncIterator["T"]:
"""
Make a stream request and return an AsyncIterator to iterate over response
messages.
"""
async with self.channel.request(
route,
grpclib.const.Cardinality.STREAM_STREAM,
request_type,
response_type,
**self.__resolve_request_kwargs(timeout, deadline, metadata),
) as stream:
await stream.send_request()
sending_task = asyncio.ensure_future(
self._send_messages(stream, request_iterator)
)
try:
async for response in stream:
yield response
except:
sending_task.cancel()
raise
@staticmethod
async def _send_messages(stream, messages: MessageSource):
if isinstance(messages, AsyncIterable):
async for message in messages:
await stream.send_message(message)
else:
for message in messages:
await stream.send_message(message)
await stream.end()


@@ -0,0 +1,33 @@
from abc import ABC
from collections.abc import AsyncIterable
from typing import (
Any,
Callable,
Dict,
)
import grpclib
import grpclib.server
class ServiceBase(ABC):
"""
Base class for async gRPC servers.
"""
async def _call_rpc_handler_server_stream(
self,
handler: Callable,
stream: grpclib.server.Stream,
request: Any,
) -> None:
response_iter = handler(request)
# check if response is actually an AsyncIterator
# this might be false if the method just returns without
# yielding at least once
# in that case, we just interpret it as an empty iterator
if isinstance(response_iter, AsyncIterable):
async for response_message in response_iter:
await stream.send_message(response_message)
else:
response_iter.close()


@@ -0,0 +1,193 @@
import asyncio
from typing import (
AsyncIterable,
AsyncIterator,
Iterable,
Optional,
TypeVar,
Union,
)
T = TypeVar("T")
class ChannelClosed(Exception):
"""
An exception raised on an attempt to send through a closed channel
"""
class ChannelDone(Exception):
"""
An exception raised on an attempt to send receive from a channel that is both closed
and empty.
"""
class AsyncChannel(AsyncIterable[T]):
"""
A buffered async channel for sending items between coroutines with FIFO ordering.
This makes decoupled bidirectional streaming gRPC requests easy if used like:
.. code-block:: python
client = GeneratedStub(grpclib_chan)
request_channel = AsyncChannel()
# We can start by sending all the requests we already have
await request_channel.send_from([RequestObject(...), RequestObject(...)])
async for response in client.rpc_call(request_channel):
# The response iterator will remain active until the connection is closed
...
# More items can be sent at any time
await request_channel.send(RequestObject(...))
...
# The channel must be closed to complete the gRPC connection
request_channel.close()
Items can be sent through the channel by either:
- providing an iterable to the send_from method
- passing them to the send method one at a time
Items can be received from the channel by either:
- iterating over the channel with a for loop to get all items
- calling the receive method to get one item at a time
If the channel is empty then receivers will wait until either an item appears or the
channel is closed.
Once the channel is closed then subsequent attempts to send through the channel will
fail with a ChannelClosed exception.
When the channel is closed and empty then it is done, and further attempts to receive
from it will fail with a ChannelDone exception.
If multiple coroutines receive from the channel concurrently, each item sent will be
received by only one of the receivers.
:param source:
An optional iterable with items that should be sent through the channel
immediately.
:param buffer_limit:
Limit the number of items that can be buffered in the channel. A value less than
1 implies no limit. If the channel is full then attempts to send more items will
result in the sender waiting until an item is received from the channel.
:param close:
If set to True then the channel will automatically close after exhausting source
or immediately if no source is provided.
"""
def __init__(self, *, buffer_limit: int = 0, close: bool = False):
self._queue: asyncio.Queue[T] = asyncio.Queue(buffer_limit)
self._closed = False
self._waiting_receivers: int = 0
# Track whether flush has been invoked so it can only happen once
self._flushed = False
def __aiter__(self) -> AsyncIterator[T]:
return self
async def __anext__(self) -> T:
if self.done():
raise StopAsyncIteration
self._waiting_receivers += 1
try:
result = await self._queue.get()
if result is self.__flush:
raise StopAsyncIteration
return result
finally:
self._waiting_receivers -= 1
self._queue.task_done()
def closed(self) -> bool:
"""
Returns True if this channel is closed and no longer accepting new items
"""
return self._closed
def done(self) -> bool:
"""
Check if this channel is done.
:return: True if this channel is closed and has been drained of items, in
which case any further attempts to receive an item from this channel will raise
a ChannelDone exception.
"""
# After close the channel is not yet done until there is at least one waiting
# receiver per enqueued item.
return self._closed and self._queue.qsize() <= self._waiting_receivers
async def send_from(
self, source: Union[Iterable[T], AsyncIterable[T]], close: bool = False
) -> "AsyncChannel[T]":
"""
Iterates the given [Async]Iterable and sends all the resulting items.
If close is set to True then subsequent send calls will be rejected with a
ChannelClosed exception.
:param source: an iterable of items to send
:param close:
if True then the channel will be closed after the source has been exhausted
"""
if self._closed:
raise ChannelClosed("Cannot send through a closed channel")
if isinstance(source, AsyncIterable):
async for item in source:
await self._queue.put(item)
else:
for item in source:
await self._queue.put(item)
if close:
# Complete the closing process
self.close()
return self
async def send(self, item: T) -> "AsyncChannel[T]":
"""
Send a single item over this channel.
:param item: The item to send
"""
if self._closed:
raise ChannelClosed("Cannot send through a closed channel")
await self._queue.put(item)
return self
async def receive(self) -> Optional[T]:
"""
Returns the next item from this channel when it becomes available,
or None if the channel is closed before another item is sent.
:return: An item from the channel
"""
if self.done():
raise ChannelDone("Cannot receive from a closed channel")
self._waiting_receivers += 1
try:
result = await self._queue.get()
if result is self.__flush:
return None
return result
finally:
self._waiting_receivers -= 1
self._queue.task_done()
def close(self):
"""
Close this channel to new items
"""
self._closed = True
asyncio.ensure_future(self._flush_queue())
async def _flush_queue(self):
"""
To be called after the channel is closed. Pushes a number of self.__flush
objects to the queue to ensure no waiting consumers get deadlocked.
"""
if not self._flushed:
self._flushed = True
deadlocked_receivers = max(0, self._waiting_receivers - self._queue.qsize())
for _ in range(deadlocked_receivers):
await self._queue.put(self.__flush)
# A special signal object for flushing the queue when the channel is closed
__flush = object()


@@ -0,0 +1 @@
from aristaproto.lib.std.google.protobuf import *


@@ -0,0 +1 @@
from aristaproto.lib.std.google.protobuf.compiler import *


File diff suppressed because it is too large.


@@ -0,0 +1,210 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: google/protobuf/compiler/plugin.proto
# plugin: python-aristaproto
# This file has been @generated
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from dataclasses import dataclass
else:
from pydantic.dataclasses import dataclass
from typing import List
import aristaproto
import aristaproto.lib.pydantic.google.protobuf as aristaproto_lib_pydantic_google_protobuf
class CodeGeneratorResponseFeature(aristaproto.Enum):
"""Sync with code_generator.h."""
FEATURE_NONE = 0
FEATURE_PROTO3_OPTIONAL = 1
FEATURE_SUPPORTS_EDITIONS = 2
@dataclass(eq=False, repr=False)
class Version(aristaproto.Message):
"""The version number of protocol compiler."""
major: int = aristaproto.int32_field(1)
minor: int = aristaproto.int32_field(2)
patch: int = aristaproto.int32_field(3)
suffix: str = aristaproto.string_field(4)
"""
A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
be empty for mainline stable releases.
"""
@dataclass(eq=False, repr=False)
class CodeGeneratorRequest(aristaproto.Message):
"""An encoded CodeGeneratorRequest is written to the plugin's stdin."""
file_to_generate: List[str] = aristaproto.string_field(1)
"""
The .proto files that were explicitly listed on the command-line. The
code generator should generate code only for these files. Each file's
descriptor will be included in proto_file, below.
"""
parameter: str = aristaproto.string_field(2)
"""The generator parameter passed on the command-line."""
proto_file: List[
"aristaproto_lib_pydantic_google_protobuf.FileDescriptorProto"
] = aristaproto.message_field(15)
"""
FileDescriptorProtos for all files in files_to_generate and everything
they import. The files will appear in topological order, so each file
appears before any file that imports it.
Note: the files listed in files_to_generate will include runtime-retention
options only, but all other files will include source-retention options.
The source_file_descriptors field below is available in case you need
source-retention options for files_to_generate.
protoc guarantees that all proto_files will be written after
the fields above, even though this is not technically guaranteed by the
protobuf wire format. This theoretically could allow a plugin to stream
in the FileDescriptorProtos and handle them one by one rather than read
the entire set into memory at once. However, as of this writing, this
is not similarly optimized on protoc's end -- it will store all fields in
memory at once before sending them to the plugin.
Type names of fields and extensions in the FileDescriptorProto are always
fully qualified.
"""
source_file_descriptors: List[
"aristaproto_lib_pydantic_google_protobuf.FileDescriptorProto"
] = aristaproto.message_field(17)
"""
File descriptors with all options, including source-retention options.
These descriptors are only provided for the files listed in
files_to_generate.
"""
compiler_version: "Version" = aristaproto.message_field(3)
"""The version number of protocol compiler."""
@dataclass(eq=False, repr=False)
class CodeGeneratorResponse(aristaproto.Message):
"""The plugin writes an encoded CodeGeneratorResponse to stdout."""
error: str = aristaproto.string_field(1)
"""
Error message. If non-empty, code generation failed. The plugin process
should exit with status code zero even if it reports an error in this way.
This should be used to indicate errors in .proto files which prevent the
code generator from generating correct code. Errors which indicate a
problem in protoc itself -- such as the input CodeGeneratorRequest being
unparseable -- should be reported by writing a message to stderr and
exiting with a non-zero status code.
"""
supported_features: int = aristaproto.uint64_field(2)
"""
A bitmask of supported features that the code generator supports.
This is a bitwise "or" of values from the Feature enum.
"""
minimum_edition: int = aristaproto.int32_field(3)
"""
The minimum edition this plugin supports. This will be treated as an
Edition enum, but we want to allow unknown values. It should be specified
according to the edition enum value, *not* the edition number. Only takes
effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
"""
maximum_edition: int = aristaproto.int32_field(4)
"""
The maximum edition this plugin supports. This will be treated as an
Edition enum, but we want to allow unknown values. It should be specified
according to the edition enum value, *not* the edition number. Only takes
effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
"""
file: List["CodeGeneratorResponseFile"] = aristaproto.message_field(15)
@dataclass(eq=False, repr=False)
class CodeGeneratorResponseFile(aristaproto.Message):
"""Represents a single generated file."""
name: str = aristaproto.string_field(1)
"""
The file name, relative to the output directory. The name must not
contain "." or ".." components and must be relative, not be absolute (so,
the file cannot lie outside the output directory). "/" must be used as
the path separator, not "\".
If the name is omitted, the content will be appended to the previous
file. This allows the generator to break large files into small chunks,
and allows the generated text to be streamed back to protoc so that large
files need not reside completely in memory at one time. Note that as of
this writing protoc does not optimize for this -- it will read the entire
CodeGeneratorResponse before writing files to disk.
"""
insertion_point: str = aristaproto.string_field(2)
"""
If non-empty, indicates that the named file should already exist, and the
content here is to be inserted into that file at a defined insertion
point. This feature allows a code generator to extend the output
produced by another code generator. The original generator may provide
insertion points by placing special annotations in the file that look
like:
@@protoc_insertion_point(NAME)
The annotation can have arbitrary text before and after it on the line,
which allows it to be placed in a comment. NAME should be replaced with
an identifier naming the point -- this is what other generators will use
as the insertion_point. Code inserted at this point will be placed
immediately above the line containing the insertion point (thus multiple
insertions to the same point will come out in the order they were added).
The double-@ is intended to make it unlikely that the generated code
could contain things that look like insertion points by accident.
For example, the C++ code generator places the following line in the
.pb.h files that it generates:
// @@protoc_insertion_point(namespace_scope)
This line appears within the scope of the file's package namespace, but
outside of any particular class. Another plugin can then specify the
insertion_point "namespace_scope" to generate additional classes or
other declarations that should be placed in this scope.
Note that if the line containing the insertion point begins with
whitespace, the same whitespace will be added to every line of the
inserted text. This is useful for languages like Python, where
indentation matters. In these languages, the insertion point comment
should be indented the same amount as any inserted code will need to be
in order to work correctly in that context.
The code generator that generates the initial file and the one which
inserts into it must both run as part of a single invocation of protoc.
Code generators are executed in the order in which they appear on the
command line.
If |insertion_point| is present, |name| must also be present.
"""
content: str = aristaproto.string_field(15)
"""The file contents."""
generated_code_info: (
"aristaproto_lib_pydantic_google_protobuf.GeneratedCodeInfo"
) = aristaproto.message_field(16)
"""
Information describing the file content being inserted. If an insertion
point is used, this information will be appropriately offset and inserted
into the code generation metadata for the generated files.
"""
CodeGeneratorRequest.__pydantic_model__.update_forward_refs() # type: ignore
CodeGeneratorResponse.__pydantic_model__.update_forward_refs() # type: ignore
CodeGeneratorResponseFile.__pydantic_model__.update_forward_refs() # type: ignore


File diff suppressed because it is too large.


@@ -0,0 +1,198 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: google/protobuf/compiler/plugin.proto
# plugin: python-aristaproto
# This file has been @generated
from dataclasses import dataclass
from typing import List
import aristaproto
import aristaproto.lib.google.protobuf as aristaproto_lib_google_protobuf
class CodeGeneratorResponseFeature(aristaproto.Enum):
"""Sync with code_generator.h."""
FEATURE_NONE = 0
FEATURE_PROTO3_OPTIONAL = 1
FEATURE_SUPPORTS_EDITIONS = 2
@dataclass(eq=False, repr=False)
class Version(aristaproto.Message):
"""The version number of protocol compiler."""
major: int = aristaproto.int32_field(1)
minor: int = aristaproto.int32_field(2)
patch: int = aristaproto.int32_field(3)
suffix: str = aristaproto.string_field(4)
"""
A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
be empty for mainline stable releases.
"""
@dataclass(eq=False, repr=False)
class CodeGeneratorRequest(aristaproto.Message):
"""An encoded CodeGeneratorRequest is written to the plugin's stdin."""
file_to_generate: List[str] = aristaproto.string_field(1)
"""
The .proto files that were explicitly listed on the command-line. The
code generator should generate code only for these files. Each file's
descriptor will be included in proto_file, below.
"""
parameter: str = aristaproto.string_field(2)
"""The generator parameter passed on the command-line."""
proto_file: List[
"aristaproto_lib_google_protobuf.FileDescriptorProto"
] = aristaproto.message_field(15)
"""
FileDescriptorProtos for all files in files_to_generate and everything
they import. The files will appear in topological order, so each file
appears before any file that imports it.
Note: the files listed in files_to_generate will include runtime-retention
options only, but all other files will include source-retention options.
The source_file_descriptors field below is available in case you need
source-retention options for files_to_generate.
protoc guarantees that all proto_files will be written after
the fields above, even though this is not technically guaranteed by the
protobuf wire format. This theoretically could allow a plugin to stream
in the FileDescriptorProtos and handle them one by one rather than read
the entire set into memory at once. However, as of this writing, this
is not similarly optimized on protoc's end -- it will store all fields in
memory at once before sending them to the plugin.
Type names of fields and extensions in the FileDescriptorProto are always
fully qualified.
"""
source_file_descriptors: List[
"aristaproto_lib_google_protobuf.FileDescriptorProto"
] = aristaproto.message_field(17)
"""
File descriptors with all options, including source-retention options.
These descriptors are only provided for the files listed in
files_to_generate.
"""
compiler_version: "Version" = aristaproto.message_field(3)
"""The version number of protocol compiler."""
@dataclass(eq=False, repr=False)
class CodeGeneratorResponse(aristaproto.Message):
"""The plugin writes an encoded CodeGeneratorResponse to stdout."""
error: str = aristaproto.string_field(1)
"""
Error message. If non-empty, code generation failed. The plugin process
should exit with status code zero even if it reports an error in this way.
This should be used to indicate errors in .proto files which prevent the
code generator from generating correct code. Errors which indicate a
problem in protoc itself -- such as the input CodeGeneratorRequest being
unparseable -- should be reported by writing a message to stderr and
exiting with a non-zero status code.
"""
supported_features: int = aristaproto.uint64_field(2)
"""
A bitmask of supported features that the code generator supports.
This is a bitwise "or" of values from the Feature enum.
"""
minimum_edition: int = aristaproto.int32_field(3)
"""
The minimum edition this plugin supports. This will be treated as an
Edition enum, but we want to allow unknown values. It should be specified
according to the edition enum value, *not* the edition number. Only takes
effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
"""
maximum_edition: int = aristaproto.int32_field(4)
"""
The maximum edition this plugin supports. This will be treated as an
Edition enum, but we want to allow unknown values. It should be specified
according to the edition enum value, *not* the edition number. Only takes
effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
"""
file: List["CodeGeneratorResponseFile"] = aristaproto.message_field(15)
@dataclass(eq=False, repr=False)
class CodeGeneratorResponseFile(aristaproto.Message):
"""Represents a single generated file."""
name: str = aristaproto.string_field(1)
"""
The file name, relative to the output directory. The name must not
contain "." or ".." components and must be relative, not be absolute (so,
the file cannot lie outside the output directory). "/" must be used as
the path separator, not "\".
If the name is omitted, the content will be appended to the previous
file. This allows the generator to break large files into small chunks,
and allows the generated text to be streamed back to protoc so that large
files need not reside completely in memory at one time. Note that as of
this writing protoc does not optimize for this -- it will read the entire
CodeGeneratorResponse before writing files to disk.
"""
insertion_point: str = aristaproto.string_field(2)
"""
If non-empty, indicates that the named file should already exist, and the
content here is to be inserted into that file at a defined insertion
point. This feature allows a code generator to extend the output
produced by another code generator. The original generator may provide
insertion points by placing special annotations in the file that look
like:
@@protoc_insertion_point(NAME)
The annotation can have arbitrary text before and after it on the line,
which allows it to be placed in a comment. NAME should be replaced with
an identifier naming the point -- this is what other generators will use
as the insertion_point. Code inserted at this point will be placed
immediately above the line containing the insertion point (thus multiple
insertions to the same point will come out in the order they were added).
The double-@ is intended to make it unlikely that the generated code
could contain things that look like insertion points by accident.
For example, the C++ code generator places the following line in the
.pb.h files that it generates:
// @@protoc_insertion_point(namespace_scope)
This line appears within the scope of the file's package namespace, but
outside of any particular class. Another plugin can then specify the
insertion_point "namespace_scope" to generate additional classes or
other declarations that should be placed in this scope.
Note that if the line containing the insertion point begins with
whitespace, the same whitespace will be added to every line of the
inserted text. This is useful for languages like Python, where
indentation matters. In these languages, the insertion point comment
should be indented the same amount as any inserted code will need to be
in order to work correctly in that context.
The code generator that generates the initial file and the one which
inserts into it must both run as part of a single invocation of protoc.
Code generators are executed in the order in which they appear on the
command line.
If |insertion_point| is present, |name| must also be present.
"""
content: str = aristaproto.string_field(15)
"""The file contents."""
generated_code_info: "aristaproto_lib_google_protobuf.GeneratedCodeInfo" = (
aristaproto.message_field(16)
)
"""
Information describing the file content being inserted. If an insertion
point is used, this information will be appropriately offset and inserted
into the code generation metadata for the generated files.
"""

View file

@ -0,0 +1 @@
from .main import main

View file

@ -0,0 +1,4 @@
from .main import main
main()

View file

@ -0,0 +1,50 @@
import os.path
try:
# aristaproto[compiler] specific dependencies
import black
import isort.api
import jinja2
except ImportError as err:
print(
"\033[31m"
f"Unable to import `{err.name}` from aristaproto plugin! "
"Please ensure that you've installed aristaproto as "
'`pip install "aristaproto[compiler]"` so that compiler dependencies '
"are included."
"\033[0m"
)
raise SystemExit(1)
from .models import OutputTemplate
def outputfile_compiler(output_file: OutputTemplate) -> str:
templates_folder = os.path.abspath(
os.path.join(os.path.dirname(__file__), "..", "templates")
)
env = jinja2.Environment(
trim_blocks=True,
lstrip_blocks=True,
loader=jinja2.FileSystemLoader(templates_folder),
)
template = env.get_template("template.py.j2")
code = template.render(output_file=output_file)
code = isort.api.sort_code_string(
code=code,
show_diff=False,
py_version=37,
profile="black",
combine_as_imports=True,
lines_after_imports=2,
quiet=True,
force_grid_wrap=2,
known_third_party=["grpclib", "aristaproto"],
)
return black.format_str(
src_contents=code,
mode=black.Mode(),
)
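The isort-then-black pipeline above can be exercised on any code string independently of the Jinja template; a small sketch (assuming the `aristaproto[compiler]` extras are installed):

```python
import black
import isort.api

raw = "import sys\nimport os\nx=[1,2,3]\n"
tidy = isort.api.sort_code_string(code=raw, profile="black")
print(black.format_str(src_contents=tidy, mode=black.Mode()))
# import os
# import sys
#
# x = [1, 2, 3]
```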

52
src/aristaproto/plugin/main.py Executable file
View file

@ -0,0 +1,52 @@
#!/usr/bin/env python
import os
import sys
from aristaproto.lib.google.protobuf.compiler import (
CodeGeneratorRequest,
CodeGeneratorResponse,
)
from aristaproto.plugin.models import monkey_patch_oneof_index
from aristaproto.plugin.parser import generate_code
def main() -> None:
"""The plugin's main entry point."""
# Read request message from stdin
data = sys.stdin.buffer.read()
# Apply workaround for proto2/proto3 differences in protoc messages
monkey_patch_oneof_index()
# Parse request
request = CodeGeneratorRequest()
request.parse(data)
dump_file = os.getenv("ARISTAPROTO_DUMP")
if dump_file:
dump_request(dump_file, request)
# Generate code
response = generate_code(request)
# Serialise response message
output = response.SerializeToString()
# Write to stdout
sys.stdout.buffer.write(output)
def dump_request(dump_file: str, request: CodeGeneratorRequest) -> None:
"""
For developers: supports running plugin.py standalone so it's possible to debug it.
Run protoc (or generate.py) with ARISTAPROTO_DUMP="yourfile.bin" to write the request to a file.
Then run plugin.py from your IDE in debugging mode, and redirect stdin to the file.
"""
with open(str(dump_file), "wb") as fh:
sys.stderr.write(f"\033[31mWriting input from protoc to: {dump_file}\033[0m\n")
fh.write(request.SerializeToString())
if __name__ == "__main__":
main()
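The ARISTAPROTO_DUMP hook above makes it possible to replay a captured request without protoc; a hedged sketch ("yourfile.bin" is the hypothetical dump path from the docstring):

```python
from aristaproto.lib.google.protobuf.compiler import CodeGeneratorRequest
from aristaproto.plugin.models import monkey_patch_oneof_index
from aristaproto.plugin.parser import generate_code

monkey_patch_oneof_index()
with open("yourfile.bin", "rb") as fh:  # hypothetical dump path
    request = CodeGeneratorRequest().parse(fh.read())
response = generate_code(request)
for file in response.file:
    print(file.name)
```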

View file

@ -0,0 +1,851 @@
"""Plugin model dataclasses.
These classes are meant to be an intermediate representation
of protobuf objects. They are used to organize the data collected during parsing.
The general intention is to create a doubly-linked tree-like structure
with the following types of references:
- Downwards references: from message -> fields, from output package -> messages
or from service -> service methods
- Upwards references: from field -> message, message -> package.
- Input/output message references: from a service method to its corresponding
input/output messages, which may even be in another package.
There are convenience methods to allow climbing up and down this tree, for
example to retrieve the list of all messages that are in the same package as
the current message.
Most of these classes take as inputs:
- proto_obj: A reference to its corresponding protobuf object as
presented by the protoc plugin.
- parent: a reference to the parent object in the tree.
With this information, the class is able to expose attributes,
such as a pythonized name, that will be calculated from proto_obj.
The instantiation should also attach a reference to the new object
into the corresponding place within its parent object. For example,
instantiating field `A` with parent message `B` should add a
reference to `A` to `B`'s `fields` attribute.
"""
import builtins
import re
import textwrap
from dataclasses import (
dataclass,
field,
)
from typing import (
Dict,
Iterable,
Iterator,
List,
Optional,
Set,
Type,
Union,
)
import aristaproto
from aristaproto import which_one_of
from aristaproto.casing import sanitize_name
from aristaproto.compile.importing import (
get_type_reference,
parse_source_type_name,
)
from aristaproto.compile.naming import (
pythonize_class_name,
pythonize_field_name,
pythonize_method_name,
)
from aristaproto.lib.google.protobuf import (
DescriptorProto,
EnumDescriptorProto,
Field,
FieldDescriptorProto,
FieldDescriptorProtoLabel,
FieldDescriptorProtoType,
FileDescriptorProto,
MethodDescriptorProto,
)
from aristaproto.lib.google.protobuf.compiler import CodeGeneratorRequest
from ..compile.importing import (
get_type_reference,
parse_source_type_name,
)
from ..compile.naming import (
pythonize_class_name,
pythonize_enum_member_name,
pythonize_field_name,
pythonize_method_name,
)
# Create a unique placeholder to deal with
# https://stackoverflow.com/questions/51575931/class-inheritance-in-python-3-7-dataclasses
PLACEHOLDER = object()
# Organize proto types into categories
PROTO_FLOAT_TYPES = (
FieldDescriptorProtoType.TYPE_DOUBLE, # 1
FieldDescriptorProtoType.TYPE_FLOAT, # 2
)
PROTO_INT_TYPES = (
FieldDescriptorProtoType.TYPE_INT64, # 3
FieldDescriptorProtoType.TYPE_UINT64, # 4
FieldDescriptorProtoType.TYPE_INT32, # 5
FieldDescriptorProtoType.TYPE_FIXED64, # 6
FieldDescriptorProtoType.TYPE_FIXED32, # 7
FieldDescriptorProtoType.TYPE_UINT32, # 13
FieldDescriptorProtoType.TYPE_SFIXED32, # 15
FieldDescriptorProtoType.TYPE_SFIXED64, # 16
FieldDescriptorProtoType.TYPE_SINT32, # 17
FieldDescriptorProtoType.TYPE_SINT64, # 18
)
PROTO_BOOL_TYPES = (FieldDescriptorProtoType.TYPE_BOOL,) # 8
PROTO_STR_TYPES = (FieldDescriptorProtoType.TYPE_STRING,) # 9
PROTO_BYTES_TYPES = (FieldDescriptorProtoType.TYPE_BYTES,) # 12
PROTO_MESSAGE_TYPES = (
FieldDescriptorProtoType.TYPE_MESSAGE, # 11
FieldDescriptorProtoType.TYPE_ENUM, # 14
)
PROTO_MAP_TYPES = (FieldDescriptorProtoType.TYPE_MESSAGE,) # 11
PROTO_PACKED_TYPES = (
FieldDescriptorProtoType.TYPE_DOUBLE, # 1
FieldDescriptorProtoType.TYPE_FLOAT, # 2
FieldDescriptorProtoType.TYPE_INT64, # 3
FieldDescriptorProtoType.TYPE_UINT64, # 4
FieldDescriptorProtoType.TYPE_INT32, # 5
FieldDescriptorProtoType.TYPE_FIXED64, # 6
FieldDescriptorProtoType.TYPE_FIXED32, # 7
FieldDescriptorProtoType.TYPE_BOOL, # 8
FieldDescriptorProtoType.TYPE_UINT32, # 13
FieldDescriptorProtoType.TYPE_SFIXED32, # 15
FieldDescriptorProtoType.TYPE_SFIXED64, # 16
FieldDescriptorProtoType.TYPE_SINT32, # 17
FieldDescriptorProtoType.TYPE_SINT64, # 18
)
def monkey_patch_oneof_index():
"""
The compiler message types are written for proto2, but we read them as proto3.
For this to work in the case of the oneof_index fields, which depend on being able
to tell whether they were set, we have to treat them as oneof fields. This method
monkey patches the generated classes after the fact to force this behaviour.
"""
object.__setattr__(
FieldDescriptorProto.__dataclass_fields__["oneof_index"].metadata[
"aristaproto"
],
"group",
"oneof_index",
)
object.__setattr__(
Field.__dataclass_fields__["oneof_index"].metadata["aristaproto"],
"group",
"oneof_index",
)
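A hedged sketch of what this patch enables: afterwards, `which_one_of` can tell an explicitly set `oneof_index` of 0 apart from an unset one, which plain proto3 semantics cannot:

```python
from aristaproto import which_one_of
from aristaproto.lib.google.protobuf import FieldDescriptorProto
from aristaproto.plugin.models import monkey_patch_oneof_index

monkey_patch_oneof_index()

unset = FieldDescriptorProto(name="a")
member = FieldDescriptorProto(name="b", oneof_index=0)
print(which_one_of(unset, "oneof_index"))   # ("", None): never set
print(which_one_of(member, "oneof_index"))  # ("oneof_index", 0): explicitly set
```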
def get_comment(
proto_file: "FileDescriptorProto", path: List[int], indent: int = 4
) -> str:
pad = " " * indent
for sci_loc in proto_file.source_code_info.location:
if list(sci_loc.path) == path and sci_loc.leading_comments:
lines = sci_loc.leading_comments.strip().replace("\t", " ").split("\n")
# This is a field, message, enum, service, or method
if len(lines) == 1 and len(lines[0]) < 79 - indent - 6:
# Strip the surrounding quotes so the single-line docstring stays valid.
lines[0] = lines[0].strip('"')
return f'{pad}"""{lines[0]}"""'
else:
# rstrip to remove trailing spaces, including whitespace-only lines.
padded = [f"\n{pad}{line}".rstrip(" ") for line in lines]
joined = "".join(padded)
return f'{pad}"""{joined}\n{pad}"""'
return ""
class ProtoContentBase:
"""Methods common to MessageCompiler, ServiceCompiler and ServiceMethodCompiler."""
source_file: FileDescriptorProto
path: List[int]
comment_indent: int = 4
parent: Union["aristaproto.Message", "OutputTemplate"]
__dataclass_fields__: Dict[str, object]
def __post_init__(self) -> None:
"""Checks that no fake default fields were left as placeholders."""
for field_name in self.__dataclass_fields__:
if getattr(self, field_name) is PLACEHOLDER:
raise ValueError(f"`{field_name}` is a required field.")
@property
def output_file(self) -> "OutputTemplate":
current = self
while not isinstance(current, OutputTemplate):
current = current.parent
return current
@property
def request(self) -> "PluginRequestCompiler":
current = self
while not isinstance(current, OutputTemplate):
current = current.parent
return current.parent_request
@property
def comment(self) -> str:
"""Crawl the proto source code and retrieve comments
for this object.
"""
return get_comment(
proto_file=self.source_file, path=self.path, indent=self.comment_indent
)
@dataclass
class PluginRequestCompiler:
plugin_request_obj: CodeGeneratorRequest
output_packages: Dict[str, "OutputTemplate"] = field(default_factory=dict)
@property
def all_messages(self) -> List["MessageCompiler"]:
"""All of the messages in this request.
Returns
-------
List[MessageCompiler]
List of all of the messages in this request.
"""
return [
msg for output in self.output_packages.values() for msg in output.messages
]
@dataclass
class OutputTemplate:
"""Representation of an output .py file.
Each output file corresponds to a .proto input file,
but may need references to other .proto files to be
built.
"""
parent_request: PluginRequestCompiler
package_proto_obj: FileDescriptorProto
input_files: List["FileDescriptorProto"] = field(default_factory=list)
imports: Set[str] = field(default_factory=set)
datetime_imports: Set[str] = field(default_factory=set)
typing_imports: Set[str] = field(default_factory=set)
pydantic_imports: Set[str] = field(default_factory=set)
builtins_import: bool = False
messages: List["MessageCompiler"] = field(default_factory=list)
enums: List["EnumDefinitionCompiler"] = field(default_factory=list)
services: List["ServiceCompiler"] = field(default_factory=list)
imports_type_checking_only: Set[str] = field(default_factory=set)
pydantic_dataclasses: bool = False
output: bool = True
@property
def package(self) -> str:
"""Name of input package.
Returns
-------
str
Name of input package.
"""
return self.package_proto_obj.package
@property
def input_filenames(self) -> Iterable[str]:
"""Names of the input files used to build this output.
Returns
-------
Iterable[str]
Names of the input files used to build this output.
"""
return sorted(f.name for f in self.input_files)
@property
def python_module_imports(self) -> Set[str]:
imports = set()
if any(x for x in self.messages if any(x.deprecated_fields)):
imports.add("warnings")
if self.builtins_import:
imports.add("builtins")
return imports
@dataclass
class MessageCompiler(ProtoContentBase):
"""Representation of a protobuf message."""
source_file: FileDescriptorProto
parent: Union["MessageCompiler", OutputTemplate] = PLACEHOLDER
proto_obj: DescriptorProto = PLACEHOLDER
path: List[int] = PLACEHOLDER
fields: List[Union["FieldCompiler", "MessageCompiler"]] = field(
default_factory=list
)
deprecated: bool = field(default=False, init=False)
builtins_types: Set[str] = field(default_factory=set)
def __post_init__(self) -> None:
# Add message to output file
if isinstance(self.parent, OutputTemplate):
if isinstance(self, EnumDefinitionCompiler):
self.output_file.enums.append(self)
else:
self.output_file.messages.append(self)
self.deprecated = self.proto_obj.options.deprecated
super().__post_init__()
@property
def proto_name(self) -> str:
return self.proto_obj.name
@property
def py_name(self) -> str:
return pythonize_class_name(self.proto_name)
@property
def annotation(self) -> str:
if self.repeated:
return f"List[{self.py_name}]"
return self.py_name
@property
def deprecated_fields(self) -> Iterator[str]:
for f in self.fields:
if f.deprecated:
yield f.py_name
@property
def has_deprecated_fields(self) -> bool:
return any(self.deprecated_fields)
@property
def has_oneof_fields(self) -> bool:
return any(isinstance(field, OneOfFieldCompiler) for field in self.fields)
@property
def has_message_field(self) -> bool:
return any(
(
field.proto_obj.type in PROTO_MESSAGE_TYPES
for field in self.fields
if isinstance(field.proto_obj, FieldDescriptorProto)
)
)
def is_map(
proto_field_obj: FieldDescriptorProto, parent_message: DescriptorProto
) -> bool:
"""True if proto_field_obj is a map, otherwise False."""
if proto_field_obj.type == FieldDescriptorProtoType.TYPE_MESSAGE:
if not hasattr(parent_message, "nested_type"):
return False
# This might be a map...
message_type = proto_field_obj.type_name.split(".").pop().lower()
map_entry = f"{proto_field_obj.name.replace('_', '').lower()}entry"
if message_type == map_entry:
for nested in parent_message.nested_type: # parent message
if (
nested.name.replace("_", "").lower() == map_entry
and nested.options.map_entry
):
return True
return False
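The check above leans on protoc's naming convention for synthesized map entries; a tiny self-contained sketch of the comparison for a hypothetical field `map<string, int32> my_values = 1;`:

```python
# protoc synthesizes a nested message "MyValuesEntry" with
# options.map_entry = True for the field "my_values".
field_name = "my_values"
nested_message_name = "MyValuesEntry"

map_entry = f"{field_name.replace('_', '').lower()}entry"         # "myvaluesentry"
print(nested_message_name.replace("_", "").lower() == map_entry)  # True
```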
def is_oneof(proto_field_obj: FieldDescriptorProto) -> bool:
"""
True if proto_field_obj is a OneOf, otherwise False.
.. warning::
Because the message from protoc is defined in proto2, and aristaproto works with
proto3, and interpreting the FieldDescriptorProto.oneof_index field requires
distinguishing between default and unset values (which proto3 doesn't support),
we have to hack the generated FieldDescriptorProto class for this to work.
The hack consists of setting group="oneof_index" in the field metadata,
essentially making oneof_index the sole member of a oneof group, which allows
us to tell whether it was set, via the which_one_of interface.
"""
return (
not proto_field_obj.proto3_optional
and which_one_of(proto_field_obj, "oneof_index")[0] == "oneof_index"
)
@dataclass
class FieldCompiler(MessageCompiler):
parent: MessageCompiler = PLACEHOLDER
proto_obj: FieldDescriptorProto = PLACEHOLDER
def __post_init__(self) -> None:
# Add field to message
self.parent.fields.append(self)
# Check for new imports
self.add_imports_to(self.output_file)
super().__post_init__() # call FieldCompiler-> MessageCompiler __post_init__
def get_field_string(self, indent: int = 4) -> str:
"""Construct string representation of this field as a field."""
name = f"{self.py_name}"
annotations = f": {self.annotation}"
field_args = ", ".join(
([""] + self.aristaproto_field_args) if self.aristaproto_field_args else []
)
aristaproto_field_type = (
f"aristaproto.{self.field_type}_field({self.proto_obj.number}{field_args})"
)
if self.py_name in dir(builtins):
self.parent.builtins_types.add(self.py_name)
return f"{name}{annotations} = {aristaproto_field_type}"
@property
def aristaproto_field_args(self) -> List[str]:
args = []
if self.field_wraps:
args.append(f"wraps={self.field_wraps}")
if self.optional:
args.append(f"optional=True")
return args
@property
def datetime_imports(self) -> Set[str]:
imports = set()
annotation = self.annotation
# FIXME: false positives - e.g. `MyDatetimedelta`
if "timedelta" in annotation:
imports.add("timedelta")
if "datetime" in annotation:
imports.add("datetime")
return imports
@property
def typing_imports(self) -> Set[str]:
imports = set()
annotation = self.annotation
if "Optional[" in annotation:
imports.add("Optional")
if "List[" in annotation:
imports.add("List")
if "Dict[" in annotation:
imports.add("Dict")
return imports
@property
def pydantic_imports(self) -> Set[str]:
return set()
@property
def use_builtins(self) -> bool:
return self.py_type in self.parent.builtins_types or (
self.py_type == self.py_name and self.py_name in dir(builtins)
)
def add_imports_to(self, output_file: OutputTemplate) -> None:
output_file.datetime_imports.update(self.datetime_imports)
output_file.typing_imports.update(self.typing_imports)
output_file.pydantic_imports.update(self.pydantic_imports)
output_file.builtins_import = output_file.builtins_import or self.use_builtins
@property
def field_wraps(self) -> Optional[str]:
"""Returns aristaproto wrapped field type or None."""
match_wrapper = re.match(
r"\.google\.protobuf\.(.+)Value$", self.proto_obj.type_name
)
if match_wrapper:
wrapped_type = "TYPE_" + match_wrapper.group(1).upper()
if hasattr(aristaproto, wrapped_type):
return f"aristaproto.{wrapped_type}"
return None
@property
def repeated(self) -> bool:
return (
self.proto_obj.label == FieldDescriptorProtoLabel.LABEL_REPEATED
and not is_map(self.proto_obj, self.parent)
)
@property
def optional(self) -> bool:
return self.proto_obj.proto3_optional
@property
def mutable(self) -> bool:
"""True if the field is a mutable type, otherwise False."""
return self.annotation.startswith(("List[", "Dict["))
@property
def field_type(self) -> str:
"""String representation of proto field type."""
return (
FieldDescriptorProtoType(self.proto_obj.type)
.name.lower()
.replace("type_", "")
)
@property
def default_value_string(self) -> str:
"""Python representation of the default proto value."""
if self.repeated:
return "[]"
if self.optional:
return "None"
if self.py_type == "int":
return "0"
if self.py_type == "float":
return "0.0"
elif self.py_type == "bool":
return "False"
elif self.py_type == "str":
return '""'
elif self.py_type == "bytes":
return 'b""'
elif self.field_type == "enum":
enum_proto_obj_name = self.proto_obj.type_name.split(".").pop()
enum = next(
e
for e in self.output_file.enums
if e.proto_obj.name == enum_proto_obj_name
)
return enum.default_value_string
else:
# Message type
return "None"
@property
def packed(self) -> bool:
"""True if the wire representation is a packed format."""
return self.repeated and self.proto_obj.type in PROTO_PACKED_TYPES
@property
def py_name(self) -> str:
"""Pythonized name."""
return pythonize_field_name(self.proto_name)
@property
def proto_name(self) -> str:
"""Original protobuf name."""
return self.proto_obj.name
@property
def py_type(self) -> str:
"""String representation of Python type."""
if self.proto_obj.type in PROTO_FLOAT_TYPES:
return "float"
elif self.proto_obj.type in PROTO_INT_TYPES:
return "int"
elif self.proto_obj.type in PROTO_BOOL_TYPES:
return "bool"
elif self.proto_obj.type in PROTO_STR_TYPES:
return "str"
elif self.proto_obj.type in PROTO_BYTES_TYPES:
return "bytes"
elif self.proto_obj.type in PROTO_MESSAGE_TYPES:
# Type referencing another defined Message or a named enum
return get_type_reference(
package=self.output_file.package,
imports=self.output_file.imports,
source_type=self.proto_obj.type_name,
pydantic=self.output_file.pydantic_dataclasses,
)
else:
raise NotImplementedError(f"Unknown type {self.proto_obj.type}")
@property
def annotation(self) -> str:
py_type = self.py_type
if self.use_builtins:
py_type = f"builtins.{py_type}"
if self.repeated:
return f"List[{py_type}]"
if self.optional:
return f"Optional[{py_type}]"
return py_type
@dataclass
class OneOfFieldCompiler(FieldCompiler):
@property
def aristaproto_field_args(self) -> List[str]:
args = super().aristaproto_field_args
group = self.parent.proto_obj.oneof_decl[self.proto_obj.oneof_index].name
args.append(f'group="{group}"')
return args
@dataclass
class PydanticOneOfFieldCompiler(OneOfFieldCompiler):
@property
def optional(self) -> bool:
# Force the optional to be True. This will allow the pydantic dataclass
# to validate the object correctly by allowing the field to be left empty.
# We add a pydantic validator later to ensure exactly one field is defined.
return True
@property
def pydantic_imports(self) -> Set[str]:
return {"root_validator"}
@dataclass
class MapEntryCompiler(FieldCompiler):
py_k_type: Type = PLACEHOLDER
py_v_type: Type = PLACEHOLDER
proto_k_type: str = PLACEHOLDER
proto_v_type: str = PLACEHOLDER
def __post_init__(self) -> None:
"""Explore nested types and set k_type and v_type if unset."""
map_entry = f"{self.proto_obj.name.replace('_', '').lower()}entry"
for nested in self.parent.proto_obj.nested_type:
if (
nested.name.replace("_", "").lower() == map_entry
and nested.options.map_entry
):
# Get Python types
self.py_k_type = FieldCompiler(
source_file=self.source_file,
parent=self,
proto_obj=nested.field[0],  # key
path=[],
).py_type
self.py_v_type = FieldCompiler(
source_file=self.source_file,
parent=self,
proto_obj=nested.field[1],  # value
path=[],
).py_type
# Get proto types
self.proto_k_type = FieldDescriptorProtoType(nested.field[0].type).name
self.proto_v_type = FieldDescriptorProtoType(nested.field[1].type).name
super().__post_init__() # call FieldCompiler-> MessageCompiler __post_init__
@property
def aristaproto_field_args(self) -> List[str]:
return [f"aristaproto.{self.proto_k_type}", f"aristaproto.{self.proto_v_type}"]
@property
def field_type(self) -> str:
return "map"
@property
def annotation(self) -> str:
return f"Dict[{self.py_k_type}, {self.py_v_type}]"
@property
def repeated(self) -> bool:
return False # maps cannot be repeated
@dataclass
class EnumDefinitionCompiler(MessageCompiler):
"""Representation of a proto Enum definition."""
proto_obj: EnumDescriptorProto = PLACEHOLDER
entries: List["EnumDefinitionCompiler.EnumEntry"] = PLACEHOLDER
@dataclass(unsafe_hash=True)
class EnumEntry:
"""Representation of an Enum entry."""
name: str
value: int
comment: str
def __post_init__(self) -> None:
# Get entries/allowed values for this Enum
self.entries = [
self.EnumEntry(
name=pythonize_enum_member_name(
entry_proto_value.name, self.proto_obj.name
),
value=entry_proto_value.number,
comment=get_comment(
proto_file=self.source_file, path=self.path + [2, entry_number]
),
)
for entry_number, entry_proto_value in enumerate(self.proto_obj.value)
]
super().__post_init__() # call MessageCompiler __post_init__
@property
def default_value_string(self) -> str:
"""Python representation of the default value for Enums.
As per the spec, this is the first value of the Enum.
"""
return str(self.entries[0].value) # ideally, should ALWAYS be int(0)!
@dataclass
class ServiceCompiler(ProtoContentBase):
parent: OutputTemplate = PLACEHOLDER
proto_obj: DescriptorProto = PLACEHOLDER
path: List[int] = PLACEHOLDER
methods: List["ServiceMethodCompiler"] = field(default_factory=list)
def __post_init__(self) -> None:
# Add service to output file
self.output_file.services.append(self)
self.output_file.typing_imports.add("Dict")
super().__post_init__() # check for unset fields
@property
def proto_name(self) -> str:
return self.proto_obj.name
@property
def py_name(self) -> str:
return pythonize_class_name(self.proto_name)
@dataclass
class ServiceMethodCompiler(ProtoContentBase):
parent: ServiceCompiler
proto_obj: MethodDescriptorProto
path: List[int] = PLACEHOLDER
comment_indent: int = 8
def __post_init__(self) -> None:
# Add method to service
self.parent.methods.append(self)
# Check for imports
if "Optional" in self.py_output_message_type:
self.output_file.typing_imports.add("Optional")
# Check for Async imports
if self.client_streaming:
self.output_file.typing_imports.add("AsyncIterable")
self.output_file.typing_imports.add("Iterable")
self.output_file.typing_imports.add("Union")
# Required by both client and server
if self.client_streaming or self.server_streaming:
self.output_file.typing_imports.add("AsyncIterator")
# add imports required for request arguments timeout, deadline and metadata
self.output_file.typing_imports.add("Optional")
self.output_file.imports_type_checking_only.add("import grpclib.server")
self.output_file.imports_type_checking_only.add(
"from aristaproto.grpc.grpclib_client import MetadataLike"
)
self.output_file.imports_type_checking_only.add(
"from grpclib.metadata import Deadline"
)
super().__post_init__() # check for unset fields
@property
def py_name(self) -> str:
"""Pythonized method name."""
return pythonize_method_name(self.proto_obj.name)
@property
def proto_name(self) -> str:
"""Original protobuf name."""
return self.proto_obj.name
@property
def route(self) -> str:
package_part = (
f"{self.output_file.package}." if self.output_file.package else ""
)
return f"/{package_part}{self.parent.proto_name}/{self.proto_name}"
@property
def py_input_message(self) -> Optional[MessageCompiler]:
"""Find the input message object.
Returns
-------
Optional[MessageCompiler]
Method instance representing the input message.
If no input message could be found or there are no
input messages, None is returned.
"""
package, name = parse_source_type_name(self.proto_obj.input_type)
# Nested types are currently flattened without dots.
# Todo: keep a fully qualified name in types that is
# comparable with method.input_type
for msg in self.request.all_messages:
if (
msg.py_name == pythonize_class_name(name.replace(".", ""))
and msg.output_file.package == package
):
return msg
return None
@property
def py_input_message_type(self) -> str:
"""String representation of the Python type corresponding to the
input message.
Returns
-------
str
String representation of the Python type corresponding to the input message.
"""
return get_type_reference(
package=self.output_file.package,
imports=self.output_file.imports,
source_type=self.proto_obj.input_type,
unwrap=False,
pydantic=self.output_file.pydantic_dataclasses,
).strip('"')
@property
def py_input_message_param(self) -> str:
"""Param name corresponding to py_input_message_type.
Returns
-------
str
Param name corresponding to py_input_message_type.
"""
return pythonize_field_name(self.py_input_message_type)
@property
def py_output_message_type(self) -> str:
"""String representation of the Python type corresponding to the
output message.
Returns
-------
str
String representation of the Python type corresponding to the output message.
"""
return get_type_reference(
package=self.output_file.package,
imports=self.output_file.imports,
source_type=self.proto_obj.output_type,
unwrap=False,
pydantic=self.output_file.pydantic_dataclasses,
).strip('"')
@property
def client_streaming(self) -> bool:
return self.proto_obj.client_streaming
@property
def server_streaming(self) -> bool:
return self.proto_obj.server_streaming
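As the module docstring promises, the tree can be climbed in both directions; a minimal hedged sketch wiring up one message by hand:

```python
from aristaproto.lib.google.protobuf import DescriptorProto, FileDescriptorProto
from aristaproto.lib.google.protobuf.compiler import CodeGeneratorRequest
from aristaproto.plugin.models import (
    MessageCompiler,
    OutputTemplate,
    PluginRequestCompiler,
)

request = PluginRequestCompiler(plugin_request_obj=CodeGeneratorRequest())
output = OutputTemplate(
    parent_request=request,
    package_proto_obj=FileDescriptorProto(package="demo"),
)
msg = MessageCompiler(
    source_file=FileDescriptorProto(),
    parent=output,
    proto_obj=DescriptorProto(name="Thing"),
    path=[4, 0],
)
print(msg.py_name)                # Thing
print(msg.output_file is output)  # True  (climb up to the OutputTemplate)
print(msg.request is request)     # True  (climb up to the request)
```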

View file

@ -0,0 +1,221 @@
import pathlib
import sys
from typing import (
Generator,
List,
Set,
Tuple,
Union,
)
from aristaproto.lib.google.protobuf import (
DescriptorProto,
EnumDescriptorProto,
FieldDescriptorProto,
FileDescriptorProto,
ServiceDescriptorProto,
)
from aristaproto.lib.google.protobuf.compiler import (
CodeGeneratorRequest,
CodeGeneratorResponse,
CodeGeneratorResponseFeature,
CodeGeneratorResponseFile,
)
from .compiler import outputfile_compiler
from .models import (
EnumDefinitionCompiler,
FieldCompiler,
MapEntryCompiler,
MessageCompiler,
OneOfFieldCompiler,
OutputTemplate,
PluginRequestCompiler,
PydanticOneOfFieldCompiler,
ServiceCompiler,
ServiceMethodCompiler,
is_map,
is_oneof,
)
def traverse(
proto_file: FileDescriptorProto,
) -> Generator[
Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None
]:
# Todo: Keep information about nested hierarchy
def _traverse(
path: List[int],
items: Union[List[EnumDescriptorProto], List[DescriptorProto]],
prefix: str = "",
) -> Generator[
Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None
]:
for i, item in enumerate(items):
# Adjust the name since we flatten the hierarchy.
# Todo: don't change the name, but include full name in returned tuple
item.name = next_prefix = f"{prefix}_{item.name}"
yield item, [*path, i]
if isinstance(item, DescriptorProto):
# Get nested types.
yield from _traverse([*path, i, 4], item.enum_type, next_prefix)
yield from _traverse([*path, i, 3], item.nested_type, next_prefix)
yield from _traverse([5], proto_file.enum_type)
yield from _traverse([4], proto_file.message_type)
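A hedged sketch of the flattening: a nested `Inner` message is renamed with an underscore-joined prefix, and `pythonize_class_name` later collapses the underscores into PascalCase:

```python
from aristaproto.lib.google.protobuf import DescriptorProto, FileDescriptorProto
from aristaproto.plugin.parser import traverse

proto_file = FileDescriptorProto(
    message_type=[
        DescriptorProto(name="Outer", nested_type=[DescriptorProto(name="Inner")])
    ]
)
for item, path in traverse(proto_file):
    print(item.name, path)
# _Outer [4, 0]
# _Outer_Inner [4, 0, 3, 0]
```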
def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
response = CodeGeneratorResponse()
plugin_options = request.parameter.split(",") if request.parameter else []
response.supported_features = CodeGeneratorResponseFeature.FEATURE_PROTO3_OPTIONAL
request_data = PluginRequestCompiler(plugin_request_obj=request)
# Gather output packages
for proto_file in request.proto_file:
output_package_name = proto_file.package
if output_package_name not in request_data.output_packages:
# Create a new output if there is no output for this package
request_data.output_packages[output_package_name] = OutputTemplate(
parent_request=request_data, package_proto_obj=proto_file
)
# Add this input file to the output corresponding to this package
request_data.output_packages[output_package_name].input_files.append(proto_file)
if (
proto_file.package == "google.protobuf"
and "INCLUDE_GOOGLE" not in plugin_options
):
# If not INCLUDE_GOOGLE,
# skip outputting Google's well-known types
request_data.output_packages[output_package_name].output = False
if "pydantic_dataclasses" in plugin_options:
request_data.output_packages[
output_package_name
].pydantic_dataclasses = True
# Read Messages and Enums
# We need to read Messages before Services so that we can
# get the references to input/output messages for each service
for output_package_name, output_package in request_data.output_packages.items():
for proto_input_file in output_package.input_files:
for item, path in traverse(proto_input_file):
read_protobuf_type(
source_file=proto_input_file,
item=item,
path=path,
output_package=output_package,
)
# Read Services
for output_package_name, output_package in request_data.output_packages.items():
for proto_input_file in output_package.input_files:
for index, service in enumerate(proto_input_file.service):
read_protobuf_service(service, index, output_package)
# Generate output files
output_paths: Set[pathlib.Path] = set()
for output_package_name, output_package in request_data.output_packages.items():
if not output_package.output:
continue
# Add files to the response object
output_path = pathlib.Path(*output_package_name.split("."), "__init__.py")
output_paths.add(output_path)
response.file.append(
CodeGeneratorResponseFile(
name=str(output_path),
# Render and then format the output file
content=outputfile_compiler(output_file=output_package),
)
)
# Make each output directory a package with __init__ file
init_files = {
directory.joinpath("__init__.py")
for path in output_paths
for directory in path.parents
if not directory.joinpath("__init__.py").exists()
} - output_paths
for init_file in init_files:
response.file.append(CodeGeneratorResponseFile(name=str(init_file)))
for output_package_name in sorted(output_paths.union(init_files)):
print(f"Writing {output_package_name}", file=sys.stderr)
return response
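The `__init__.py` bookkeeping near the end of `generate_code` is easy to check in isolation; a sketch without the `exists()` filter:

```python
import pathlib

output_paths = {pathlib.Path("a", "b", "__init__.py")}
init_files = {
    directory.joinpath("__init__.py")
    for path in output_paths
    for directory in path.parents
} - output_paths
print(sorted(str(p) for p in init_files))
# ['__init__.py', 'a/__init__.py']
```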
def _make_one_of_field_compiler(
output_package: OutputTemplate,
source_file: "FileDescriptorProto",
parent: MessageCompiler,
proto_obj: "FieldDescriptorProto",
path: List[int],
) -> FieldCompiler:
pydantic = output_package.pydantic_dataclasses
Cls = PydanticOneOfFieldCompiler if pydantic else OneOfFieldCompiler
return Cls(
source_file=source_file,
parent=parent,
proto_obj=proto_obj,
path=path,
)
def read_protobuf_type(
item: DescriptorProto,
path: List[int],
source_file: "FileDescriptorProto",
output_package: OutputTemplate,
) -> None:
if isinstance(item, DescriptorProto):
if item.options.map_entry:
# Skip generated map entry messages since we just use dicts
return
# Process Message
message_data = MessageCompiler(
source_file=source_file, parent=output_package, proto_obj=item, path=path
)
for index, field in enumerate(item.field):
if is_map(field, item):
MapEntryCompiler(
source_file=source_file,
parent=message_data,
proto_obj=field,
path=path + [2, index],
)
elif is_oneof(field):
_make_one_of_field_compiler(
output_package, source_file, message_data, field, path + [2, index]
)
else:
FieldCompiler(
source_file=source_file,
parent=message_data,
proto_obj=field,
path=path + [2, index],
)
elif isinstance(item, EnumDescriptorProto):
# Enum
EnumDefinitionCompiler(
source_file=source_file, parent=output_package, proto_obj=item, path=path
)
def read_protobuf_service(
service: ServiceDescriptorProto, index: int, output_package: OutputTemplate
) -> None:
service_data = ServiceCompiler(
parent=output_package, proto_obj=service, path=[6, index]
)
for j, method in enumerate(service.method):
ServiceMethodCompiler(
parent=service_data, proto_obj=method, path=[6, index, 2, j]
)

View file

@ -0,0 +1,2 @@
@SET plugin_dir=%~dp0
@python -m %plugin_dir% %*

0
src/aristaproto/py.typed Normal file
View file

View file

@ -0,0 +1,257 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: {{ ', '.join(output_file.input_filenames) }}
# plugin: python-aristaproto
# This file has been @generated
{% for i in output_file.python_module_imports|sort %}
import {{ i }}
{% endfor %}
{% if output_file.pydantic_dataclasses %}
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from dataclasses import dataclass
else:
from pydantic.dataclasses import dataclass
{%- else -%}
from dataclasses import dataclass
{% endif %}
{% if output_file.datetime_imports %}
from datetime import {% for i in output_file.datetime_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}
{% endif %}
{% if output_file.typing_imports %}
from typing import {% for i in output_file.typing_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}
{% endif %}
{% if output_file.pydantic_imports %}
from pydantic import {% for i in output_file.pydantic_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}
{% endif %}
import aristaproto
{% if output_file.services %}
from aristaproto.grpc.grpclib_server import ServiceBase
import grpclib
{% endif %}
{% for i in output_file.imports|sort %}
{{ i }}
{% endfor %}
{% if output_file.imports_type_checking_only %}
from typing import TYPE_CHECKING
if TYPE_CHECKING:
{% for i in output_file.imports_type_checking_only|sort %} {{ i }}
{% endfor %}
{% endif %}
{% if output_file.enums %}{% for enum in output_file.enums %}
class {{ enum.py_name }}(aristaproto.Enum):
{% if enum.comment %}
{{ enum.comment }}
{% endif %}
{% for entry in enum.entries %}
{{ entry.name }} = {{ entry.value }}
{% if entry.comment %}
{{ entry.comment }}
{% endif %}
{% endfor %}
{% endfor %}
{% endif %}
{% for message in output_file.messages %}
@dataclass(eq=False, repr=False)
class {{ message.py_name }}(aristaproto.Message):
{% if message.comment %}
{{ message.comment }}
{% endif %}
{% for field in message.fields %}
{{ field.get_field_string() }}
{% if field.comment %}
{{ field.comment }}
{% endif %}
{% endfor %}
{% if not message.fields %}
pass
{% endif %}
{% if message.deprecated or message.has_deprecated_fields %}
def __post_init__(self) -> None:
{% if message.deprecated %}
warnings.warn("{{ message.py_name }} is deprecated", DeprecationWarning)
{% endif %}
super().__post_init__()
{% for field in message.deprecated_fields %}
if self.is_set("{{ field }}"):
warnings.warn("{{ message.py_name }}.{{ field }} is deprecated", DeprecationWarning)
{% endfor %}
{% endif %}
{% if output_file.pydantic_dataclasses and message.has_oneof_fields %}
@root_validator()
def check_oneof(cls, values):
return cls._validate_field_groups(values)
{% endif %}
{% endfor %}
{% for service in output_file.services %}
class {{ service.py_name }}Stub(aristaproto.ServiceStub):
{% if service.comment %}
{{ service.comment }}
{% elif not service.methods %}
pass
{% endif %}
{% for method in service.methods %}
async def {{ method.py_name }}(self
{%- if not method.client_streaming -%}
{%- if method.py_input_message -%}, {{ method.py_input_message_param }}: "{{ method.py_input_message_type }}"{%- endif -%}
{%- else -%}
{# Client streaming: need a request iterator instead #}
, {{ method.py_input_message_param }}_iterator: Union[AsyncIterable["{{ method.py_input_message_type }}"], Iterable["{{ method.py_input_message_type }}"]]
{%- endif -%}
,
*
, timeout: Optional[float] = None
, deadline: Optional["Deadline"] = None
, metadata: Optional["MetadataLike"] = None
) -> {% if method.server_streaming %}AsyncIterator["{{ method.py_output_message_type }}"]{% else %}"{{ method.py_output_message_type }}"{% endif %}:
{% if method.comment %}
{{ method.comment }}
{% endif %}
{% if method.server_streaming %}
{% if method.client_streaming %}
async for response in self._stream_stream(
"{{ method.route }}",
{{ method.py_input_message_param }}_iterator,
{{ method.py_input_message_type }},
{{ method.py_output_message_type.strip('"') }},
timeout=timeout,
deadline=deadline,
metadata=metadata,
):
yield response
{% else %}{# i.e. not client streaming #}
async for response in self._unary_stream(
"{{ method.route }}",
{{ method.py_input_message_param }},
{{ method.py_output_message_type.strip('"') }},
timeout=timeout,
deadline=deadline,
metadata=metadata,
):
yield response
{% endif %}{# if client streaming #}
{% else %}{# i.e. not server streaming #}
{% if method.client_streaming %}
return await self._stream_unary(
"{{ method.route }}",
{{ method.py_input_message_param }}_iterator,
{{ method.py_input_message_type }},
{{ method.py_output_message_type.strip('"') }},
timeout=timeout,
deadline=deadline,
metadata=metadata,
)
{% else %}{# i.e. not client streaming #}
return await self._unary_unary(
"{{ method.route }}",
{{ method.py_input_message_param }},
{{ method.py_output_message_type.strip('"') }},
timeout=timeout,
deadline=deadline,
metadata=metadata,
)
{% endif %}{# client streaming #}
{% endif %}
{% endfor %}
{% endfor %}
{% for service in output_file.services %}
class {{ service.py_name }}Base(ServiceBase):
{% if service.comment %}
{{ service.comment }}
{% endif %}
{% for method in service.methods %}
async def {{ method.py_name }}(self
{%- if not method.client_streaming -%}
{%- if method.py_input_message -%}, {{ method.py_input_message_param }}: "{{ method.py_input_message_type }}"{%- endif -%}
{%- else -%}
{# Client streaming: need a request iterator instead #}
, {{ method.py_input_message_param }}_iterator: AsyncIterator["{{ method.py_input_message_type }}"]
{%- endif -%}
) -> {% if method.server_streaming %}AsyncIterator["{{ method.py_output_message_type }}"]{% else %}"{{ method.py_output_message_type }}"{% endif %}:
{% if method.comment %}
{{ method.comment }}
{% endif %}
raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
{% if method.server_streaming %}
{# Commented out to avoid unreachable code. #}
{# yield {{ method.py_output_message_type }}() #}
{% endif %}
{% endfor %}
{% for method in service.methods %}
async def __rpc_{{ method.py_name }}(self, stream: "grpclib.server.Stream[{{ method.py_input_message_type }}, {{ method.py_output_message_type }}]") -> None:
{% if not method.client_streaming %}
request = await stream.recv_message()
{% else %}
request = stream.__aiter__()
{% endif %}
{% if not method.server_streaming %}
response = await self.{{ method.py_name }}(request)
await stream.send_message(response)
{% else %}
await self._call_rpc_handler_server_stream(
self.{{ method.py_name }},
stream,
request,
)
{% endif %}
{% endfor %}
def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
return {
{% for method in service.methods %}
"{{ method.route }}": grpclib.const.Handler(
self.__rpc_{{ method.py_name }},
{% if not method.client_streaming and not method.server_streaming %}
grpclib.const.Cardinality.UNARY_UNARY,
{% elif not method.client_streaming and method.server_streaming %}
grpclib.const.Cardinality.UNARY_STREAM,
{% elif method.client_streaming and not method.server_streaming %}
grpclib.const.Cardinality.STREAM_UNARY,
{% else %}
grpclib.const.Cardinality.STREAM_STREAM,
{% endif %}
{{ method.py_input_message_type }},
{{ method.py_output_message_type }},
),
{% endfor %}
}
{% endfor %}
{% if output_file.pydantic_dataclasses %}
{% for message in output_file.messages %}
{% if message.has_message_field %}
{{ message.py_name }}.__pydantic_model__.update_forward_refs() # type: ignore
{% endif %}
{% endfor %}
{% endif %}

56
src/aristaproto/utils.py Normal file
View file

@ -0,0 +1,56 @@
from __future__ import annotations
from typing import (
Any,
Callable,
Generic,
Optional,
Type,
TypeVar,
)
from typing_extensions import (
Concatenate,
ParamSpec,
Self,
)
SelfT = TypeVar("SelfT")
P = ParamSpec("P")
HybridT = TypeVar("HybridT", covariant=True)
class hybridmethod(Generic[SelfT, P, HybridT]):
def __init__(
self,
func: Callable[
Concatenate[type[SelfT], P], HybridT
], # Must be the classmethod version
):
self.cls_func = func
self.__doc__ = func.__doc__
self.instance_func: Optional[Callable[Concatenate[SelfT, P], HybridT]] = None
def instancemethod(self, func: Callable[Concatenate[SelfT, P], HybridT]) -> Self:
self.instance_func = func
return self
def __get__(
self, instance: Optional[SelfT], owner: Type[SelfT]
) -> Callable[P, HybridT]:
if instance is None or self.instance_func is None:
# either bound to the class, or no instance method available
return self.cls_func.__get__(owner, None)
return self.instance_func.__get__(instance, owner)
T_co = TypeVar("T_co")
TT_co = TypeVar("TT_co", bound="type[Any]")
class classproperty(Generic[TT_co, T_co]):
def __init__(self, func: Callable[[TT_co], T_co]):
self.__func__ = func
def __get__(self, instance: Any, type: TT_co) -> T_co:
return self.__func__(type)
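A hedged usage sketch of the two descriptors above (`Config` is an invented example class):

```python
class Config:
    _default_name = "config"

    @hybridmethod
    def describe(cls) -> str:
        return f"class default: {cls._default_name}"

    @describe.instancemethod
    def describe(self) -> str:
        return "instance specific"

    @classproperty
    def default_name(cls) -> str:
        return cls._default_name


print(Config.describe())    # class default: config
print(Config().describe())  # instance specific
print(Config.default_name)  # config
```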

91
tests/README.md Normal file
View file

@ -0,0 +1,91 @@
# Standard Tests Development Guide
Standard test cases are found in [aristaproto/tests/inputs](inputs), where each subdirectory represents a test case that is verified in isolation.
```
inputs/
bool/
double/
int32/
...
```
## Test case directory structure
Each testcase has a `<name>.proto` file with a message called `Test`, and optionally a matching `.json` file and a custom test called `test_*.py`.
```bash
bool/
bool.proto
bool.json # optional
test_bool.py # optional
```
### proto
`<name>.proto` &mdash; *The protobuf message to test*
```protobuf
syntax = "proto3";
message Test {
bool value = 1;
}
```
You can add multiple `.proto` files to the test case, as long as one file matches the directory name.
### json
`<name>.json` &mdash; *Test-data to validate the message with*
```json
{
"value": true
}
```
### pytest
`test_<name>.py` &mdash; *Custom test to validate specific aspects of the generated class*
```python
from tests.output_aristaproto.bool.bool import Test
def test_value():
message = Test()
assert not message.value, "Boolean is False by default"
```
## Standard tests
The following tests are automatically executed for all cases:
- [x] Can the generated python code be imported?
- [x] Can the generated message class be instantiated?
- [x] Is the generated code compatible with Google's `grpc_tools.protoc` implementation?
- _when `.json` is present_
## Running the tests
- `pipenv run generate`
This generates:
- `aristaproto/tests/output_aristaproto` &mdash; *the plugin generated python classes*
- `aristaproto/tests/output_reference` &mdash; *reference implementation classes*
- `pipenv run test`
## Intentionally Failing tests
The standard test suite includes tests that fail by intention. These tests document known bugs and missing features that are intended to be corrected in the future.
When running `pytest`, they show up as `x` or `X` in the test results.
```
aristaproto/tests/test_inputs.py ..x...x..x...x.X........xx........x.....x.......x.xx....x...................... [ 84%]
```
- `.` &mdash; PASSED
- `x` &mdash; XFAIL: expected failure
- `X` &mdash; XPASS: expected failure, but still passed
Test cases marked for expected failure are declared in [inputs/config.py](inputs/config.py).

0
tests/__init__.py Normal file
View file

22
tests/conftest.py Normal file
View file

@ -0,0 +1,22 @@
import copy
import sys
import pytest
def pytest_addoption(parser):
parser.addoption(
"--repeat", type=int, default=1, help="repeat the operation multiple times"
)
@pytest.fixture(scope="session")
def repeat(request):
return request.config.getoption("repeat")
@pytest.fixture
def reset_sys_path():
original = copy.deepcopy(sys.path)
yield
sys.path = original
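A hedged sketch of how a test might consume these fixtures (the test body is illustrative):

```python
def test_roundtrip_repeatedly(repeat):
    # Runs once by default; `pytest --repeat 100` runs it 100 times.
    for _ in range(repeat):
        assert 1 + 1 == 2  # placeholder for a real serialization round-trip
```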

196
tests/generate.py Executable file
View file

@ -0,0 +1,196 @@
#!/usr/bin/env python
import asyncio
import os
import platform
import shutil
import sys
from pathlib import Path
from typing import Set
from tests.util import (
get_directories,
inputs_path,
output_path_aristaproto,
output_path_aristaproto_pydantic,
output_path_reference,
protoc,
)
# Force pure-python implementation instead of C++, otherwise imports
# break things because we can't properly reset the symbol database.
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"
def clear_directory(dir_path: Path):
for file_or_directory in dir_path.glob("*"):
if file_or_directory.is_dir():
shutil.rmtree(file_or_directory)
else:
file_or_directory.unlink()
async def generate(whitelist: Set[str], verbose: bool):
test_case_names = set(get_directories(inputs_path)) - {"__pycache__"}
path_whitelist = set()
name_whitelist = set()
for item in whitelist:
if item in test_case_names:
name_whitelist.add(item)
continue
path_whitelist.add(item)
generation_tasks = []
selected_test_case_names = []
for test_case_name in sorted(test_case_names):
test_case_input_path = inputs_path.joinpath(test_case_name).resolve()
if (
whitelist
and str(test_case_input_path) not in path_whitelist
and test_case_name not in name_whitelist
):
continue
generation_tasks.append(
generate_test_case_output(test_case_input_path, test_case_name, verbose)
)
selected_test_case_names.append(test_case_name)
failed_test_cases = []
# Wait for all subprocesses and match any failures to names to report
for test_case_name, result in zip(
selected_test_case_names, await asyncio.gather(*generation_tasks)
):
if result != 0:
failed_test_cases.append(test_case_name)
if len(failed_test_cases) > 0:
sys.stderr.write(
"\n\033[31;1;4mFailed to generate the following test cases:\033[0m\n"
)
for failed_test_case in failed_test_cases:
sys.stderr.write(f"- {failed_test_case}\n")
sys.exit(1)
async def generate_test_case_output(
test_case_input_path: Path, test_case_name: str, verbose: bool
) -> int:
"""
Returns the max of the subprocess return values
"""
test_case_output_path_reference = output_path_reference.joinpath(test_case_name)
test_case_output_path_aristaproto = output_path_aristaproto
test_case_output_path_aristaproto_pyd = output_path_aristaproto_pydantic
os.makedirs(test_case_output_path_reference, exist_ok=True)
os.makedirs(test_case_output_path_aristaproto, exist_ok=True)
os.makedirs(test_case_output_path_aristaproto_pyd, exist_ok=True)
clear_directory(test_case_output_path_reference)
clear_directory(test_case_output_path_aristaproto)
(
(ref_out, ref_err, ref_code),
(plg_out, plg_err, plg_code),
(plg_out_pyd, plg_err_pyd, plg_code_pyd),
) = await asyncio.gather(
protoc(test_case_input_path, test_case_output_path_reference, True),
protoc(test_case_input_path, test_case_output_path_aristaproto, False),
protoc(
test_case_input_path, test_case_output_path_aristaproto_pyd, False, True
),
)
if ref_code == 0:
print(f"\033[31;1;4mGenerated reference output for {test_case_name!r}\033[0m")
else:
print(
f"\033[31;1;4mFailed to generate reference output for {test_case_name!r}\033[0m"
)
if verbose:
if ref_out:
print("Reference stdout:")
sys.stdout.buffer.write(ref_out)
sys.stdout.buffer.flush()
if ref_err:
print("Reference stderr:")
sys.stderr.buffer.write(ref_err)
sys.stderr.buffer.flush()
if plg_code == 0:
print(f"\033[31;1;4mGenerated plugin output for {test_case_name!r}\033[0m")
else:
print(
f"\033[31;1;4mFailed to generate plugin output for {test_case_name!r}\033[0m"
)
if verbose:
if plg_out:
print("Plugin stdout:")
sys.stdout.buffer.write(plg_out)
sys.stdout.buffer.flush()
if plg_err:
print("Plugin stderr:")
sys.stderr.buffer.write(plg_err)
sys.stderr.buffer.flush()
if plg_code_pyd == 0:
print(
f"\033[31;1;4mGenerated plugin (pydantic compatible) output for {test_case_name!r}\033[0m"
)
else:
print(
f"\033[31;1;4mFailed to generate plugin (pydantic compatible) output for {test_case_name!r}\033[0m"
)
if verbose:
if plg_out_pyd:
print("Plugin stdout:")
sys.stdout.buffer.write(plg_out_pyd)
sys.stdout.buffer.flush()
if plg_err_pyd:
print("Plugin stderr:")
sys.stderr.buffer.write(plg_err_pyd)
sys.stderr.buffer.flush()
return max(ref_code, plg_code, plg_code_pyd)
HELP = "\n".join(
(
"Usage: python generate.py [-h] [-v] [DIRECTORIES or NAMES]",
"Generate python classes for standard tests.",
"",
"DIRECTORIES One or more relative or absolute directories of test-cases to generate classes for.",
" python generate.py inputs/bool inputs/double inputs/enum",
"",
"NAMES One or more test-case names to generate classes for.",
" python generate.py bool double enums",
)
)
def main():
if set(sys.argv).intersection({"-h", "--help"}):
print(HELP)
return
if sys.argv[1:2] == ["-v"]:
verbose = True
whitelist = set(sys.argv[2:])
else:
verbose = False
whitelist = set(sys.argv[1:])
if platform.system() == "Windows":
asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
asyncio.run(generate(whitelist, verbose))
if __name__ == "__main__":
main()

0
tests/grpc/__init__.py Normal file
View file

View file

@ -0,0 +1,298 @@
import asyncio
import sys
import uuid
import grpclib
import grpclib.client
import grpclib.metadata
import grpclib.server
import pytest
from grpclib.testing import ChannelFor
from aristaproto.grpc.util.async_channel import AsyncChannel
from tests.output_aristaproto.service import (
DoThingRequest,
DoThingResponse,
GetThingRequest,
TestStub as ThingServiceClient,
)
from .thing_service import ThingService
async def _test_client(client: ThingServiceClient, name="clean room", **kwargs):
response = await client.do_thing(DoThingRequest(name=name), **kwargs)
assert response.names == [name]
def _assert_request_meta_received(deadline, metadata):
def server_side_test(stream):
assert stream.deadline._timestamp == pytest.approx(
deadline._timestamp, 1
), "The provided deadline should be received serverside"
assert (
stream.metadata["authorization"] == metadata["authorization"]
), "The provided authorization metadata should be received serverside"
return server_side_test
@pytest.fixture
def handler_trailer_only_unauthenticated():
async def handler(stream: grpclib.server.Stream):
await stream.recv_message()
await stream.send_initial_metadata()
await stream.send_trailing_metadata(status=grpclib.Status.UNAUTHENTICATED)
return handler
@pytest.mark.asyncio
async def test_simple_service_call():
async with ChannelFor([ThingService()]) as channel:
await _test_client(ThingServiceClient(channel))
@pytest.mark.asyncio
async def test_trailer_only_error_unary_unary(
mocker, handler_trailer_only_unauthenticated
):
service = ThingService()
mocker.patch.object(
service,
"do_thing",
side_effect=handler_trailer_only_unauthenticated,
autospec=True,
)
async with ChannelFor([service]) as channel:
with pytest.raises(grpclib.exceptions.GRPCError) as e:
await ThingServiceClient(channel).do_thing(DoThingRequest(name="something"))
assert e.value.status == grpclib.Status.UNAUTHENTICATED
@pytest.mark.asyncio
async def test_trailer_only_error_stream_unary(
mocker, handler_trailer_only_unauthenticated
):
service = ThingService()
mocker.patch.object(
service,
"do_many_things",
side_effect=handler_trailer_only_unauthenticated,
autospec=True,
)
async with ChannelFor([service]) as channel:
with pytest.raises(grpclib.exceptions.GRPCError) as e:
await ThingServiceClient(channel).do_many_things(
do_thing_request_iterator=[DoThingRequest(name="something")]
)
await _test_client(ThingServiceClient(channel))
assert e.value.status == grpclib.Status.UNAUTHENTICATED
@pytest.mark.asyncio
@pytest.mark.skipif(
sys.version_info < (3, 8), reason="async mock spy only works for python3.8+"
)
async def test_service_call_mutable_defaults(mocker):
async with ChannelFor([ThingService()]) as channel:
client = ThingServiceClient(channel)
spy = mocker.spy(client, "_unary_unary")
await _test_client(client)
comments = spy.call_args_list[-1].args[1].comments
await _test_client(client)
assert spy.call_args_list[-1].args[1].comments is not comments
@pytest.mark.asyncio
async def test_service_call_with_upfront_request_params():
# Setting deadline
deadline = grpclib.metadata.Deadline.from_timeout(22)
metadata = {"authorization": "12345"}
async with ChannelFor(
[ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]
) as channel:
await _test_client(
ThingServiceClient(channel, deadline=deadline, metadata=metadata)
)
# Setting timeout
timeout = 99
deadline = grpclib.metadata.Deadline.from_timeout(timeout)
metadata = {"authorization": "12345"}
async with ChannelFor(
[ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]
) as channel:
await _test_client(
ThingServiceClient(channel, timeout=timeout, metadata=metadata)
)
@pytest.mark.asyncio
async def test_service_call_lower_level_with_overrides():
THING_TO_DO = "get milk"
# Setting deadline
deadline = grpclib.metadata.Deadline.from_timeout(22)
metadata = {"authorization": "12345"}
kwarg_deadline = grpclib.metadata.Deadline.from_timeout(28)
kwarg_metadata = {"authorization": "12345"}
async with ChannelFor(
[ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]
) as channel:
client = ThingServiceClient(channel, deadline=deadline, metadata=metadata)
response = await client._unary_unary(
"/service.Test/DoThing",
DoThingRequest(THING_TO_DO),
DoThingResponse,
deadline=kwarg_deadline,
metadata=kwarg_metadata,
)
assert response.names == [THING_TO_DO]
# Setting timeout
timeout = 99
deadline = grpclib.metadata.Deadline.from_timeout(timeout)
metadata = {"authorization": "12345"}
kwarg_timeout = 9000
kwarg_deadline = grpclib.metadata.Deadline.from_timeout(kwarg_timeout)
kwarg_metadata = {"authorization": "09876"}
async with ChannelFor(
[
ThingService(
test_hook=_assert_request_meta_received(kwarg_deadline, kwarg_metadata),
)
]
) as channel:
client = ThingServiceClient(channel, deadline=deadline, metadata=metadata)
response = await client._unary_unary(
"/service.Test/DoThing",
DoThingRequest(THING_TO_DO),
DoThingResponse,
timeout=kwarg_timeout,
metadata=kwarg_metadata,
)
assert response.names == [THING_TO_DO]
@pytest.mark.asyncio
@pytest.mark.parametrize(
("overrides_gen",),
[
(lambda: dict(timeout=10),),
(lambda: dict(deadline=grpclib.metadata.Deadline.from_timeout(10)),),
(lambda: dict(metadata={"authorization": str(uuid.uuid4())}),),
(lambda: dict(timeout=20, metadata={"authorization": str(uuid.uuid4())}),),
],
)
async def test_service_call_high_level_with_overrides(mocker, overrides_gen):
overrides = overrides_gen()
request_spy = mocker.spy(grpclib.client.Channel, "request")
name = str(uuid.uuid4())
defaults = dict(
timeout=99,
deadline=grpclib.metadata.Deadline.from_timeout(99),
metadata={"authorization": name},
)
async with ChannelFor(
[
ThingService(
test_hook=_assert_request_meta_received(
deadline=grpclib.metadata.Deadline.from_timeout(
overrides.get("timeout", 99)
),
metadata=overrides.get("metadata", defaults.get("metadata")),
)
)
]
) as channel:
client = ThingServiceClient(channel, **defaults)
await _test_client(client, name=name, **overrides)
assert request_spy.call_count == 1
# for python <3.8, request_spy.call_args.kwargs does not work
_, request_spy_call_kwargs = request_spy.call_args_list[0]
# ensure all overrides were successful
for key, value in overrides.items():
assert key in request_spy_call_kwargs
assert request_spy_call_kwargs[key] == value
# ensure default values were retained
for key in set(defaults.keys()) - set(overrides.keys()):
assert key in request_spy_call_kwargs
assert request_spy_call_kwargs[key] == defaults[key]
@pytest.mark.asyncio
async def test_async_gen_for_unary_stream_request():
thing_name = "my milkshakes"
async with ChannelFor([ThingService()]) as channel:
client = ThingServiceClient(channel)
expected_versions = [5, 4, 3, 2, 1]
async for response in client.get_thing_versions(
GetThingRequest(name=thing_name)
):
assert response.name == thing_name
assert response.version == expected_versions.pop()
@pytest.mark.asyncio
async def test_async_gen_for_stream_stream_request():
some_things = ["cake", "cricket", "coral reef"]
more_things = ["ball", "that", "56kmodem", "liberal humanism", "cheesesticks"]
expected_things = (*some_things, *more_things)
async with ChannelFor([ThingService()]) as channel:
client = ThingServiceClient(channel)
# Use an AsyncChannel to decouple sending and receiving; it'll send some_things
# immediately and we'll use it to send more_things later, after receiving some
# results
request_chan = AsyncChannel()
send_initial_requests = asyncio.ensure_future(
request_chan.send_from(GetThingRequest(name) for name in some_things)
)
response_index = 0
async for response in client.get_different_things(request_chan):
assert response.name == expected_things[response_index]
assert response.version == response_index + 1
response_index += 1
if more_things:
# Send some more requests as we receive responses to be sure coordination of
# send/receive events doesn't matter
await request_chan.send(GetThingRequest(more_things.pop(0)))
elif not send_initial_requests.done():
# Make sure the sending task has completed
await send_initial_requests
else:
# No more things to send; make sure the channel is closed
request_chan.close()
assert response_index == len(
expected_things
), "Didn't receive all expected responses"
@pytest.mark.asyncio
async def test_stream_unary_with_empty_iterable():
things = [] # empty
async with ChannelFor([ThingService()]) as channel:
client = ThingServiceClient(channel)
requests = [DoThingRequest(name) for name in things]
response = await client.do_many_things(requests)
assert len(response.names) == 0
@pytest.mark.asyncio
async def test_stream_stream_with_empty_iterable():
things = [] # empty
async with ChannelFor([ThingService()]) as channel:
client = ThingServiceClient(channel)
requests = [GetThingRequest(name) for name in things]
responses = [
response async for response in client.get_different_things(requests)
]
assert len(responses) == 0
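For reference outside the test harness, a minimal standalone client corresponding to _test_client might look like the sketch below. The 127.0.0.1:50051 address is a placeholder; the tests above deliberately use grpclib's ChannelFor so that no real socket is opened.

import asyncio

from grpclib.client import Channel

from tests.output_aristaproto.service import DoThingRequest, TestStub


async def main() -> None:
    # Connect to a (hypothetical) running ThingService.
    channel = Channel(host="127.0.0.1", port=50051)
    try:
        client = TestStub(channel)
        response = await client.do_thing(DoThingRequest(name="clean room"))
        print(response.names)  # the service echoes back ["clean room"]
    finally:
        channel.close()


# asyncio.run(main())  # requires a running server; see the ThingService sketch below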

View file

@ -0,0 +1,99 @@
import asyncio
from dataclasses import dataclass
from typing import AsyncIterator
import pytest
import aristaproto
from aristaproto.grpc.util.async_channel import AsyncChannel
@dataclass
class Message(aristaproto.Message):
body: str = aristaproto.string_field(1)
@pytest.fixture
def expected_responses():
return [Message("Hello world 1"), Message("Hello world 2"), Message("Done")]
class ClientStub:
async def connect(self, requests: AsyncIterator):
await asyncio.sleep(0.1)
async for request in requests:
await asyncio.sleep(0.1)
yield request
await asyncio.sleep(0.1)
yield Message("Done")
async def to_list(generator: AsyncIterator):
return [value async for value in generator]
@pytest.fixture
def client():
# channel = Channel(host='127.0.0.1', port=50051)
# return ClientStub(channel)
return ClientStub()
@pytest.mark.asyncio
async def test_send_from_before_connect_and_close_automatically(
client, expected_responses
):
requests = AsyncChannel()
await requests.send_from(
[Message(body="Hello world 1"), Message(body="Hello world 2")], close=True
)
responses = client.connect(requests)
assert await to_list(responses) == expected_responses
@pytest.mark.asyncio
async def test_send_from_after_connect_and_close_automatically(
client, expected_responses
):
requests = AsyncChannel()
responses = client.connect(requests)
await requests.send_from(
[Message(body="Hello world 1"), Message(body="Hello world 2")], close=True
)
assert await to_list(responses) == expected_responses
@pytest.mark.asyncio
async def test_send_from_close_manually_immediately(client, expected_responses):
requests = AsyncChannel()
responses = client.connect(requests)
await requests.send_from(
[Message(body="Hello world 1"), Message(body="Hello world 2")], close=False
)
requests.close()
assert await to_list(responses) == expected_responses
@pytest.mark.asyncio
async def test_send_individually_and_close_before_connect(client, expected_responses):
requests = AsyncChannel()
await requests.send(Message(body="Hello world 1"))
await requests.send(Message(body="Hello world 2"))
requests.close()
responses = client.connect(requests)
assert await to_list(responses) == expected_responses
@pytest.mark.asyncio
async def test_send_individually_and_close_after_connect(client, expected_responses):
requests = AsyncChannel()
await requests.send(Message(body="Hello world 1"))
await requests.send(Message(body="Hello world 2"))
responses = client.connect(requests)
requests.close()
assert await to_list(responses) == expected_responses
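Taken together, these cases pin down the AsyncChannel contract: items may be queued before or after a consumer attaches, and iteration terminates once the channel is closed. A minimal usage sketch with the same API:

import asyncio

from aristaproto.grpc.util.async_channel import AsyncChannel


async def demo() -> list:
    channel = AsyncChannel()
    # Queue two items and close the channel so iteration can terminate.
    await channel.send_from(["one", "two"], close=True)
    return [item async for item in channel]


print(asyncio.run(demo()))  # expected: ['one', 'two']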

View file

@ -0,0 +1,85 @@
from typing import Dict
import grpclib
import grpclib.server
from tests.output_aristaproto.service import (
DoThingRequest,
DoThingResponse,
GetThingRequest,
GetThingResponse,
)
class ThingService:
def __init__(self, test_hook=None):
# This lets us pass assertions to the servicer ;)
self.test_hook = test_hook
async def do_thing(
self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]"
):
request = await stream.recv_message()
if self.test_hook is not None:
self.test_hook(stream)
await stream.send_message(DoThingResponse([request.name]))
async def do_many_things(
self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]"
):
thing_names = [request.name async for request in stream]
if self.test_hook is not None:
self.test_hook(stream)
await stream.send_message(DoThingResponse(thing_names))
async def get_thing_versions(
self, stream: "grpclib.server.Stream[GetThingRequest, GetThingResponse]"
):
request = await stream.recv_message()
if self.test_hook is not None:
self.test_hook(stream)
for version_num in range(1, 6):
await stream.send_message(
GetThingResponse(name=request.name, version=version_num)
)
async def get_different_things(
self, stream: "grpclib.server.Stream[GetThingRequest, GetThingResponse]"
):
if self.test_hook is not None:
self.test_hook(stream)
# Respond to each input item immediately
response_num = 0
async for request in stream:
response_num += 1
await stream.send_message(
GetThingResponse(name=request.name, version=response_num)
)
def __mapping__(self) -> Dict[str, "grpclib.const.Handler"]:
return {
"/service.Test/DoThing": grpclib.const.Handler(
self.do_thing,
grpclib.const.Cardinality.UNARY_UNARY,
DoThingRequest,
DoThingResponse,
),
"/service.Test/DoManyThings": grpclib.const.Handler(
self.do_many_things,
grpclib.const.Cardinality.STREAM_UNARY,
DoThingRequest,
DoThingResponse,
),
"/service.Test/GetThingVersions": grpclib.const.Handler(
self.get_thing_versions,
grpclib.const.Cardinality.UNARY_STREAM,
GetThingRequest,
GetThingResponse,
),
"/service.Test/GetDifferentThings": grpclib.const.Handler(
self.get_different_things,
grpclib.const.Cardinality.STREAM_STREAM,
GetThingRequest,
GetThingResponse,
),
}
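Because __mapping__ returns plain grpclib handlers, ThingService can also be served outside of ChannelFor. A sketch using grpclib's Server (host and port are placeholders):

import asyncio

from grpclib.server import Server

from tests.grpc.thing_service import ThingService


async def serve(host: str = "127.0.0.1", port: int = 50051) -> None:
    server = Server([ThingService()])
    await server.start(host, port)
    await server.wait_closed()


# asyncio.run(serve())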

View file

@ -0,0 +1,3 @@
{
"value": true
}

View file

@ -0,0 +1,7 @@
syntax = "proto3";
package bool;
message Test {
bool value = 1;
}

View file

@ -0,0 +1,19 @@
import pytest
from tests.output_aristaproto.bool import Test
from tests.output_aristaproto_pydantic.bool import Test as TestPyd
def test_value():
message = Test()
assert not message.value, "Boolean is False by default"
def test_pydantic_no_value():
with pytest.raises(ValueError):
TestPyd()
def test_pydantic_value():
message = TestPyd(value=False)
assert not message.value
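Beyond the default-value checks, the generated message supports the usual aristaproto round-trips; a small sketch consistent with the API used elsewhere in this suite:

from tests.output_aristaproto.bool import Test

message = Test(value=True)
data = bytes(message)         # serialize to the protobuf wire format
decoded = Test().parse(data)  # deserialize into a fresh message
assert decoded.value
assert decoded.to_dict() == {"value": True}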

View file

@ -0,0 +1,3 @@
{
"data": "SGVsbG8sIFdvcmxkIQ=="
}

View file

@ -0,0 +1,7 @@
syntax = "proto3";
package bytes;
message Test {
bytes data = 1;
}

View file

@ -0,0 +1,4 @@
{
"camelCase": 1,
"snakeCase": "ONE"
}

View file

@ -0,0 +1,20 @@
syntax = "proto3";
package casing;
enum my_enum {
ZERO = 0;
ONE = 1;
TWO = 2;
}
message Test {
int32 camelCase = 1;
my_enum snake_case = 2;
snake_case_message snake_case_message = 3;
int32 UPPERCASE = 4;
}
message snake_case_message {
}

View file

@ -0,0 +1,23 @@
import tests.output_aristaproto.casing as casing
from tests.output_aristaproto.casing import Test
def test_message_attributes():
message = Test()
assert hasattr(
message, "snake_case_message"
), "snake_case field name is same in python"
assert hasattr(message, "camel_case"), "CamelCase field is snake_case in python"
assert hasattr(message, "uppercase"), "UPPERCASE field is lowercase in python"
def test_message_casing():
assert hasattr(
casing, "SnakeCaseMessage"
), "snake_case Message name is converted to CamelCase in python"
def test_enum_casing():
assert hasattr(
casing, "MyEnum"
), "snake_case Enum name is converted to CamelCase in python"

View file

@ -0,0 +1,10 @@
syntax = "proto3";
package casing_inner_class;
message Test {
message inner_class {
sint32 old_exp = 1;
}
inner_class inner = 2;
}

View file

@ -0,0 +1,14 @@
import tests.output_aristaproto.casing_inner_class as casing_inner_class
def test_message_casing_inner_class_name():
assert hasattr(
casing_inner_class, "TestInnerClass"
), "Inline defined Message is correctly converted to CamelCase"
def test_message_casing_inner_class_attributes():
message = casing_inner_class.Test()
assert hasattr(
message.inner, "old_exp"
), "Inline defined Message attribute is snake_case"

View file

@ -0,0 +1,9 @@
syntax = "proto3";
package casing_message_field_uppercase;
message Test {
int32 UPPERCASE = 1;
int32 UPPERCASE_V2 = 2;
int32 UPPER_CAMEL_CASE = 3;
}

View file

@ -0,0 +1,14 @@
from tests.output_aristaproto.casing_message_field_uppercase import Test
def test_message_casing():
message = Test()
assert hasattr(
message, "uppercase"
), "UPPERCASE attribute is converted to 'uppercase' in python"
assert hasattr(
message, "uppercase_v2"
), "UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python"
assert hasattr(
message, "upper_camel_case"
), "UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python"

30
tests/inputs/config.py Normal file
View file

@ -0,0 +1,30 @@
# Test cases that are expected to fail, e.g. unimplemented features or known bugs.
# Remove from this list once fixed.
xfail = {
"namespace_keywords", # 70
"googletypes_struct", # 9
"googletypes_value", # 9
"import_capitalized_package",
"example", # This is the example in the readme. Not a test.
}
services = {
"googletypes_request",
"googletypes_response",
"googletypes_response_embedded",
"service",
"service_separate_packages",
"import_service_input_message",
"googletypes_service_returns_empty",
"googletypes_service_returns_googletype",
"example_service",
"empty_service",
"service_uppercase",
}
# Indicate json sample messages to skip when testing that json (de)serialization
# is symmetrical, because some cases legitimately are not symmetrical.
# Each key references the name of the test scenario, and the values in the tuple
# are the names of the json files.
non_symmetrical_json = {"empty_repeated": ("empty_repeated",)}
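A consumer of non_symmetrical_json might gate the symmetry check roughly as below (a hypothetical helper; the real logic lives in the shared test harness):

from tests.inputs import config


def should_check_symmetry(test_case: str, json_sample: str) -> bool:
    # Skip JSON samples that the config declares legitimately non-symmetrical.
    return json_sample not in config.non_symmetrical_json.get(test_case, ())


assert not should_check_symmetry("empty_repeated", "empty_repeated")
assert should_check_symmetry("bool", "bool")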

View file

@ -0,0 +1,6 @@
{
"message": {
"value": "hello"
},
"value": 10
}

View file

@ -0,0 +1,14 @@
syntax = "proto3";
package deprecated;
// Some documentation about the Test message.
message Test {
Message message = 1 [deprecated=true];
int32 value = 2;
}
message Message {
option deprecated = true;
string value = 1;
}

View file

@ -0,0 +1,3 @@
{
"count": -123.45
}

View file

@ -0,0 +1,3 @@
{
"count": 123.45
}

View file

@ -0,0 +1,7 @@
syntax = "proto3";
package double;
message Test {
double count = 1;
}

View file

@ -0,0 +1,3 @@
{
"msg": [{"values":[]}]
}

View file

@ -0,0 +1,11 @@
syntax = "proto3";
package empty_repeated;
message MessageA {
repeated float values = 1;
}
message Test {
repeated MessageA msg = 1;
}

View file

@ -0,0 +1,7 @@
/* Empty service without comments */
syntax = "proto3";
package empty_service;
service Test {
}

View file

@ -0,0 +1,20 @@
syntax = "proto3";
package entry;
// This is a minimal example of a repeated message field that caused issues when
// checking whether a message is a map.
//
// During the check whether a field is a "map", the string "entry" is added to
// the field name, checked against the type name and then further checks are
// made against the nested type of a parent message. In this edge-case, the
// first check would pass even though it shouldn't and that would cause an
// error because the parent type does not have a "nested_type" attribute.
message Test {
repeated ExportEntry export = 1;
}
message ExportEntry {
string name = 1;
}
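The heuristic described in the comment reduces to roughly the following (a simplified sketch, not the compiler's exact code):

def looks_like_map_entry(field_name: str, nested_type_name: str) -> bool:
    # The compiler appends "entry" to the field name and compares it with the
    # type name, so "export" + "entry" matches "ExportEntry" case-insensitively
    # even though ExportEntry is an ordinary message, not a generated map entry.
    return (field_name + "entry").lower() == nested_type_name.lower()


assert looks_like_map_entry("export", "ExportEntry")  # the false positive described above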

View file

@ -0,0 +1,9 @@
{
"choice": "FOUR",
"choices": [
"ZERO",
"ONE",
"THREE",
"FOUR"
]
}

View file

@ -0,0 +1,25 @@
syntax = "proto3";
package enum;
// Tests that enums are correctly serialized and that skipped and out-of-order enum values are handled correctly
message Test {
Choice choice = 1;
repeated Choice choices = 2;
}
enum Choice {
ZERO = 0;
ONE = 1;
// TWO = 2;
FOUR = 4;
THREE = 3;
}
// A "C" like enum with the enum name prefixed onto members, these should be stripped
enum ArithmeticOperator {
ARITHMETIC_OPERATOR_NONE = 0;
ARITHMETIC_OPERATOR_PLUS = 1;
ARITHMETIC_OPERATOR_MINUS = 2;
ARITHMETIC_OPERATOR_0_PREFIXED = 3;
}

View file

@ -0,0 +1,114 @@
from tests.output_aristaproto.enum import (
ArithmeticOperator,
Choice,
Test,
)
def test_enum_set_and_get():
assert Test(choice=Choice.ZERO).choice == Choice.ZERO
assert Test(choice=Choice.ONE).choice == Choice.ONE
assert Test(choice=Choice.THREE).choice == Choice.THREE
assert Test(choice=Choice.FOUR).choice == Choice.FOUR
def test_enum_set_with_int():
assert Test(choice=0).choice == Choice.ZERO
assert Test(choice=1).choice == Choice.ONE
assert Test(choice=3).choice == Choice.THREE
assert Test(choice=4).choice == Choice.FOUR
def test_enum_is_comparable_with_int():
assert Test(choice=Choice.ZERO).choice == 0
assert Test(choice=Choice.ONE).choice == 1
assert Test(choice=Choice.THREE).choice == 3
assert Test(choice=Choice.FOUR).choice == 4
def test_enum_to_dict():
assert (
"choice" not in Test(choice=Choice.ZERO).to_dict()
), "Default enum value is not serialized"
assert (
Test(choice=Choice.ZERO).to_dict(include_default_values=True)["choice"]
== "ZERO"
)
assert Test(choice=Choice.ONE).to_dict()["choice"] == "ONE"
assert Test(choice=Choice.THREE).to_dict()["choice"] == "THREE"
assert Test(choice=Choice.FOUR).to_dict()["choice"] == "FOUR"
def test_repeated_enum_is_comparable_with_int():
assert Test(choices=[Choice.ZERO]).choices == [0]
assert Test(choices=[Choice.ONE]).choices == [1]
assert Test(choices=[Choice.THREE]).choices == [3]
assert Test(choices=[Choice.FOUR]).choices == [4]
def test_repeated_enum_set_and_get():
assert Test(choices=[Choice.ZERO]).choices == [Choice.ZERO]
assert Test(choices=[Choice.ONE]).choices == [Choice.ONE]
assert Test(choices=[Choice.THREE]).choices == [Choice.THREE]
assert Test(choices=[Choice.FOUR]).choices == [Choice.FOUR]
def test_repeated_enum_to_dict():
assert Test(choices=[Choice.ZERO]).to_dict()["choices"] == ["ZERO"]
assert Test(choices=[Choice.ONE]).to_dict()["choices"] == ["ONE"]
assert Test(choices=[Choice.THREE]).to_dict()["choices"] == ["THREE"]
assert Test(choices=[Choice.FOUR]).to_dict()["choices"] == ["FOUR"]
all_enums_dict = Test(
choices=[Choice.ZERO, Choice.ONE, Choice.THREE, Choice.FOUR]
).to_dict()
assert (all_enums_dict["choices"]) == ["ZERO", "ONE", "THREE", "FOUR"]
def test_repeated_enum_with_single_value_to_dict():
assert Test(choices=Choice.ONE).to_dict()["choices"] == ["ONE"]
assert Test(choices=1).to_dict()["choices"] == ["ONE"]
def test_repeated_enum_with_non_list_iterables_to_dict():
assert Test(choices=(1, 3)).to_dict()["choices"] == ["ONE", "THREE"]
assert Test(choices=(Choice.ONE, Choice.THREE)).to_dict()["choices"] == [
"ONE",
"THREE",
]
def enum_generator():
yield Choice.ONE
yield Choice.THREE
assert Test(choices=enum_generator()).to_dict()["choices"] == ["ONE", "THREE"]
def test_enum_mapped_on_parse():
# test default value
b = Test().parse(bytes(Test()))
assert b.choice.name == Choice.ZERO.name
assert b.choices == []
# test non default value
a = Test().parse(bytes(Test(choice=Choice.ONE)))
assert a.choice.name == Choice.ONE.name
assert a.choices == []
# test repeated
c = Test().parse(bytes(Test(choices=[Choice.THREE, Choice.FOUR])))
assert c.choices[0].name == Choice.THREE.name
assert c.choices[1].name == Choice.FOUR.name
# bonus: defaults after empty init are also mapped
assert Test().choice.name == Choice.ZERO.name
def test_renamed_enum_members():
assert set(ArithmeticOperator.__members__) == {
"NONE",
"PLUS",
"MINUS",
"_0_PREFIXED",
}

View file

@ -0,0 +1,911 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Author: kenton@google.com (Kenton Varda)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
//
// The messages in this file describe the definitions found in .proto files.
// A valid .proto file can be translated directly to a FileDescriptorProto
// without any other information (e.g. without reading its imports).
syntax = "proto2";
package example;
// package google.protobuf;
option go_package = "google.golang.org/protobuf/types/descriptorpb";
option java_package = "com.google.protobuf";
option java_outer_classname = "DescriptorProtos";
option csharp_namespace = "Google.Protobuf.Reflection";
option objc_class_prefix = "GPB";
option cc_enable_arenas = true;
// descriptor.proto must be optimized for speed because reflection-based
// algorithms don't work during bootstrapping.
option optimize_for = SPEED;
// The protocol compiler can output a FileDescriptorSet containing the .proto
// files it parses.
message FileDescriptorSet {
repeated FileDescriptorProto file = 1;
}
// Describes a complete .proto file.
message FileDescriptorProto {
optional string name = 1; // file name, relative to root of source tree
optional string package = 2; // e.g. "foo", "foo.bar", etc.
// Names of files imported by this file.
repeated string dependency = 3;
// Indexes of the public imported files in the dependency list above.
repeated int32 public_dependency = 10;
// Indexes of the weak imported files in the dependency list.
// For Google-internal migration only. Do not use.
repeated int32 weak_dependency = 11;
// All top-level definitions in this file.
repeated DescriptorProto message_type = 4;
repeated EnumDescriptorProto enum_type = 5;
repeated ServiceDescriptorProto service = 6;
repeated FieldDescriptorProto extension = 7;
optional FileOptions options = 8;
// This field contains optional information about the original source code.
// You may safely remove this entire field without harming runtime
// functionality of the descriptors -- the information is needed only by
// development tools.
optional SourceCodeInfo source_code_info = 9;
// The syntax of the proto file.
// The supported values are "proto2" and "proto3".
optional string syntax = 12;
}
// Describes a message type.
message DescriptorProto {
optional string name = 1;
repeated FieldDescriptorProto field = 2;
repeated FieldDescriptorProto extension = 6;
repeated DescriptorProto nested_type = 3;
repeated EnumDescriptorProto enum_type = 4;
message ExtensionRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Exclusive.
optional ExtensionRangeOptions options = 3;
}
repeated ExtensionRange extension_range = 5;
repeated OneofDescriptorProto oneof_decl = 8;
optional MessageOptions options = 7;
// Range of reserved tag numbers. Reserved tag numbers may not be used by
// fields or extension ranges in the same message. Reserved ranges may
// not overlap.
message ReservedRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Exclusive.
}
repeated ReservedRange reserved_range = 9;
// Reserved field names, which may not be used by fields in the same message.
// A given name may only be reserved once.
repeated string reserved_name = 10;
}
message ExtensionRangeOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
// Describes a field within a message.
message FieldDescriptorProto {
enum Type {
// 0 is reserved for errors.
// Order is weird for historical reasons.
TYPE_DOUBLE = 1;
TYPE_FLOAT = 2;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
// negative values are likely.
TYPE_INT64 = 3;
TYPE_UINT64 = 4;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
// negative values are likely.
TYPE_INT32 = 5;
TYPE_FIXED64 = 6;
TYPE_FIXED32 = 7;
TYPE_BOOL = 8;
TYPE_STRING = 9;
// Tag-delimited aggregate.
// Group type is deprecated and not supported in proto3. However, Proto3
// implementations should still be able to parse the group wire format and
// treat group fields as unknown fields.
TYPE_GROUP = 10;
TYPE_MESSAGE = 11; // Length-delimited aggregate.
// New in version 2.
TYPE_BYTES = 12;
TYPE_UINT32 = 13;
TYPE_ENUM = 14;
TYPE_SFIXED32 = 15;
TYPE_SFIXED64 = 16;
TYPE_SINT32 = 17; // Uses ZigZag encoding.
TYPE_SINT64 = 18; // Uses ZigZag encoding.
}
enum Label {
// 0 is reserved for errors
LABEL_OPTIONAL = 1;
LABEL_REQUIRED = 2;
LABEL_REPEATED = 3;
}
optional string name = 1;
optional int32 number = 3;
optional Label label = 4;
// If type_name is set, this need not be set. If both this and type_name
// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
optional Type type = 5;
// For message and enum types, this is the name of the type. If the name
// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
// rules are used to find the type (i.e. first the nested types within this
// message are searched, then within the parent, on up to the root
// namespace).
optional string type_name = 6;
// For extensions, this is the name of the type being extended. It is
// resolved in the same manner as type_name.
optional string extendee = 2;
// For numeric types, contains the original text representation of the value.
// For booleans, "true" or "false".
// For strings, contains the default text contents (not escaped in any way).
// For bytes, contains the C escaped value. All bytes >= 128 are escaped.
// TODO(kenton): Base-64 encode?
optional string default_value = 7;
// If set, gives the index of a oneof in the containing type's oneof_decl
// list. This field is a member of that oneof.
optional int32 oneof_index = 9;
// JSON name of this field. The value is set by protocol compiler. If the
// user has set a "json_name" option on this field, that option's value
// will be used. Otherwise, it's deduced from the field's name by converting
// it to camelCase.
optional string json_name = 10;
optional FieldOptions options = 8;
// If true, this is a proto3 "optional". When a proto3 field is optional, it
// tracks presence regardless of field type.
//
// When proto3_optional is true, this field must belong to a oneof to
// signal to old proto3 clients that presence is tracked for this field. This
// oneof is known as a "synthetic" oneof, and this field must be its sole
// member (each proto3 optional field gets its own synthetic oneof). Synthetic
// oneofs exist in the descriptor only, and do not generate any API. Synthetic
// oneofs must be ordered after all "real" oneofs.
//
// For message fields, proto3_optional doesn't create any semantic change,
// since non-repeated message fields always track presence. However it still
// indicates the semantic detail of whether the user wrote "optional" or not.
// This can be useful for round-tripping the .proto file. For consistency we
// give message fields a synthetic oneof also, even though it is not required
// to track presence. This is especially important because the parser can't
// tell if a field is a message or an enum, so it must always create a
// synthetic oneof.
//
// Proto2 optional fields do not set this flag, because they already indicate
// optional with `LABEL_OPTIONAL`.
optional bool proto3_optional = 17;
}
// Describes a oneof.
message OneofDescriptorProto {
optional string name = 1;
optional OneofOptions options = 2;
}
// Describes an enum type.
message EnumDescriptorProto {
optional string name = 1;
repeated EnumValueDescriptorProto value = 2;
optional EnumOptions options = 3;
// Range of reserved numeric values. Reserved values may not be used by
// entries in the same enum. Reserved ranges may not overlap.
//
// Note that this is distinct from DescriptorProto.ReservedRange in that it
// is inclusive such that it can appropriately represent the entire int32
// domain.
message EnumReservedRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Inclusive.
}
// Range of reserved numeric values. Reserved numeric values may not be used
// by enum values in the same enum declaration. Reserved ranges may not
// overlap.
repeated EnumReservedRange reserved_range = 4;
// Reserved enum value names, which may not be reused. A given name may only
// be reserved once.
repeated string reserved_name = 5;
}
// Describes a value within an enum.
message EnumValueDescriptorProto {
optional string name = 1;
optional int32 number = 2;
optional EnumValueOptions options = 3;
}
// Describes a service.
message ServiceDescriptorProto {
optional string name = 1;
repeated MethodDescriptorProto method = 2;
optional ServiceOptions options = 3;
}
// Describes a method of a service.
message MethodDescriptorProto {
optional string name = 1;
// Input and output type names. These are resolved in the same way as
// FieldDescriptorProto.type_name, but must refer to a message type.
optional string input_type = 2;
optional string output_type = 3;
optional MethodOptions options = 4;
// Identifies if client streams multiple client messages
optional bool client_streaming = 5 [default = false];
// Identifies if server streams multiple server messages
optional bool server_streaming = 6 [default = false];
}
// ===================================================================
// Options
// Each of the definitions above may have "options" attached. These are
// just annotations which may cause code to be generated slightly differently
// or may contain hints for code that manipulates protocol messages.
//
// Clients may define custom options as extensions of the *Options messages.
// These extensions may not yet be known at parsing time, so the parser cannot
// store the values in them. Instead it stores them in a field in the *Options
// message called uninterpreted_option. This field must have the same name
// across all *Options messages. We then use this field to populate the
// extensions when we build a descriptor, at which point all protos have been
// parsed and so all extensions are known.
//
// Extension numbers for custom options may be chosen as follows:
// * For options which will only be used within a single application or
// organization, or for experimental options, use field numbers 50000
// through 99999. It is up to you to ensure that you do not use the
// same number for multiple options.
// * For options which will be published and used publicly by multiple
// independent entities, e-mail protobuf-global-extension-registry@google.com
// to reserve extension numbers. Simply provide your project name (e.g.
// Objective-C plugin) and your project website (if available) -- there's no
// need to explain how you intend to use them. Usually you only need one
// extension number. You can declare multiple options with only one extension
// number by putting them in a sub-message. See the Custom Options section of
// the docs for examples:
// https://developers.google.com/protocol-buffers/docs/proto#options
// If this turns out to be popular, a web service will be set up
// to automatically assign option numbers.
message FileOptions {
// Sets the Java package where classes generated from this .proto will be
// placed. By default, the proto package is used, but this is often
// inappropriate because proto packages do not normally start with backwards
// domain names.
optional string java_package = 1;
// If set, all the classes from the .proto file are wrapped in a single
// outer class with the given name. This applies to both Proto1
// (equivalent to the old "--one_java_file" option) and Proto2 (where
// a .proto always translates to a single class, but you may want to
// explicitly choose the class name).
optional string java_outer_classname = 8;
// If set true, then the Java code generator will generate a separate .java
// file for each top-level message, enum, and service defined in the .proto
// file. Thus, these types will *not* be nested inside the outer class
// named by java_outer_classname. However, the outer class will still be
// generated to contain the file's getDescriptor() method as well as any
// top-level extensions defined in the file.
optional bool java_multiple_files = 10 [default = false];
// This option does nothing.
optional bool java_generate_equals_and_hash = 20 [deprecated=true];
// If set true, then the Java2 code generator will generate code that
// throws an exception whenever an attempt is made to assign a non-UTF-8
// byte sequence to a string field.
// Message reflection will do the same.
// However, an extension field still accepts non-UTF-8 byte sequences.
// This option has no effect when used with the lite runtime.
optional bool java_string_check_utf8 = 27 [default = false];
// Generated classes can be optimized for speed or code size.
enum OptimizeMode {
SPEED = 1; // Generate complete code for parsing, serialization,
// etc.
CODE_SIZE = 2; // Use ReflectionOps to implement these methods.
LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
}
optional OptimizeMode optimize_for = 9 [default = SPEED];
// Sets the Go package where structs generated from this .proto will be
// placed. If omitted, the Go package will be derived from the following:
// - The basename of the package import path, if provided.
// - Otherwise, the package statement in the .proto file, if present.
// - Otherwise, the basename of the .proto file, without extension.
optional string go_package = 11;
// Should generic services be generated in each language? "Generic" services
// are not specific to any particular RPC system. They are generated by the
// main code generators in each language (without additional plugins).
// Generic services were the only kind of service generation supported by
// early versions of google.protobuf.
//
// Generic services are now considered deprecated in favor of using plugins
// that generate code specific to your particular RPC system. Therefore,
// these default to false. Old code which depends on generic services should
// explicitly set them to true.
optional bool cc_generic_services = 16 [default = false];
optional bool java_generic_services = 17 [default = false];
optional bool py_generic_services = 18 [default = false];
optional bool php_generic_services = 42 [default = false];
// Is this file deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for everything in the file, or it will be completely ignored; in the very
// least, this is a formalization for deprecating files.
optional bool deprecated = 23 [default = false];
// Enables the use of arenas for the proto messages in this file. This applies
// only to generated classes for C++.
optional bool cc_enable_arenas = 31 [default = true];
// Sets the objective c class prefix which is prepended to all objective c
// generated classes from this .proto. There is no default.
optional string objc_class_prefix = 36;
// Namespace for generated classes; defaults to the package.
optional string csharp_namespace = 37;
// By default Swift generators will take the proto package and CamelCase it
// replacing '.' with underscore and use that to prefix the types/symbols
// defined. When this option is provided, they will use this value instead
// to prefix the types/symbols defined.
optional string swift_prefix = 39;
// Sets the php class prefix which is prepended to all php generated classes
// from this .proto. Default is empty.
optional string php_class_prefix = 40;
// Use this option to change the namespace of php generated classes. Default
// is empty. When this option is empty, the package name will be used for
// determining the namespace.
optional string php_namespace = 41;
// Use this option to change the namespace of php generated metadata classes.
// Default is empty. When this option is empty, the proto file name will be
// used for determining the namespace.
optional string php_metadata_namespace = 44;
// Use this option to change the package of ruby generated classes. Default
// is empty. When this option is not set, the package name will be used for
// determining the ruby package.
optional string ruby_package = 45;
// The parser stores options it doesn't recognize here.
// See the documentation for the "Options" section above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message.
// See the documentation for the "Options" section above.
extensions 1000 to max;
reserved 38;
}
message MessageOptions {
// Set true to use the old proto1 MessageSet wire format for extensions.
// This is provided for backwards-compatibility with the MessageSet wire
// format. You should not use this for any other reason: It's less
// efficient, has fewer features, and is more complicated.
//
// The message must be defined exactly as follows:
// message Foo {
// option message_set_wire_format = true;
// extensions 4 to max;
// }
// Note that the message cannot have any defined fields; MessageSets only
// have extensions.
//
// All extensions of your type must be singular messages; e.g. they cannot
// be int32s, enums, or repeated messages.
//
// Because this is an option, the above two restrictions are not enforced by
// the protocol compiler.
optional bool message_set_wire_format = 1 [default = false];
// Disables the generation of the standard "descriptor()" accessor, which can
// conflict with a field of the same name. This is meant to make migration
// from proto1 easier; new code should avoid fields named "descriptor".
optional bool no_standard_descriptor_accessor = 2 [default = false];
// Is this message deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the message, or it will be completely ignored; in the very least,
// this is a formalization for deprecating messages.
optional bool deprecated = 3 [default = false];
// Whether the message is an automatically generated map entry type for the
// maps field.
//
// For maps fields:
// map<KeyType, ValueType> map_field = 1;
// The parsed descriptor looks like:
// message MapFieldEntry {
// option map_entry = true;
// optional KeyType key = 1;
// optional ValueType value = 2;
// }
// repeated MapFieldEntry map_field = 1;
//
// Implementations may choose not to generate the map_entry=true message, but
// use a native map in the target language to hold the keys and values.
// The reflection APIs in such implementations still need to work as
// if the field is a repeated message field.
//
// NOTE: Do not set the option in .proto files. Always use the maps syntax
// instead. The option should only be implicitly set by the proto compiler
// parser.
optional bool map_entry = 7;
reserved 8; // javalite_serializable
reserved 9; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message FieldOptions {
// The ctype option instructs the C++ code generator to use a different
// representation of the field than it normally would. See the specific
// options below. This option is not yet implemented in the open source
// release -- sorry, we'll try to include it in a future version!
optional CType ctype = 1 [default = STRING];
enum CType {
// Default mode.
STRING = 0;
CORD = 1;
STRING_PIECE = 2;
}
// The packed option can be enabled for repeated primitive fields to enable
// a more efficient representation on the wire. Rather than repeatedly
// writing the tag and type for each element, the entire array is encoded as
// a single length-delimited blob. In proto3, only explicit setting it to
// false will avoid using packed encoding.
optional bool packed = 2;
// The jstype option determines the JavaScript type used for values of the
// field. The option is permitted only for 64 bit integral and fixed types
// (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
// is represented as JavaScript string, which avoids loss of precision that
// can happen when a large value is converted to a floating point JavaScript.
// Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
// use the JavaScript "number" type. The behavior of the default option
// JS_NORMAL is implementation dependent.
//
// This option is an enum to permit additional types to be added, e.g.
// goog.math.Integer.
optional JSType jstype = 6 [default = JS_NORMAL];
enum JSType {
// Use the default type.
JS_NORMAL = 0;
// Use JavaScript strings.
JS_STRING = 1;
// Use JavaScript numbers.
JS_NUMBER = 2;
}
// Should this field be parsed lazily? Lazy applies only to message-type
// fields. It means that when the outer message is initially parsed, the
// inner message's contents will not be parsed but instead stored in encoded
// form. The inner message will actually be parsed when it is first accessed.
//
// This is only a hint. Implementations are free to choose whether to use
// eager or lazy parsing regardless of the value of this option. However,
// setting this option true suggests that the protocol author believes that
// using lazy parsing on this field is worth the additional bookkeeping
// overhead typically needed to implement it.
//
// This option does not affect the public interface of any generated code;
// all method signatures remain the same. Furthermore, thread-safety of the
// interface is not affected by this option; const methods remain safe to
// call from multiple threads concurrently, while non-const methods continue
// to require exclusive access.
//
//
// Note that implementations may choose not to check required fields within
// a lazy sub-message. That is, calling IsInitialized() on the outer message
// may return true even if the inner message has missing required fields.
// This is necessary because otherwise the inner message would have to be
// parsed in order to perform the check, defeating the purpose of lazy
// parsing. An implementation which chooses not to check required fields
// must be consistent about it. That is, for any particular sub-message, the
// implementation must either *always* check its required fields, or *never*
// check its required fields, regardless of whether or not the message has
// been parsed.
optional bool lazy = 5 [default = false];
// Is this field deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for accessors, or it will be completely ignored; in the very least, this
// is a formalization for deprecating fields.
optional bool deprecated = 3 [default = false];
// For Google-internal migration only. Do not use.
optional bool weak = 10 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
reserved 4; // removed jtype
}
message OneofOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumOptions {
// Set this option to true to allow mapping different tag names to the same
// value.
optional bool allow_alias = 2;
// Is this enum deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum, or it will be completely ignored; in the very least, this
// is a formalization for deprecating enums.
optional bool deprecated = 3 [default = false];
reserved 5; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumValueOptions {
// Is this enum value deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum value, or it will be completely ignored; in the very least,
// this is a formalization for deprecating enum values.
optional bool deprecated = 1 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message ServiceOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this service deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the service, or it will be completely ignored; in the very least,
// this is a formalization for deprecating services.
optional bool deprecated = 33 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message MethodOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this method deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the method, or it will be completely ignored; in the very least,
// this is a formalization for deprecating methods.
optional bool deprecated = 33 [default = false];
// Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
// or neither? HTTP based RPC implementation may choose GET verb for safe
// methods, and PUT verb for idempotent methods instead of the default POST.
enum IdempotencyLevel {
IDEMPOTENCY_UNKNOWN = 0;
NO_SIDE_EFFECTS = 1; // implies idempotent
IDEMPOTENT = 2; // idempotent, but may have side effects
}
optional IdempotencyLevel idempotency_level = 34
[default = IDEMPOTENCY_UNKNOWN];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
// A message representing a option the parser does not recognize. This only
// appears in options protos created by the compiler::Parser class.
// DescriptorPool resolves these when building Descriptor objects. Therefore,
// options protos in descriptor objects (e.g. returned by Descriptor::options(),
// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
// in them.
message UninterpretedOption {
// The name of the uninterpreted option. Each string represents a segment in
// a dot-separated name. is_extension is true iff a segment represents an
// extension (denoted with parentheses in options specs in .proto files).
// E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents
// "foo.(bar.baz).qux".
message NamePart {
required string name_part = 1;
required bool is_extension = 2;
}
repeated NamePart name = 2;
// The value of the uninterpreted option, in whatever type the tokenizer
// identified it as during parsing. Exactly one of these should be set.
optional string identifier_value = 3;
optional uint64 positive_int_value = 4;
optional int64 negative_int_value = 5;
optional double double_value = 6;
optional bytes string_value = 7;
optional string aggregate_value = 8;
}
// ===================================================================
// Optional source code info
// Encapsulates information about the original source file from which a
// FileDescriptorProto was generated.
message SourceCodeInfo {
// A Location identifies a piece of source code in a .proto file which
// corresponds to a particular definition. This information is intended
// to be useful to IDEs, code indexers, documentation generators, and similar
// tools.
//
// For example, say we have a file like:
// message Foo {
// optional string foo = 1;
// }
// Let's look at just the field definition:
// optional string foo = 1;
// ^ ^^ ^^ ^ ^^^
// a bc de f ghi
// We have the following locations:
// span path represents
// [a,i) [ 4, 0, 2, 0 ] The whole field definition.
// [a,b) [ 4, 0, 2, 0, 4 ] The label (optional).
// [c,d) [ 4, 0, 2, 0, 5 ] The type (string).
// [e,f) [ 4, 0, 2, 0, 1 ] The name (foo).
// [g,h) [ 4, 0, 2, 0, 3 ] The number (1).
//
// Notes:
// - A location may refer to a repeated field itself (i.e. not to any
// particular index within it). This is used whenever a set of elements are
// logically enclosed in a single code segment. For example, an entire
// extend block (possibly containing multiple extension definitions) will
// have an outer location whose path refers to the "extensions" repeated
// field without an index.
// - Multiple locations may have the same path. This happens when a single
// logical declaration is spread out across multiple places. The most
// obvious example is the "extend" block again -- there may be multiple
// extend blocks in the same scope, each of which will have the same path.
// - A location's span is not always a subset of its parent's span. For
// example, the "extendee" of an extension declaration appears at the
// beginning of the "extend" block and is shared by all extensions within
// the block.
// - Just because a location's span is a subset of some other location's span
// does not mean that it is a descendant. For example, a "group" defines
// both a type and a field in a single declaration. Thus, the locations
// corresponding to the type and field and their components will overlap.
// - Code which tries to interpret locations should probably be designed to
// ignore those that it doesn't understand, as more types of locations could
// be recorded in the future.
repeated Location location = 1;
message Location {
// Identifies which part of the FileDescriptorProto was defined at this
// location.
//
// Each element is a field number or an index. They form a path from
// the root FileDescriptorProto to the place where the definition occurs. For
// example, this path:
// [ 4, 3, 2, 7, 1 ]
// refers to:
// file.message_type(3) // 4, 3
// .field(7) // 2, 7
// .name() // 1
// This is because FileDescriptorProto.message_type has field number 4:
// repeated DescriptorProto message_type = 4;
// and DescriptorProto.field has field number 2:
// repeated FieldDescriptorProto field = 2;
// and FieldDescriptorProto.name has field number 1:
// optional string name = 1;
//
// Thus, the above path gives the location of a field name. If we removed
// the last element:
// [ 4, 3, 2, 7 ]
// this path refers to the whole field declaration (from the beginning
// of the label to the terminating semicolon).
repeated int32 path = 1 [packed = true];
// Always has exactly three or four elements: start line, start column,
// end line (optional, otherwise assumed same as start line), end column.
// These are packed into a single field for efficiency. Note that line
// and column numbers are zero-based -- typically you will want to add
// 1 to each before displaying to a user.
repeated int32 span = 2 [packed = true];
// If this SourceCodeInfo represents a complete declaration, these are any
// comments appearing before and after the declaration which appear to be
// attached to the declaration.
//
// A series of line comments appearing on consecutive lines, with no other
// tokens appearing on those lines, will be treated as a single comment.
//
// leading_detached_comments will keep paragraphs of comments that appear
// before (but not connected to) the current element. Each paragraph,
// separated by empty lines, will be one comment element in the repeated
// field.
//
// Only the comment content is provided; comment markers (e.g. //) are
// stripped out. For block comments, leading whitespace and an asterisk
// will be stripped from the beginning of each line other than the first.
// Newlines are included in the output.
//
// Examples:
//
// optional int32 foo = 1; // Comment attached to foo.
// // Comment attached to bar.
// optional int32 bar = 2;
//
// optional string baz = 3;
// // Comment attached to baz.
// // Another line attached to baz.
//
// // Comment attached to qux.
// //
// // Another line attached to qux.
// optional double qux = 4;
//
// // Detached comment for corge. This is not leading or trailing comments
// // to qux or corge because there are blank lines separating it from
// // both.
//
// // Detached comment for corge paragraph 2.
//
// optional string corge = 5;
// /* Block comment attached
// * to corge. Leading asterisks
// * will be removed. */
// /* Block comment attached to
// * grault. */
// optional int32 grault = 6;
//
// // ignored detached comments.
optional string leading_comments = 3;
optional string trailing_comments = 4;
repeated string leading_detached_comments = 6;
}
}
// Describes the relationship between generated code and its original source
// file. A GeneratedCodeInfo message is associated with only one generated
// source file, but may contain references to different source .proto files.
message GeneratedCodeInfo {
// An Annotation connects some span of text in generated code to an element
// of its generating .proto file.
repeated Annotation annotation = 1;
message Annotation {
// Identifies the element in the original source .proto file. This field
// is formatted the same as SourceCodeInfo.Location.path.
repeated int32 path = 1 [packed = true];
// Identifies the filesystem path to the original source .proto.
optional string source_file = 2;
// Identifies the starting offset in bytes in the generated code
// that relates to the identified object.
optional int32 begin = 3;
// Identifies the ending offset in bytes in the generated code that
// relates to the identified offset. The end offset should be one past
// the last relevant byte (so the length of the text = end - begin).
optional int32 end = 4;
}
}

View file

@ -0,0 +1,20 @@
syntax = "proto3";

package example_service;

service Test {
    rpc ExampleUnaryUnary(ExampleRequest) returns (ExampleResponse);
    rpc ExampleUnaryStream(ExampleRequest) returns (stream ExampleResponse);
    rpc ExampleStreamUnary(stream ExampleRequest) returns (ExampleResponse);
    rpc ExampleStreamStream(stream ExampleRequest) returns (stream ExampleResponse);
}

message ExampleRequest {
    string example_string = 1;
    int64 example_integer = 2;
}

message ExampleResponse {
    string example_string = 1;
    int64 example_integer = 2;
}


@@ -0,0 +1,86 @@
from typing import (
    AsyncIterable,
    AsyncIterator,
)

import pytest
from grpclib.testing import ChannelFor

from tests.output_aristaproto.example_service import (
    ExampleRequest,
    ExampleResponse,
    TestBase,
    TestStub,
)


class ExampleService(TestBase):
    async def example_unary_unary(
        self, example_request: ExampleRequest
    ) -> "ExampleResponse":
        return ExampleResponse(
            example_string=example_request.example_string,
            example_integer=example_request.example_integer,
        )

    async def example_unary_stream(
        self, example_request: ExampleRequest
    ) -> AsyncIterator["ExampleResponse"]:
        response = ExampleResponse(
            example_string=example_request.example_string,
            example_integer=example_request.example_integer,
        )
        yield response
        yield response
        yield response

    async def example_stream_unary(
        self, example_request_iterator: AsyncIterator["ExampleRequest"]
    ) -> "ExampleResponse":
        async for example_request in example_request_iterator:
            return ExampleResponse(
                example_string=example_request.example_string,
                example_integer=example_request.example_integer,
            )

    async def example_stream_stream(
        self, example_request_iterator: AsyncIterator["ExampleRequest"]
    ) -> AsyncIterator["ExampleResponse"]:
        async for example_request in example_request_iterator:
            yield ExampleResponse(
                example_string=example_request.example_string,
                example_integer=example_request.example_integer,
            )


@pytest.mark.asyncio
async def test_calls_with_different_cardinalities():
    example_request = ExampleRequest("test string", 42)

    async with ChannelFor([ExampleService()]) as channel:
        stub = TestStub(channel)

        # unary unary
        response = await stub.example_unary_unary(example_request)
        assert response.example_string == example_request.example_string
        assert response.example_integer == example_request.example_integer

        # unary stream
        async for response in stub.example_unary_stream(example_request):
            assert response.example_string == example_request.example_string
            assert response.example_integer == example_request.example_integer

        # stream unary
        async def request_iterator():
            yield example_request
            yield example_request
            yield example_request

        response = await stub.example_stream_unary(request_iterator())
        assert response.example_string == example_request.example_string
        assert response.example_integer == example_request.example_integer

        # stream stream
        async for response in stub.example_stream_stream(request_iterator()):
            assert response.example_string == example_request.example_string
            assert response.example_integer == example_request.example_integer


@@ -0,0 +1,7 @@
{
"int": 26,
"float": 26.0,
"str": "value-for-str",
"bytes": "001a",
"bool": true
}


@@ -0,0 +1,13 @@
syntax = "proto3";

package field_name_identical_to_type;

// Tests that messages may contain fields with names that are identical to their python types (PR #294)
message Test {
    int32 int = 1;
    float float = 2;
    string str = 3;
    bytes bytes = 4;
    bool bool = 5;
}
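A hedged usage sketch of the point this fixture makes (the import path mirrors the generated test modules elsewhere in this diff, and is an assumption): since int, float, str, bytes and bool are builtins rather than keywords, they are legal field and keyword-argument names on the generated dataclass.

from tests.output_aristaproto.field_name_identical_to_type import Test

# "001a" in the JSON fixture above is the hex encoding of these two bytes.
test = Test(int=26, float=26.0, str="value-for-str", bytes=b"\x00\x1a", bool=True)
assert test.to_dict()["int"] == 26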


@@ -0,0 +1,6 @@
{
"foo": 4294967295,
"bar": -2147483648,
"baz": "18446744073709551615",
"qux": "-9223372036854775808"
}


@@ -0,0 +1,10 @@
syntax = "proto3";

package fixed;

message Test {
    fixed32 foo = 1;
    sfixed32 bar = 2;
    fixed64 baz = 3;
    sfixed64 qux = 4;
}
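A sketch of how the JSON fixture above pairs with this message (import path assumed, matching the other tests in this diff): proto3 JSON keeps 32-bit values as numbers but renders 64-bit values as strings, which is exactly what the fixture shows.

from tests.output_aristaproto.fixed import Test

test = Test(
    foo=4294967295,            # max fixed32
    bar=-2147483648,           # min sfixed32
    baz=18446744073709551615,  # max fixed64
    qux=-9223372036854775808,  # min sfixed64
)
assert test.to_dict()["foo"] == 4294967295
assert test.to_dict()["baz"] == "18446744073709551615"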


@@ -0,0 +1,9 @@
{
"positive": "Infinity",
"negative": "-Infinity",
"nan": "NaN",
"three": 3.0,
"threePointOneFour": 3.14,
"negThree": -3.0,
"negThreePointOneFour": -3.14
}


@@ -0,0 +1,14 @@
syntax = "proto3";

package float;

// Some documentation about the Test message.
message Test {
    double positive = 1;
    double negative = 2;
    double nan = 3;
    double three = 4;
    double three_point_one_four = 5;
    double neg_three = 6;
    double neg_three_point_one_four = 7;
}
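The fixture above relies on the proto3 JSON convention that non-finite doubles become the strings "Infinity", "-Infinity" and "NaN". A small sketch under the same import-path assumption:

import math

from tests.output_aristaproto.float import Test

test = Test(positive=math.inf, negative=-math.inf, nan=float("nan"))
assert test.to_dict()["positive"] == "Infinity"
assert test.to_dict()["negative"] == "-Infinity"
assert test.to_dict()["nan"] == "NaN"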


@@ -0,0 +1,22 @@
syntax = "proto3";

import "google/protobuf/timestamp.proto";

package google_impl_behavior_equivalence;

message Foo { int64 bar = 1; }

message Test {
    oneof group {
        string string = 1;
        int64 integer = 2;
        Foo foo = 3;
    }
}

message Spam {
    google.protobuf.Timestamp ts = 1;
}

message Request { Empty foo = 1; }

message Empty {}
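Before the equivalence test below, a brief sketch of the oneof semantics this file exercises (assuming aristaproto keeps betterproto's which_one_of helper): assigning one member of the group clears the others.

import aristaproto

from tests.output_aristaproto.google_impl_behavior_equivalence import Test

test = Test(string="abc")
assert aristaproto.which_one_of(test, "group") == ("string", "abc")

# Setting another member switches which field the oneof reports as set.
test.integer = 2
assert aristaproto.which_one_of(test, "group") == ("integer", 2)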


@@ -0,0 +1,93 @@
from datetime import (
    datetime,
    timezone,
)

import pytest
from google.protobuf import json_format
from google.protobuf.timestamp_pb2 import Timestamp

import aristaproto
from tests.output_aristaproto.google_impl_behavior_equivalence import (
    Empty,
    Foo,
    Request,
    Spam,
    Test,
)
from tests.output_reference.google_impl_behavior_equivalence.google_impl_behavior_equivalence_pb2 import (
    Empty as ReferenceEmpty,
    Foo as ReferenceFoo,
    Request as ReferenceRequest,
    Spam as ReferenceSpam,
    Test as ReferenceTest,
)


def test_oneof_serializes_similar_to_google_oneof():
    tests = [
        (Test(string="abc"), ReferenceTest(string="abc")),
        (Test(integer=2), ReferenceTest(integer=2)),
        (Test(foo=Foo(bar=1)), ReferenceTest(foo=ReferenceFoo(bar=1))),
        # Default values should also behave the same within oneofs
        (Test(string=""), ReferenceTest(string="")),
        (Test(integer=0), ReferenceTest(integer=0)),
        (Test(foo=Foo(bar=0)), ReferenceTest(foo=ReferenceFoo(bar=0))),
    ]
    for message, message_reference in tests:
        # NOTE: As of July 2020, MessageToJson inserts newlines in the output
        # string, so just compare dicts
        assert message.to_dict() == json_format.MessageToDict(message_reference)


def test_bytes_are_the_same_for_oneof():
    message = Test(string="")
    message_reference = ReferenceTest(string="")

    message_bytes = bytes(message)
    message_reference_bytes = message_reference.SerializeToString()

    assert message_bytes == message_reference_bytes

    message2 = Test().parse(message_reference_bytes)
    message_reference2 = ReferenceTest()
    message_reference2.ParseFromString(message_reference_bytes)

    assert message == message2
    assert message_reference == message_reference2

    # None of these fields were explicitly set BUT they should not actually be null
    # themselves
    assert not hasattr(message, "foo")
    assert object.__getattribute__(message, "foo") == aristaproto.PLACEHOLDER
    assert not hasattr(message2, "foo")
    assert object.__getattribute__(message2, "foo") == aristaproto.PLACEHOLDER

    assert isinstance(message_reference.foo, ReferenceFoo)
    assert isinstance(message_reference2.foo, ReferenceFoo)


@pytest.mark.parametrize("dt", (datetime.min.replace(tzinfo=timezone.utc),))
def test_datetime_clamping(dt):  # see #407
    ts = Timestamp()
    ts.FromDatetime(dt)
    assert bytes(Spam(dt)) == ReferenceSpam(ts=ts).SerializeToString()
    message_bytes = bytes(Spam(dt))

    assert (
        Spam().parse(message_bytes).ts.timestamp()
        == ReferenceSpam.FromString(message_bytes).ts.seconds
    )


def test_empty_message_field():
    message = Request()
    reference_message = ReferenceRequest()

    message.foo = Empty()
    reference_message.foo.CopyFrom(ReferenceEmpty())

    assert aristaproto.serialized_on_wire(message.foo)
    assert reference_message.HasField("foo")

    assert bytes(message) == reference_message.SerializeToString()


@@ -0,0 +1 @@
{}


@@ -0,0 +1,7 @@
{
"maybe": false,
"ts": "1972-01-01T10:00:20.021Z",
"duration": "1.200s",
"important": 10,
"empty": {}
}


@@ -0,0 +1,16 @@
syntax = "proto3";

package googletypes;

import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";
import "google/protobuf/empty.proto";

message Test {
    google.protobuf.BoolValue maybe = 1;
    google.protobuf.Timestamp ts = 2;
    google.protobuf.Duration duration = 3;
    google.protobuf.Int32Value important = 4;
    google.protobuf.Empty empty = 5;
}
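A sketch tying this message to the JSON fixture above (import path assumed): the well-known wrapper types map to optional Python scalars, Timestamp to a timezone-aware datetime, and Duration to a timedelta, with the JSON forms shown in the fixture.

from datetime import (
    datetime,
    timedelta,
    timezone,
)

from tests.output_aristaproto.googletypes import Test

test = Test(
    maybe=False,
    ts=datetime(1972, 1, 1, 10, 0, 20, 21000, tzinfo=timezone.utc),
    duration=timedelta(seconds=1, milliseconds=200),
    important=10,
)
assert test.to_dict()["ts"] == "1972-01-01T10:00:20.021Z"
assert test.to_dict()["duration"] == "1.200s"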


@@ -0,0 +1,29 @@
syntax = "proto3";

package googletypes_request;

import "google/protobuf/duration.proto";
import "google/protobuf/empty.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";

// Tests that google types can be used as params
service Test {
    rpc SendDouble (google.protobuf.DoubleValue) returns (Input);
    rpc SendFloat (google.protobuf.FloatValue) returns (Input);
    rpc SendInt64 (google.protobuf.Int64Value) returns (Input);
    rpc SendUInt64 (google.protobuf.UInt64Value) returns (Input);
    rpc SendInt32 (google.protobuf.Int32Value) returns (Input);
    rpc SendUInt32 (google.protobuf.UInt32Value) returns (Input);
    rpc SendBool (google.protobuf.BoolValue) returns (Input);
    rpc SendString (google.protobuf.StringValue) returns (Input);
    rpc SendBytes (google.protobuf.BytesValue) returns (Input);
    rpc SendDatetime (google.protobuf.Timestamp) returns (Input);
    rpc SendTimedelta (google.protobuf.Duration) returns (Input);
    rpc SendEmpty (google.protobuf.Empty) returns (Input);
}

message Input {
}


@@ -0,0 +1,47 @@
from datetime import (
    datetime,
    timedelta,
)
from typing import (
    Any,
    Callable,
)

import pytest

import aristaproto.lib.google.protobuf as protobuf
from tests.mocks import MockChannel
from tests.output_aristaproto.googletypes_request import (
    Input,
    TestStub,
)

test_cases = [
    (TestStub.send_double, protobuf.DoubleValue, 2.5),
    (TestStub.send_float, protobuf.FloatValue, 2.5),
    (TestStub.send_int64, protobuf.Int64Value, -64),
    (TestStub.send_u_int64, protobuf.UInt64Value, 64),
    (TestStub.send_int32, protobuf.Int32Value, -32),
    (TestStub.send_u_int32, protobuf.UInt32Value, 32),
    (TestStub.send_bool, protobuf.BoolValue, True),
    (TestStub.send_string, protobuf.StringValue, "string"),
    (TestStub.send_bytes, protobuf.BytesValue, bytes(0xFF)[0:4]),
    (TestStub.send_datetime, protobuf.Timestamp, datetime(2038, 1, 19, 3, 14, 8)),
    (TestStub.send_timedelta, protobuf.Duration, timedelta(seconds=123456)),
]


@pytest.mark.asyncio
@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases)
async def test_channel_receives_wrapped_type(
    service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value
):
    wrapped_value = wrapper_class()
    wrapped_value.value = value
    channel = MockChannel(responses=[Input()])
    service = TestStub(channel)

    await service_method(service, wrapped_value)

    assert channel.requests[0]["request"] == type(wrapped_value)


@@ -0,0 +1,23 @@
syntax = "proto3";

package googletypes_response;

import "google/protobuf/wrappers.proto";

// Tests that wrapped values can be used directly as return values
service Test {
    rpc GetDouble (Input) returns (google.protobuf.DoubleValue);
    rpc GetFloat (Input) returns (google.protobuf.FloatValue);
    rpc GetInt64 (Input) returns (google.protobuf.Int64Value);
    rpc GetUInt64 (Input) returns (google.protobuf.UInt64Value);
    rpc GetInt32 (Input) returns (google.protobuf.Int32Value);
    rpc GetUInt32 (Input) returns (google.protobuf.UInt32Value);
    rpc GetBool (Input) returns (google.protobuf.BoolValue);
    rpc GetString (Input) returns (google.protobuf.StringValue);
    rpc GetBytes (Input) returns (google.protobuf.BytesValue);
}

message Input {
}


@@ -0,0 +1,64 @@
from typing import (
    Any,
    Callable,
    Optional,
)

import pytest

import aristaproto.lib.google.protobuf as protobuf
from tests.mocks import MockChannel
from tests.output_aristaproto.googletypes_response import (
    Input,
    TestStub,
)

test_cases = [
    (TestStub.get_double, protobuf.DoubleValue, 2.5),
    (TestStub.get_float, protobuf.FloatValue, 2.5),
    (TestStub.get_int64, protobuf.Int64Value, -64),
    (TestStub.get_u_int64, protobuf.UInt64Value, 64),
    (TestStub.get_int32, protobuf.Int32Value, -32),
    (TestStub.get_u_int32, protobuf.UInt32Value, 32),
    (TestStub.get_bool, protobuf.BoolValue, True),
    (TestStub.get_string, protobuf.StringValue, "string"),
    (TestStub.get_bytes, protobuf.BytesValue, bytes(0xFF)[0:4]),
]


@pytest.mark.asyncio
@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases)
async def test_channel_receives_wrapped_type(
    service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value
):
    wrapped_value = wrapper_class()
    wrapped_value.value = value
    channel = MockChannel(responses=[wrapped_value])
    service = TestStub(channel)
    method_param = Input()

    await service_method(service, method_param)

    assert channel.requests[0]["response_type"] != Optional[type(value)]
    assert channel.requests[0]["response_type"] == type(wrapped_value)


@pytest.mark.asyncio
@pytest.mark.xfail
@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases)
async def test_service_unwraps_response(
    service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value
):
    """
    grpclib does not unwrap wrapper values returned by services
    """
    wrapped_value = wrapper_class()
    wrapped_value.value = value
    service = TestStub(MockChannel(responses=[wrapped_value]))
    method_param = Input()

    response_value = await service_method(service, method_param)

    assert response_value == value
    assert type(response_value) == type(value)


@@ -0,0 +1,26 @@
syntax = "proto3";

package googletypes_response_embedded;

import "google/protobuf/wrappers.proto";

// Tests that wrapped values are supported as part of output message
service Test {
    rpc getOutput (Input) returns (Output);
}

message Input {
}

message Output {
    google.protobuf.DoubleValue double_value = 1;
    google.protobuf.FloatValue float_value = 2;
    google.protobuf.Int64Value int64_value = 3;
    google.protobuf.UInt64Value uint64_value = 4;
    google.protobuf.Int32Value int32_value = 5;
    google.protobuf.UInt32Value uint32_value = 6;
    google.protobuf.BoolValue bool_value = 7;
    google.protobuf.StringValue string_value = 8;
    google.protobuf.BytesValue bytes_value = 9;
}


@@ -0,0 +1,40 @@
import pytest

from tests.mocks import MockChannel
from tests.output_aristaproto.googletypes_response_embedded import (
    Input,
    Output,
    TestStub,
)


@pytest.mark.asyncio
async def test_service_passes_through_unwrapped_values_embedded_in_response():
    """
    We do not need to implement value unwrapping for embedded well-known types,
    as this is already handled by grpclib. This test merely shows that this is the case.
    """
    output = Output(
        double_value=10.0,
        float_value=12.0,
        int64_value=-13,
        uint64_value=14,
        int32_value=-15,
        uint32_value=16,
        bool_value=True,
        string_value="string",
        bytes_value=bytes(0xFF)[0:4],
    )

    service = TestStub(MockChannel(responses=[output]))
    response = await service.get_output(Input())

    assert response.double_value == 10.0
    assert response.float_value == 12.0
    assert response.int64_value == -13
    assert response.uint64_value == 14
    assert response.int32_value == -15
    assert response.uint32_value == 16
    assert response.bool_value
    assert response.string_value == "string"
    assert response.bytes_value == bytes(0xFF)[0:4]


@@ -0,0 +1,13 @@
syntax = "proto3";

package googletypes_service_returns_empty;

import "google/protobuf/empty.proto";

service Test {
    rpc Send (RequestMessage) returns (google.protobuf.Empty) {
    }
}

message RequestMessage {
}
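No companion test for this service appears in the shown portion of the diff, so here is a hedged client-side sketch; MockChannel and the snake_case stub method name follow the conventions of the other tests above, and the Empty import path is an assumption.

import asyncio

import aristaproto.lib.google.protobuf as protobuf
from tests.mocks import MockChannel
from tests.output_aristaproto.googletypes_service_returns_empty import (
    RequestMessage,
    TestStub,
)


async def demo():
    # Send carries no meaningful payload in either direction; the mocked
    # channel just replies with an Empty message.
    stub = TestStub(MockChannel(responses=[protobuf.Empty()]))
    await stub.send(RequestMessage())


asyncio.run(demo())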

Some files were not shown because too many files have changed in this diff.