Adding upstream version 1.2+20240521.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent
6b2864e4b9
commit
8512f66c5a
229 changed files with 19561 additions and 0 deletions
91
tests/README.md
Normal file
91
tests/README.md
Normal file
|
@ -0,0 +1,91 @@
|
|||
# Standard Tests Development Guide
|
||||
|
||||
Standard test cases are found in [aristaproto/tests/inputs](inputs), where each subdirectory represents a testcase, that is verified in isolation.
|
||||
|
||||
```
|
||||
inputs/
|
||||
bool/
|
||||
double/
|
||||
int32/
|
||||
...
|
||||
```
|
||||
|
||||
## Test case directory structure
|
||||
|
||||
Each testcase has a `<name>.proto` file with a message called `Test`, and optionally a matching `.json` file and a custom test called `test_*.py`.
|
||||
|
||||
```bash
|
||||
bool/
|
||||
bool.proto
|
||||
bool.json # optional
|
||||
test_bool.py # optional
|
||||
```
|
||||
|
||||
### proto
|
||||
|
||||
`<name>.proto` — *The protobuf message to test*
|
||||
|
||||
```protobuf
|
||||
syntax = "proto3";
|
||||
|
||||
message Test {
|
||||
bool value = 1;
|
||||
}
|
||||
```
|
||||
|
||||
You can add multiple `.proto` files to the test case, as long as one file matches the directory name.
|
||||
|
||||
### json
|
||||
|
||||
`<name>.json` — *Test-data to validate the message with*
|
||||
|
||||
```json
|
||||
{
|
||||
"value": true
|
||||
}
|
||||
```
|
||||
|
||||
### pytest
|
||||
|
||||
`test_<name>.py` — *Custom test to validate specific aspects of the generated class*
|
||||
|
||||
```python
|
||||
from tests.output_aristaproto.bool.bool import Test
|
||||
|
||||
def test_value():
|
||||
message = Test()
|
||||
assert not message.value, "Boolean is False by default"
|
||||
```
|
||||
|
||||
## Standard tests
|
||||
|
||||
The following tests are automatically executed for all cases:
|
||||
|
||||
- [x] Can the generated python code be imported?
|
||||
- [x] Can the generated message class be instantiated?
|
||||
- [x] Is the generated code compatible with the Google's `grpc_tools.protoc` implementation?
|
||||
- _when `.json` is present_
|
||||
|
||||
## Running the tests
|
||||
|
||||
- `pipenv run generate`
|
||||
This generates:
|
||||
- `aristaproto/tests/output_aristaproto` — *the plugin generated python classes*
|
||||
- `aristaproto/tests/output_reference` — *reference implementation classes*
|
||||
- `pipenv run test`
|
||||
|
||||
## Intentionally Failing tests
|
||||
|
||||
The standard test suite includes tests that fail by intention. These tests document known bugs and missing features that are intended to be corrected in the future.
|
||||
|
||||
When running `pytest`, they show up as `x` or `X` in the test results.
|
||||
|
||||
```
|
||||
aristaproto/tests/test_inputs.py ..x...x..x...x.X........xx........x.....x.......x.xx....x...................... [ 84%]
|
||||
```
|
||||
|
||||
- `.` — PASSED
|
||||
- `x` — XFAIL: expected failure
|
||||
- `X` — XPASS: expected failure, but still passed
|
||||
|
||||
Test cases marked for expected failure are declared in [inputs/config.py](inputs/config.py)
|
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
22
tests/conftest.py
Normal file
22
tests/conftest.py
Normal file
|
@ -0,0 +1,22 @@
|
|||
import copy
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption(
|
||||
"--repeat", type=int, default=1, help="repeat the operation multiple times"
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def repeat(request):
|
||||
return request.config.getoption("repeat")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def reset_sys_path():
|
||||
original = copy.deepcopy(sys.path)
|
||||
yield
|
||||
sys.path = original
|
196
tests/generate.py
Executable file
196
tests/generate.py
Executable file
|
@ -0,0 +1,196 @@
|
|||
#!/usr/bin/env python
|
||||
import asyncio
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Set
|
||||
|
||||
from tests.util import (
|
||||
get_directories,
|
||||
inputs_path,
|
||||
output_path_aristaproto,
|
||||
output_path_aristaproto_pydantic,
|
||||
output_path_reference,
|
||||
protoc,
|
||||
)
|
||||
|
||||
|
||||
# Force pure-python implementation instead of C++, otherwise imports
|
||||
# break things because we can't properly reset the symbol database.
|
||||
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"
|
||||
|
||||
|
||||
def clear_directory(dir_path: Path):
|
||||
for file_or_directory in dir_path.glob("*"):
|
||||
if file_or_directory.is_dir():
|
||||
shutil.rmtree(file_or_directory)
|
||||
else:
|
||||
file_or_directory.unlink()
|
||||
|
||||
|
||||
async def generate(whitelist: Set[str], verbose: bool):
|
||||
test_case_names = set(get_directories(inputs_path)) - {"__pycache__"}
|
||||
|
||||
path_whitelist = set()
|
||||
name_whitelist = set()
|
||||
for item in whitelist:
|
||||
if item in test_case_names:
|
||||
name_whitelist.add(item)
|
||||
continue
|
||||
path_whitelist.add(item)
|
||||
|
||||
generation_tasks = []
|
||||
for test_case_name in sorted(test_case_names):
|
||||
test_case_input_path = inputs_path.joinpath(test_case_name).resolve()
|
||||
if (
|
||||
whitelist
|
||||
and str(test_case_input_path) not in path_whitelist
|
||||
and test_case_name not in name_whitelist
|
||||
):
|
||||
continue
|
||||
generation_tasks.append(
|
||||
generate_test_case_output(test_case_input_path, test_case_name, verbose)
|
||||
)
|
||||
|
||||
failed_test_cases = []
|
||||
# Wait for all subprocs and match any failures to names to report
|
||||
for test_case_name, result in zip(
|
||||
sorted(test_case_names), await asyncio.gather(*generation_tasks)
|
||||
):
|
||||
if result != 0:
|
||||
failed_test_cases.append(test_case_name)
|
||||
|
||||
if len(failed_test_cases) > 0:
|
||||
sys.stderr.write(
|
||||
"\n\033[31;1;4mFailed to generate the following test cases:\033[0m\n"
|
||||
)
|
||||
for failed_test_case in failed_test_cases:
|
||||
sys.stderr.write(f"- {failed_test_case}\n")
|
||||
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
async def generate_test_case_output(
|
||||
test_case_input_path: Path, test_case_name: str, verbose: bool
|
||||
) -> int:
|
||||
"""
|
||||
Returns the max of the subprocess return values
|
||||
"""
|
||||
|
||||
test_case_output_path_reference = output_path_reference.joinpath(test_case_name)
|
||||
test_case_output_path_aristaproto = output_path_aristaproto
|
||||
test_case_output_path_aristaproto_pyd = output_path_aristaproto_pydantic
|
||||
|
||||
os.makedirs(test_case_output_path_reference, exist_ok=True)
|
||||
os.makedirs(test_case_output_path_aristaproto, exist_ok=True)
|
||||
os.makedirs(test_case_output_path_aristaproto_pyd, exist_ok=True)
|
||||
|
||||
clear_directory(test_case_output_path_reference)
|
||||
clear_directory(test_case_output_path_aristaproto)
|
||||
|
||||
(
|
||||
(ref_out, ref_err, ref_code),
|
||||
(plg_out, plg_err, plg_code),
|
||||
(plg_out_pyd, plg_err_pyd, plg_code_pyd),
|
||||
) = await asyncio.gather(
|
||||
protoc(test_case_input_path, test_case_output_path_reference, True),
|
||||
protoc(test_case_input_path, test_case_output_path_aristaproto, False),
|
||||
protoc(
|
||||
test_case_input_path, test_case_output_path_aristaproto_pyd, False, True
|
||||
),
|
||||
)
|
||||
|
||||
if ref_code == 0:
|
||||
print(f"\033[31;1;4mGenerated reference output for {test_case_name!r}\033[0m")
|
||||
else:
|
||||
print(
|
||||
f"\033[31;1;4mFailed to generate reference output for {test_case_name!r}\033[0m"
|
||||
)
|
||||
|
||||
if verbose:
|
||||
if ref_out:
|
||||
print("Reference stdout:")
|
||||
sys.stdout.buffer.write(ref_out)
|
||||
sys.stdout.buffer.flush()
|
||||
|
||||
if ref_err:
|
||||
print("Reference stderr:")
|
||||
sys.stderr.buffer.write(ref_err)
|
||||
sys.stderr.buffer.flush()
|
||||
|
||||
if plg_code == 0:
|
||||
print(f"\033[31;1;4mGenerated plugin output for {test_case_name!r}\033[0m")
|
||||
else:
|
||||
print(
|
||||
f"\033[31;1;4mFailed to generate plugin output for {test_case_name!r}\033[0m"
|
||||
)
|
||||
|
||||
if verbose:
|
||||
if plg_out:
|
||||
print("Plugin stdout:")
|
||||
sys.stdout.buffer.write(plg_out)
|
||||
sys.stdout.buffer.flush()
|
||||
|
||||
if plg_err:
|
||||
print("Plugin stderr:")
|
||||
sys.stderr.buffer.write(plg_err)
|
||||
sys.stderr.buffer.flush()
|
||||
|
||||
if plg_code_pyd == 0:
|
||||
print(
|
||||
f"\033[31;1;4mGenerated plugin (pydantic compatible) output for {test_case_name!r}\033[0m"
|
||||
)
|
||||
else:
|
||||
print(
|
||||
f"\033[31;1;4mFailed to generate plugin (pydantic compatible) output for {test_case_name!r}\033[0m"
|
||||
)
|
||||
|
||||
if verbose:
|
||||
if plg_out_pyd:
|
||||
print("Plugin stdout:")
|
||||
sys.stdout.buffer.write(plg_out_pyd)
|
||||
sys.stdout.buffer.flush()
|
||||
|
||||
if plg_err_pyd:
|
||||
print("Plugin stderr:")
|
||||
sys.stderr.buffer.write(plg_err_pyd)
|
||||
sys.stderr.buffer.flush()
|
||||
|
||||
return max(ref_code, plg_code, plg_code_pyd)
|
||||
|
||||
|
||||
HELP = "\n".join(
|
||||
(
|
||||
"Usage: python generate.py [-h] [-v] [DIRECTORIES or NAMES]",
|
||||
"Generate python classes for standard tests.",
|
||||
"",
|
||||
"DIRECTORIES One or more relative or absolute directories of test-cases to generate classes for.",
|
||||
" python generate.py inputs/bool inputs/double inputs/enum",
|
||||
"",
|
||||
"NAMES One or more test-case names to generate classes for.",
|
||||
" python generate.py bool double enums",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
if set(sys.argv).intersection({"-h", "--help"}):
|
||||
print(HELP)
|
||||
return
|
||||
if sys.argv[1:2] == ["-v"]:
|
||||
verbose = True
|
||||
whitelist = set(sys.argv[2:])
|
||||
else:
|
||||
verbose = False
|
||||
whitelist = set(sys.argv[1:])
|
||||
|
||||
if platform.system() == "Windows":
|
||||
asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
|
||||
|
||||
asyncio.run(generate(whitelist, verbose))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
0
tests/grpc/__init__.py
Normal file
0
tests/grpc/__init__.py
Normal file
298
tests/grpc/test_grpclib_client.py
Normal file
298
tests/grpc/test_grpclib_client.py
Normal file
|
@ -0,0 +1,298 @@
|
|||
import asyncio
|
||||
import sys
|
||||
import uuid
|
||||
|
||||
import grpclib
|
||||
import grpclib.client
|
||||
import grpclib.metadata
|
||||
import grpclib.server
|
||||
import pytest
|
||||
from grpclib.testing import ChannelFor
|
||||
|
||||
from aristaproto.grpc.util.async_channel import AsyncChannel
|
||||
from tests.output_aristaproto.service import (
|
||||
DoThingRequest,
|
||||
DoThingResponse,
|
||||
GetThingRequest,
|
||||
TestStub as ThingServiceClient,
|
||||
)
|
||||
|
||||
from .thing_service import ThingService
|
||||
|
||||
|
||||
async def _test_client(client: ThingServiceClient, name="clean room", **kwargs):
|
||||
response = await client.do_thing(DoThingRequest(name=name), **kwargs)
|
||||
assert response.names == [name]
|
||||
|
||||
|
||||
def _assert_request_meta_received(deadline, metadata):
|
||||
def server_side_test(stream):
|
||||
assert stream.deadline._timestamp == pytest.approx(
|
||||
deadline._timestamp, 1
|
||||
), "The provided deadline should be received serverside"
|
||||
assert (
|
||||
stream.metadata["authorization"] == metadata["authorization"]
|
||||
), "The provided authorization metadata should be received serverside"
|
||||
|
||||
return server_side_test
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def handler_trailer_only_unauthenticated():
|
||||
async def handler(stream: grpclib.server.Stream):
|
||||
await stream.recv_message()
|
||||
await stream.send_initial_metadata()
|
||||
await stream.send_trailing_metadata(status=grpclib.Status.UNAUTHENTICATED)
|
||||
|
||||
return handler
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_simple_service_call():
|
||||
async with ChannelFor([ThingService()]) as channel:
|
||||
await _test_client(ThingServiceClient(channel))
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_trailer_only_error_unary_unary(
|
||||
mocker, handler_trailer_only_unauthenticated
|
||||
):
|
||||
service = ThingService()
|
||||
mocker.patch.object(
|
||||
service,
|
||||
"do_thing",
|
||||
side_effect=handler_trailer_only_unauthenticated,
|
||||
autospec=True,
|
||||
)
|
||||
async with ChannelFor([service]) as channel:
|
||||
with pytest.raises(grpclib.exceptions.GRPCError) as e:
|
||||
await ThingServiceClient(channel).do_thing(DoThingRequest(name="something"))
|
||||
assert e.value.status == grpclib.Status.UNAUTHENTICATED
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_trailer_only_error_stream_unary(
|
||||
mocker, handler_trailer_only_unauthenticated
|
||||
):
|
||||
service = ThingService()
|
||||
mocker.patch.object(
|
||||
service,
|
||||
"do_many_things",
|
||||
side_effect=handler_trailer_only_unauthenticated,
|
||||
autospec=True,
|
||||
)
|
||||
async with ChannelFor([service]) as channel:
|
||||
with pytest.raises(grpclib.exceptions.GRPCError) as e:
|
||||
await ThingServiceClient(channel).do_many_things(
|
||||
do_thing_request_iterator=[DoThingRequest(name="something")]
|
||||
)
|
||||
await _test_client(ThingServiceClient(channel))
|
||||
assert e.value.status == grpclib.Status.UNAUTHENTICATED
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.skipif(
|
||||
sys.version_info < (3, 8), reason="async mock spy does works for python3.8+"
|
||||
)
|
||||
async def test_service_call_mutable_defaults(mocker):
|
||||
async with ChannelFor([ThingService()]) as channel:
|
||||
client = ThingServiceClient(channel)
|
||||
spy = mocker.spy(client, "_unary_unary")
|
||||
await _test_client(client)
|
||||
comments = spy.call_args_list[-1].args[1].comments
|
||||
await _test_client(client)
|
||||
assert spy.call_args_list[-1].args[1].comments is not comments
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_service_call_with_upfront_request_params():
|
||||
# Setting deadline
|
||||
deadline = grpclib.metadata.Deadline.from_timeout(22)
|
||||
metadata = {"authorization": "12345"}
|
||||
async with ChannelFor(
|
||||
[ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]
|
||||
) as channel:
|
||||
await _test_client(
|
||||
ThingServiceClient(channel, deadline=deadline, metadata=metadata)
|
||||
)
|
||||
|
||||
# Setting timeout
|
||||
timeout = 99
|
||||
deadline = grpclib.metadata.Deadline.from_timeout(timeout)
|
||||
metadata = {"authorization": "12345"}
|
||||
async with ChannelFor(
|
||||
[ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]
|
||||
) as channel:
|
||||
await _test_client(
|
||||
ThingServiceClient(channel, timeout=timeout, metadata=metadata)
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_service_call_lower_level_with_overrides():
|
||||
THING_TO_DO = "get milk"
|
||||
|
||||
# Setting deadline
|
||||
deadline = grpclib.metadata.Deadline.from_timeout(22)
|
||||
metadata = {"authorization": "12345"}
|
||||
kwarg_deadline = grpclib.metadata.Deadline.from_timeout(28)
|
||||
kwarg_metadata = {"authorization": "12345"}
|
||||
async with ChannelFor(
|
||||
[ThingService(test_hook=_assert_request_meta_received(deadline, metadata))]
|
||||
) as channel:
|
||||
client = ThingServiceClient(channel, deadline=deadline, metadata=metadata)
|
||||
response = await client._unary_unary(
|
||||
"/service.Test/DoThing",
|
||||
DoThingRequest(THING_TO_DO),
|
||||
DoThingResponse,
|
||||
deadline=kwarg_deadline,
|
||||
metadata=kwarg_metadata,
|
||||
)
|
||||
assert response.names == [THING_TO_DO]
|
||||
|
||||
# Setting timeout
|
||||
timeout = 99
|
||||
deadline = grpclib.metadata.Deadline.from_timeout(timeout)
|
||||
metadata = {"authorization": "12345"}
|
||||
kwarg_timeout = 9000
|
||||
kwarg_deadline = grpclib.metadata.Deadline.from_timeout(kwarg_timeout)
|
||||
kwarg_metadata = {"authorization": "09876"}
|
||||
async with ChannelFor(
|
||||
[
|
||||
ThingService(
|
||||
test_hook=_assert_request_meta_received(kwarg_deadline, kwarg_metadata),
|
||||
)
|
||||
]
|
||||
) as channel:
|
||||
client = ThingServiceClient(channel, deadline=deadline, metadata=metadata)
|
||||
response = await client._unary_unary(
|
||||
"/service.Test/DoThing",
|
||||
DoThingRequest(THING_TO_DO),
|
||||
DoThingResponse,
|
||||
timeout=kwarg_timeout,
|
||||
metadata=kwarg_metadata,
|
||||
)
|
||||
assert response.names == [THING_TO_DO]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(
|
||||
("overrides_gen",),
|
||||
[
|
||||
(lambda: dict(timeout=10),),
|
||||
(lambda: dict(deadline=grpclib.metadata.Deadline.from_timeout(10)),),
|
||||
(lambda: dict(metadata={"authorization": str(uuid.uuid4())}),),
|
||||
(lambda: dict(timeout=20, metadata={"authorization": str(uuid.uuid4())}),),
|
||||
],
|
||||
)
|
||||
async def test_service_call_high_level_with_overrides(mocker, overrides_gen):
|
||||
overrides = overrides_gen()
|
||||
request_spy = mocker.spy(grpclib.client.Channel, "request")
|
||||
name = str(uuid.uuid4())
|
||||
defaults = dict(
|
||||
timeout=99,
|
||||
deadline=grpclib.metadata.Deadline.from_timeout(99),
|
||||
metadata={"authorization": name},
|
||||
)
|
||||
|
||||
async with ChannelFor(
|
||||
[
|
||||
ThingService(
|
||||
test_hook=_assert_request_meta_received(
|
||||
deadline=grpclib.metadata.Deadline.from_timeout(
|
||||
overrides.get("timeout", 99)
|
||||
),
|
||||
metadata=overrides.get("metadata", defaults.get("metadata")),
|
||||
)
|
||||
)
|
||||
]
|
||||
) as channel:
|
||||
client = ThingServiceClient(channel, **defaults)
|
||||
await _test_client(client, name=name, **overrides)
|
||||
assert request_spy.call_count == 1
|
||||
|
||||
# for python <3.8 request_spy.call_args.kwargs do not work
|
||||
_, request_spy_call_kwargs = request_spy.call_args_list[0]
|
||||
|
||||
# ensure all overrides were successful
|
||||
for key, value in overrides.items():
|
||||
assert key in request_spy_call_kwargs
|
||||
assert request_spy_call_kwargs[key] == value
|
||||
|
||||
# ensure default values were retained
|
||||
for key in set(defaults.keys()) - set(overrides.keys()):
|
||||
assert key in request_spy_call_kwargs
|
||||
assert request_spy_call_kwargs[key] == defaults[key]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_async_gen_for_unary_stream_request():
|
||||
thing_name = "my milkshakes"
|
||||
|
||||
async with ChannelFor([ThingService()]) as channel:
|
||||
client = ThingServiceClient(channel)
|
||||
expected_versions = [5, 4, 3, 2, 1]
|
||||
async for response in client.get_thing_versions(
|
||||
GetThingRequest(name=thing_name)
|
||||
):
|
||||
assert response.name == thing_name
|
||||
assert response.version == expected_versions.pop()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_async_gen_for_stream_stream_request():
|
||||
some_things = ["cake", "cricket", "coral reef"]
|
||||
more_things = ["ball", "that", "56kmodem", "liberal humanism", "cheesesticks"]
|
||||
expected_things = (*some_things, *more_things)
|
||||
|
||||
async with ChannelFor([ThingService()]) as channel:
|
||||
client = ThingServiceClient(channel)
|
||||
# Use an AsyncChannel to decouple sending and recieving, it'll send some_things
|
||||
# immediately and we'll use it to send more_things later, after recieving some
|
||||
# results
|
||||
request_chan = AsyncChannel()
|
||||
send_initial_requests = asyncio.ensure_future(
|
||||
request_chan.send_from(GetThingRequest(name) for name in some_things)
|
||||
)
|
||||
response_index = 0
|
||||
async for response in client.get_different_things(request_chan):
|
||||
assert response.name == expected_things[response_index]
|
||||
assert response.version == response_index + 1
|
||||
response_index += 1
|
||||
if more_things:
|
||||
# Send some more requests as we receive responses to be sure coordination of
|
||||
# send/receive events doesn't matter
|
||||
await request_chan.send(GetThingRequest(more_things.pop(0)))
|
||||
elif not send_initial_requests.done():
|
||||
# Make sure the sending task it completed
|
||||
await send_initial_requests
|
||||
else:
|
||||
# No more things to send make sure channel is closed
|
||||
request_chan.close()
|
||||
assert response_index == len(
|
||||
expected_things
|
||||
), "Didn't receive all expected responses"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_stream_unary_with_empty_iterable():
|
||||
things = [] # empty
|
||||
|
||||
async with ChannelFor([ThingService()]) as channel:
|
||||
client = ThingServiceClient(channel)
|
||||
requests = [DoThingRequest(name) for name in things]
|
||||
response = await client.do_many_things(requests)
|
||||
assert len(response.names) == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_stream_stream_with_empty_iterable():
|
||||
things = [] # empty
|
||||
|
||||
async with ChannelFor([ThingService()]) as channel:
|
||||
client = ThingServiceClient(channel)
|
||||
requests = [GetThingRequest(name) for name in things]
|
||||
responses = [
|
||||
response async for response in client.get_different_things(requests)
|
||||
]
|
||||
assert len(responses) == 0
|
99
tests/grpc/test_stream_stream.py
Normal file
99
tests/grpc/test_stream_stream.py
Normal file
|
@ -0,0 +1,99 @@
|
|||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
from typing import AsyncIterator
|
||||
|
||||
import pytest
|
||||
|
||||
import aristaproto
|
||||
from aristaproto.grpc.util.async_channel import AsyncChannel
|
||||
|
||||
|
||||
@dataclass
|
||||
class Message(aristaproto.Message):
|
||||
body: str = aristaproto.string_field(1)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def expected_responses():
|
||||
return [Message("Hello world 1"), Message("Hello world 2"), Message("Done")]
|
||||
|
||||
|
||||
class ClientStub:
|
||||
async def connect(self, requests: AsyncIterator):
|
||||
await asyncio.sleep(0.1)
|
||||
async for request in requests:
|
||||
await asyncio.sleep(0.1)
|
||||
yield request
|
||||
await asyncio.sleep(0.1)
|
||||
yield Message("Done")
|
||||
|
||||
|
||||
async def to_list(generator: AsyncIterator):
|
||||
return [value async for value in generator]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def client():
|
||||
# channel = Channel(host='127.0.0.1', port=50051)
|
||||
# return ClientStub(channel)
|
||||
return ClientStub()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_from_before_connect_and_close_automatically(
|
||||
client, expected_responses
|
||||
):
|
||||
requests = AsyncChannel()
|
||||
await requests.send_from(
|
||||
[Message(body="Hello world 1"), Message(body="Hello world 2")], close=True
|
||||
)
|
||||
responses = client.connect(requests)
|
||||
|
||||
assert await to_list(responses) == expected_responses
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_from_after_connect_and_close_automatically(
|
||||
client, expected_responses
|
||||
):
|
||||
requests = AsyncChannel()
|
||||
responses = client.connect(requests)
|
||||
await requests.send_from(
|
||||
[Message(body="Hello world 1"), Message(body="Hello world 2")], close=True
|
||||
)
|
||||
|
||||
assert await to_list(responses) == expected_responses
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_from_close_manually_immediately(client, expected_responses):
|
||||
requests = AsyncChannel()
|
||||
responses = client.connect(requests)
|
||||
await requests.send_from(
|
||||
[Message(body="Hello world 1"), Message(body="Hello world 2")], close=False
|
||||
)
|
||||
requests.close()
|
||||
|
||||
assert await to_list(responses) == expected_responses
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_individually_and_close_before_connect(client, expected_responses):
|
||||
requests = AsyncChannel()
|
||||
await requests.send(Message(body="Hello world 1"))
|
||||
await requests.send(Message(body="Hello world 2"))
|
||||
requests.close()
|
||||
responses = client.connect(requests)
|
||||
|
||||
assert await to_list(responses) == expected_responses
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_individually_and_close_after_connect(client, expected_responses):
|
||||
requests = AsyncChannel()
|
||||
await requests.send(Message(body="Hello world 1"))
|
||||
await requests.send(Message(body="Hello world 2"))
|
||||
responses = client.connect(requests)
|
||||
requests.close()
|
||||
|
||||
assert await to_list(responses) == expected_responses
|
85
tests/grpc/thing_service.py
Normal file
85
tests/grpc/thing_service.py
Normal file
|
@ -0,0 +1,85 @@
|
|||
from typing import Dict
|
||||
|
||||
import grpclib
|
||||
import grpclib.server
|
||||
|
||||
from tests.output_aristaproto.service import (
|
||||
DoThingRequest,
|
||||
DoThingResponse,
|
||||
GetThingRequest,
|
||||
GetThingResponse,
|
||||
)
|
||||
|
||||
|
||||
class ThingService:
|
||||
def __init__(self, test_hook=None):
|
||||
# This lets us pass assertions to the servicer ;)
|
||||
self.test_hook = test_hook
|
||||
|
||||
async def do_thing(
|
||||
self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]"
|
||||
):
|
||||
request = await stream.recv_message()
|
||||
if self.test_hook is not None:
|
||||
self.test_hook(stream)
|
||||
await stream.send_message(DoThingResponse([request.name]))
|
||||
|
||||
async def do_many_things(
|
||||
self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]"
|
||||
):
|
||||
thing_names = [request.name async for request in stream]
|
||||
if self.test_hook is not None:
|
||||
self.test_hook(stream)
|
||||
await stream.send_message(DoThingResponse(thing_names))
|
||||
|
||||
async def get_thing_versions(
|
||||
self, stream: "grpclib.server.Stream[GetThingRequest, GetThingResponse]"
|
||||
):
|
||||
request = await stream.recv_message()
|
||||
if self.test_hook is not None:
|
||||
self.test_hook(stream)
|
||||
for version_num in range(1, 6):
|
||||
await stream.send_message(
|
||||
GetThingResponse(name=request.name, version=version_num)
|
||||
)
|
||||
|
||||
async def get_different_things(
|
||||
self, stream: "grpclib.server.Stream[GetThingRequest, GetThingResponse]"
|
||||
):
|
||||
if self.test_hook is not None:
|
||||
self.test_hook(stream)
|
||||
# Respond to each input item immediately
|
||||
response_num = 0
|
||||
async for request in stream:
|
||||
response_num += 1
|
||||
await stream.send_message(
|
||||
GetThingResponse(name=request.name, version=response_num)
|
||||
)
|
||||
|
||||
def __mapping__(self) -> Dict[str, "grpclib.const.Handler"]:
|
||||
return {
|
||||
"/service.Test/DoThing": grpclib.const.Handler(
|
||||
self.do_thing,
|
||||
grpclib.const.Cardinality.UNARY_UNARY,
|
||||
DoThingRequest,
|
||||
DoThingResponse,
|
||||
),
|
||||
"/service.Test/DoManyThings": grpclib.const.Handler(
|
||||
self.do_many_things,
|
||||
grpclib.const.Cardinality.STREAM_UNARY,
|
||||
DoThingRequest,
|
||||
DoThingResponse,
|
||||
),
|
||||
"/service.Test/GetThingVersions": grpclib.const.Handler(
|
||||
self.get_thing_versions,
|
||||
grpclib.const.Cardinality.UNARY_STREAM,
|
||||
GetThingRequest,
|
||||
GetThingResponse,
|
||||
),
|
||||
"/service.Test/GetDifferentThings": grpclib.const.Handler(
|
||||
self.get_different_things,
|
||||
grpclib.const.Cardinality.STREAM_STREAM,
|
||||
GetThingRequest,
|
||||
GetThingResponse,
|
||||
),
|
||||
}
|
3
tests/inputs/bool/bool.json
Normal file
3
tests/inputs/bool/bool.json
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"value": true
|
||||
}
|
7
tests/inputs/bool/bool.proto
Normal file
7
tests/inputs/bool/bool.proto
Normal file
|
@ -0,0 +1,7 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package bool;
|
||||
|
||||
message Test {
|
||||
bool value = 1;
|
||||
}
|
19
tests/inputs/bool/test_bool.py
Normal file
19
tests/inputs/bool/test_bool.py
Normal file
|
@ -0,0 +1,19 @@
|
|||
import pytest
|
||||
|
||||
from tests.output_aristaproto.bool import Test
|
||||
from tests.output_aristaproto_pydantic.bool import Test as TestPyd
|
||||
|
||||
|
||||
def test_value():
|
||||
message = Test()
|
||||
assert not message.value, "Boolean is False by default"
|
||||
|
||||
|
||||
def test_pydantic_no_value():
|
||||
with pytest.raises(ValueError):
|
||||
TestPyd()
|
||||
|
||||
|
||||
def test_pydantic_value():
|
||||
message = Test(value=False)
|
||||
assert not message.value
|
3
tests/inputs/bytes/bytes.json
Normal file
3
tests/inputs/bytes/bytes.json
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"data": "SGVsbG8sIFdvcmxkIQ=="
|
||||
}
|
7
tests/inputs/bytes/bytes.proto
Normal file
7
tests/inputs/bytes/bytes.proto
Normal file
|
@ -0,0 +1,7 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package bytes;
|
||||
|
||||
message Test {
|
||||
bytes data = 1;
|
||||
}
|
4
tests/inputs/casing/casing.json
Normal file
4
tests/inputs/casing/casing.json
Normal file
|
@ -0,0 +1,4 @@
|
|||
{
|
||||
"camelCase": 1,
|
||||
"snakeCase": "ONE"
|
||||
}
|
20
tests/inputs/casing/casing.proto
Normal file
20
tests/inputs/casing/casing.proto
Normal file
|
@ -0,0 +1,20 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package casing;
|
||||
|
||||
enum my_enum {
|
||||
ZERO = 0;
|
||||
ONE = 1;
|
||||
TWO = 2;
|
||||
}
|
||||
|
||||
message Test {
|
||||
int32 camelCase = 1;
|
||||
my_enum snake_case = 2;
|
||||
snake_case_message snake_case_message = 3;
|
||||
int32 UPPERCASE = 4;
|
||||
}
|
||||
|
||||
message snake_case_message {
|
||||
|
||||
}
|
23
tests/inputs/casing/test_casing.py
Normal file
23
tests/inputs/casing/test_casing.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
import tests.output_aristaproto.casing as casing
|
||||
from tests.output_aristaproto.casing import Test
|
||||
|
||||
|
||||
def test_message_attributes():
|
||||
message = Test()
|
||||
assert hasattr(
|
||||
message, "snake_case_message"
|
||||
), "snake_case field name is same in python"
|
||||
assert hasattr(message, "camel_case"), "CamelCase field is snake_case in python"
|
||||
assert hasattr(message, "uppercase"), "UPPERCASE field is lowercase in python"
|
||||
|
||||
|
||||
def test_message_casing():
|
||||
assert hasattr(
|
||||
casing, "SnakeCaseMessage"
|
||||
), "snake_case Message name is converted to CamelCase in python"
|
||||
|
||||
|
||||
def test_enum_casing():
|
||||
assert hasattr(
|
||||
casing, "MyEnum"
|
||||
), "snake_case Enum name is converted to CamelCase in python"
|
10
tests/inputs/casing_inner_class/casing_inner_class.proto
Normal file
10
tests/inputs/casing_inner_class/casing_inner_class.proto
Normal file
|
@ -0,0 +1,10 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package casing_inner_class;
|
||||
|
||||
message Test {
|
||||
message inner_class {
|
||||
sint32 old_exp = 1;
|
||||
}
|
||||
inner_class inner = 2;
|
||||
}
|
14
tests/inputs/casing_inner_class/test_casing_inner_class.py
Normal file
14
tests/inputs/casing_inner_class/test_casing_inner_class.py
Normal file
|
@ -0,0 +1,14 @@
|
|||
import tests.output_aristaproto.casing_inner_class as casing_inner_class
|
||||
|
||||
|
||||
def test_message_casing_inner_class_name():
|
||||
assert hasattr(
|
||||
casing_inner_class, "TestInnerClass"
|
||||
), "Inline defined Message is correctly converted to CamelCase"
|
||||
|
||||
|
||||
def test_message_casing_inner_class_attributes():
|
||||
message = casing_inner_class.Test()
|
||||
assert hasattr(
|
||||
message.inner, "old_exp"
|
||||
), "Inline defined Message attribute is snake_case"
|
|
@ -0,0 +1,9 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package casing_message_field_uppercase;
|
||||
|
||||
message Test {
|
||||
int32 UPPERCASE = 1;
|
||||
int32 UPPERCASE_V2 = 2;
|
||||
int32 UPPER_CAMEL_CASE = 3;
|
||||
}
|
|
@ -0,0 +1,14 @@
|
|||
from tests.output_aristaproto.casing_message_field_uppercase import Test
|
||||
|
||||
|
||||
def test_message_casing():
|
||||
message = Test()
|
||||
assert hasattr(
|
||||
message, "uppercase"
|
||||
), "UPPERCASE attribute is converted to 'uppercase' in python"
|
||||
assert hasattr(
|
||||
message, "uppercase_v2"
|
||||
), "UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python"
|
||||
assert hasattr(
|
||||
message, "upper_camel_case"
|
||||
), "UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python"
|
30
tests/inputs/config.py
Normal file
30
tests/inputs/config.py
Normal file
|
@ -0,0 +1,30 @@
|
|||
# Test cases that are expected to fail, e.g. unimplemented features or bug-fixes.
|
||||
# Remove from list when fixed.
|
||||
xfail = {
|
||||
"namespace_keywords", # 70
|
||||
"googletypes_struct", # 9
|
||||
"googletypes_value", # 9
|
||||
"import_capitalized_package",
|
||||
"example", # This is the example in the readme. Not a test.
|
||||
}
|
||||
|
||||
services = {
|
||||
"googletypes_request",
|
||||
"googletypes_response",
|
||||
"googletypes_response_embedded",
|
||||
"service",
|
||||
"service_separate_packages",
|
||||
"import_service_input_message",
|
||||
"googletypes_service_returns_empty",
|
||||
"googletypes_service_returns_googletype",
|
||||
"example_service",
|
||||
"empty_service",
|
||||
"service_uppercase",
|
||||
}
|
||||
|
||||
|
||||
# Indicate json sample messages to skip when testing that json (de)serialization
|
||||
# is symmetrical becuase some cases legitimately are not symmetrical.
|
||||
# Each key references the name of the test scenario and the values in the tuple
|
||||
# Are the names of the json files.
|
||||
non_symmetrical_json = {"empty_repeated": ("empty_repeated",)}
|
6
tests/inputs/deprecated/deprecated.json
Normal file
6
tests/inputs/deprecated/deprecated.json
Normal file
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"message": {
|
||||
"value": "hello"
|
||||
},
|
||||
"value": 10
|
||||
}
|
14
tests/inputs/deprecated/deprecated.proto
Normal file
14
tests/inputs/deprecated/deprecated.proto
Normal file
|
@ -0,0 +1,14 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package deprecated;
|
||||
|
||||
// Some documentation about the Test message.
|
||||
message Test {
|
||||
Message message = 1 [deprecated=true];
|
||||
int32 value = 2;
|
||||
}
|
||||
|
||||
message Message {
|
||||
option deprecated = true;
|
||||
string value = 1;
|
||||
}
|
3
tests/inputs/double/double-negative.json
Normal file
3
tests/inputs/double/double-negative.json
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"count": -123.45
|
||||
}
|
3
tests/inputs/double/double.json
Normal file
3
tests/inputs/double/double.json
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"count": 123.45
|
||||
}
|
7
tests/inputs/double/double.proto
Normal file
7
tests/inputs/double/double.proto
Normal file
|
@ -0,0 +1,7 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package double;
|
||||
|
||||
message Test {
|
||||
double count = 1;
|
||||
}
|
3
tests/inputs/empty_repeated/empty_repeated.json
Normal file
3
tests/inputs/empty_repeated/empty_repeated.json
Normal file
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"msg": [{"values":[]}]
|
||||
}
|
11
tests/inputs/empty_repeated/empty_repeated.proto
Normal file
11
tests/inputs/empty_repeated/empty_repeated.proto
Normal file
|
@ -0,0 +1,11 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package empty_repeated;
|
||||
|
||||
message MessageA {
|
||||
repeated float values = 1;
|
||||
}
|
||||
|
||||
message Test {
|
||||
repeated MessageA msg = 1;
|
||||
}
|
7
tests/inputs/empty_service/empty_service.proto
Normal file
7
tests/inputs/empty_service/empty_service.proto
Normal file
|
@ -0,0 +1,7 @@
|
|||
/* Empty service without comments */
|
||||
syntax = "proto3";
|
||||
|
||||
package empty_service;
|
||||
|
||||
service Test {
|
||||
}
|
20
tests/inputs/entry/entry.proto
Normal file
20
tests/inputs/entry/entry.proto
Normal file
|
@ -0,0 +1,20 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package entry;
|
||||
|
||||
// This is a minimal example of a repeated message field that caused issues when
|
||||
// checking whether a message is a map.
|
||||
//
|
||||
// During the check wheter a field is a "map", the string "entry" is added to
|
||||
// the field name, checked against the type name and then further checks are
|
||||
// made against the nested type of a parent message. In this edge-case, the
|
||||
// first check would pass even though it shouldn't and that would cause an
|
||||
// error because the parent type does not have a "nested_type" attribute.
|
||||
|
||||
message Test {
|
||||
repeated ExportEntry export = 1;
|
||||
}
|
||||
|
||||
message ExportEntry {
|
||||
string name = 1;
|
||||
}
|
9
tests/inputs/enum/enum.json
Normal file
9
tests/inputs/enum/enum.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"choice": "FOUR",
|
||||
"choices": [
|
||||
"ZERO",
|
||||
"ONE",
|
||||
"THREE",
|
||||
"FOUR"
|
||||
]
|
||||
}
|
25
tests/inputs/enum/enum.proto
Normal file
25
tests/inputs/enum/enum.proto
Normal file
|
@ -0,0 +1,25 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package enum;
|
||||
|
||||
// Tests that enums are correctly serialized and that it correctly handles skipped and out-of-order enum values
|
||||
message Test {
|
||||
Choice choice = 1;
|
||||
repeated Choice choices = 2;
|
||||
}
|
||||
|
||||
enum Choice {
|
||||
ZERO = 0;
|
||||
ONE = 1;
|
||||
// TWO = 2;
|
||||
FOUR = 4;
|
||||
THREE = 3;
|
||||
}
|
||||
|
||||
// A "C" like enum with the enum name prefixed onto members, these should be stripped
|
||||
enum ArithmeticOperator {
|
||||
ARITHMETIC_OPERATOR_NONE = 0;
|
||||
ARITHMETIC_OPERATOR_PLUS = 1;
|
||||
ARITHMETIC_OPERATOR_MINUS = 2;
|
||||
ARITHMETIC_OPERATOR_0_PREFIXED = 3;
|
||||
}
|
114
tests/inputs/enum/test_enum.py
Normal file
114
tests/inputs/enum/test_enum.py
Normal file
|
@ -0,0 +1,114 @@
|
|||
from tests.output_aristaproto.enum import (
|
||||
ArithmeticOperator,
|
||||
Choice,
|
||||
Test,
|
||||
)
|
||||
|
||||
|
||||
def test_enum_set_and_get():
|
||||
assert Test(choice=Choice.ZERO).choice == Choice.ZERO
|
||||
assert Test(choice=Choice.ONE).choice == Choice.ONE
|
||||
assert Test(choice=Choice.THREE).choice == Choice.THREE
|
||||
assert Test(choice=Choice.FOUR).choice == Choice.FOUR
|
||||
|
||||
|
||||
def test_enum_set_with_int():
|
||||
assert Test(choice=0).choice == Choice.ZERO
|
||||
assert Test(choice=1).choice == Choice.ONE
|
||||
assert Test(choice=3).choice == Choice.THREE
|
||||
assert Test(choice=4).choice == Choice.FOUR
|
||||
|
||||
|
||||
def test_enum_is_comparable_with_int():
|
||||
assert Test(choice=Choice.ZERO).choice == 0
|
||||
assert Test(choice=Choice.ONE).choice == 1
|
||||
assert Test(choice=Choice.THREE).choice == 3
|
||||
assert Test(choice=Choice.FOUR).choice == 4
|
||||
|
||||
|
||||
def test_enum_to_dict():
|
||||
assert (
|
||||
"choice" not in Test(choice=Choice.ZERO).to_dict()
|
||||
), "Default enum value is not serialized"
|
||||
assert (
|
||||
Test(choice=Choice.ZERO).to_dict(include_default_values=True)["choice"]
|
||||
== "ZERO"
|
||||
)
|
||||
assert Test(choice=Choice.ONE).to_dict()["choice"] == "ONE"
|
||||
assert Test(choice=Choice.THREE).to_dict()["choice"] == "THREE"
|
||||
assert Test(choice=Choice.FOUR).to_dict()["choice"] == "FOUR"
|
||||
|
||||
|
||||
def test_repeated_enum_is_comparable_with_int():
|
||||
assert Test(choices=[Choice.ZERO]).choices == [0]
|
||||
assert Test(choices=[Choice.ONE]).choices == [1]
|
||||
assert Test(choices=[Choice.THREE]).choices == [3]
|
||||
assert Test(choices=[Choice.FOUR]).choices == [4]
|
||||
|
||||
|
||||
def test_repeated_enum_set_and_get():
|
||||
assert Test(choices=[Choice.ZERO]).choices == [Choice.ZERO]
|
||||
assert Test(choices=[Choice.ONE]).choices == [Choice.ONE]
|
||||
assert Test(choices=[Choice.THREE]).choices == [Choice.THREE]
|
||||
assert Test(choices=[Choice.FOUR]).choices == [Choice.FOUR]
|
||||
|
||||
|
||||
def test_repeated_enum_to_dict():
|
||||
assert Test(choices=[Choice.ZERO]).to_dict()["choices"] == ["ZERO"]
|
||||
assert Test(choices=[Choice.ONE]).to_dict()["choices"] == ["ONE"]
|
||||
assert Test(choices=[Choice.THREE]).to_dict()["choices"] == ["THREE"]
|
||||
assert Test(choices=[Choice.FOUR]).to_dict()["choices"] == ["FOUR"]
|
||||
|
||||
all_enums_dict = Test(
|
||||
choices=[Choice.ZERO, Choice.ONE, Choice.THREE, Choice.FOUR]
|
||||
).to_dict()
|
||||
assert (all_enums_dict["choices"]) == ["ZERO", "ONE", "THREE", "FOUR"]
|
||||
|
||||
|
||||
def test_repeated_enum_with_single_value_to_dict():
|
||||
assert Test(choices=Choice.ONE).to_dict()["choices"] == ["ONE"]
|
||||
assert Test(choices=1).to_dict()["choices"] == ["ONE"]
|
||||
|
||||
|
||||
def test_repeated_enum_with_non_list_iterables_to_dict():
|
||||
assert Test(choices=(1, 3)).to_dict()["choices"] == ["ONE", "THREE"]
|
||||
assert Test(choices=(1, 3)).to_dict()["choices"] == ["ONE", "THREE"]
|
||||
assert Test(choices=(Choice.ONE, Choice.THREE)).to_dict()["choices"] == [
|
||||
"ONE",
|
||||
"THREE",
|
||||
]
|
||||
|
||||
def enum_generator():
|
||||
yield Choice.ONE
|
||||
yield Choice.THREE
|
||||
|
||||
assert Test(choices=enum_generator()).to_dict()["choices"] == ["ONE", "THREE"]
|
||||
|
||||
|
||||
def test_enum_mapped_on_parse():
|
||||
# test default value
|
||||
b = Test().parse(bytes(Test()))
|
||||
assert b.choice.name == Choice.ZERO.name
|
||||
assert b.choices == []
|
||||
|
||||
# test non default value
|
||||
a = Test().parse(bytes(Test(choice=Choice.ONE)))
|
||||
assert a.choice.name == Choice.ONE.name
|
||||
assert b.choices == []
|
||||
|
||||
# test repeated
|
||||
c = Test().parse(bytes(Test(choices=[Choice.THREE, Choice.FOUR])))
|
||||
assert c.choices[0].name == Choice.THREE.name
|
||||
assert c.choices[1].name == Choice.FOUR.name
|
||||
|
||||
# bonus: defaults after empty init are also mapped
|
||||
assert Test().choice.name == Choice.ZERO.name
|
||||
|
||||
|
||||
def test_renamed_enum_members():
|
||||
assert set(ArithmeticOperator.__members__) == {
|
||||
"NONE",
|
||||
"PLUS",
|
||||
"MINUS",
|
||||
"_0_PREFIXED",
|
||||
}
|
911
tests/inputs/example/example.proto
Normal file
911
tests/inputs/example/example.proto
Normal file
|
@ -0,0 +1,911 @@
|
|||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: kenton@google.com (Kenton Varda)
|
||||
// Based on original Protocol Buffers design by
|
||||
// Sanjay Ghemawat, Jeff Dean, and others.
|
||||
//
|
||||
// The messages in this file describe the definitions found in .proto files.
|
||||
// A valid .proto file can be translated directly to a FileDescriptorProto
|
||||
// without any other information (e.g. without reading its imports).
|
||||
|
||||
|
||||
syntax = "proto2";
|
||||
|
||||
package example;
|
||||
|
||||
// package google.protobuf;
|
||||
|
||||
option go_package = "google.golang.org/protobuf/types/descriptorpb";
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "DescriptorProtos";
|
||||
option csharp_namespace = "Google.Protobuf.Reflection";
|
||||
option objc_class_prefix = "GPB";
|
||||
option cc_enable_arenas = true;
|
||||
|
||||
// descriptor.proto must be optimized for speed because reflection-based
|
||||
// algorithms don't work during bootstrapping.
|
||||
option optimize_for = SPEED;
|
||||
|
||||
// The protocol compiler can output a FileDescriptorSet containing the .proto
|
||||
// files it parses.
|
||||
message FileDescriptorSet {
|
||||
repeated FileDescriptorProto file = 1;
|
||||
}
|
||||
|
||||
// Describes a complete .proto file.
|
||||
message FileDescriptorProto {
|
||||
optional string name = 1; // file name, relative to root of source tree
|
||||
optional string package = 2; // e.g. "foo", "foo.bar", etc.
|
||||
|
||||
// Names of files imported by this file.
|
||||
repeated string dependency = 3;
|
||||
// Indexes of the public imported files in the dependency list above.
|
||||
repeated int32 public_dependency = 10;
|
||||
// Indexes of the weak imported files in the dependency list.
|
||||
// For Google-internal migration only. Do not use.
|
||||
repeated int32 weak_dependency = 11;
|
||||
|
||||
// All top-level definitions in this file.
|
||||
repeated DescriptorProto message_type = 4;
|
||||
repeated EnumDescriptorProto enum_type = 5;
|
||||
repeated ServiceDescriptorProto service = 6;
|
||||
repeated FieldDescriptorProto extension = 7;
|
||||
|
||||
optional FileOptions options = 8;
|
||||
|
||||
// This field contains optional information about the original source code.
|
||||
// You may safely remove this entire field without harming runtime
|
||||
// functionality of the descriptors -- the information is needed only by
|
||||
// development tools.
|
||||
optional SourceCodeInfo source_code_info = 9;
|
||||
|
||||
// The syntax of the proto file.
|
||||
// The supported values are "proto2" and "proto3".
|
||||
optional string syntax = 12;
|
||||
}
|
||||
|
||||
// Describes a message type.
|
||||
message DescriptorProto {
|
||||
optional string name = 1;
|
||||
|
||||
repeated FieldDescriptorProto field = 2;
|
||||
repeated FieldDescriptorProto extension = 6;
|
||||
|
||||
repeated DescriptorProto nested_type = 3;
|
||||
repeated EnumDescriptorProto enum_type = 4;
|
||||
|
||||
message ExtensionRange {
|
||||
optional int32 start = 1; // Inclusive.
|
||||
optional int32 end = 2; // Exclusive.
|
||||
|
||||
optional ExtensionRangeOptions options = 3;
|
||||
}
|
||||
repeated ExtensionRange extension_range = 5;
|
||||
|
||||
repeated OneofDescriptorProto oneof_decl = 8;
|
||||
|
||||
optional MessageOptions options = 7;
|
||||
|
||||
// Range of reserved tag numbers. Reserved tag numbers may not be used by
|
||||
// fields or extension ranges in the same message. Reserved ranges may
|
||||
// not overlap.
|
||||
message ReservedRange {
|
||||
optional int32 start = 1; // Inclusive.
|
||||
optional int32 end = 2; // Exclusive.
|
||||
}
|
||||
repeated ReservedRange reserved_range = 9;
|
||||
// Reserved field names, which may not be used by fields in the same message.
|
||||
// A given name may only be reserved once.
|
||||
repeated string reserved_name = 10;
|
||||
}
|
||||
|
||||
message ExtensionRangeOptions {
|
||||
// The parser stores options it doesn't recognize here. See above.
|
||||
repeated UninterpretedOption uninterpreted_option = 999;
|
||||
|
||||
|
||||
// Clients can define custom options in extensions of this message. See above.
|
||||
extensions 1000 to max;
|
||||
}
|
||||
|
||||
// Describes a field within a message.
|
||||
message FieldDescriptorProto {
|
||||
enum Type {
|
||||
// 0 is reserved for errors.
|
||||
// Order is weird for historical reasons.
|
||||
TYPE_DOUBLE = 1;
|
||||
TYPE_FLOAT = 2;
|
||||
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
|
||||
// negative values are likely.
|
||||
TYPE_INT64 = 3;
|
||||
TYPE_UINT64 = 4;
|
||||
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
|
||||
// negative values are likely.
|
||||
TYPE_INT32 = 5;
|
||||
TYPE_FIXED64 = 6;
|
||||
TYPE_FIXED32 = 7;
|
||||
TYPE_BOOL = 8;
|
||||
TYPE_STRING = 9;
|
||||
// Tag-delimited aggregate.
|
||||
// Group type is deprecated and not supported in proto3. However, Proto3
|
||||
// implementations should still be able to parse the group wire format and
|
||||
// treat group fields as unknown fields.
|
||||
TYPE_GROUP = 10;
|
||||
TYPE_MESSAGE = 11; // Length-delimited aggregate.
|
||||
|
||||
// New in version 2.
|
||||
TYPE_BYTES = 12;
|
||||
TYPE_UINT32 = 13;
|
||||
TYPE_ENUM = 14;
|
||||
TYPE_SFIXED32 = 15;
|
||||
TYPE_SFIXED64 = 16;
|
||||
TYPE_SINT32 = 17; // Uses ZigZag encoding.
|
||||
TYPE_SINT64 = 18; // Uses ZigZag encoding.
|
||||
}
|
||||
|
||||
enum Label {
|
||||
// 0 is reserved for errors
|
||||
LABEL_OPTIONAL = 1;
|
||||
LABEL_REQUIRED = 2;
|
||||
LABEL_REPEATED = 3;
|
||||
}
|
||||
|
||||
optional string name = 1;
|
||||
optional int32 number = 3;
|
||||
optional Label label = 4;
|
||||
|
||||
// If type_name is set, this need not be set. If both this and type_name
|
||||
// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
|
||||
optional Type type = 5;
|
||||
|
||||
// For message and enum types, this is the name of the type. If the name
|
||||
// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
|
||||
// rules are used to find the type (i.e. first the nested types within this
|
||||
// message are searched, then within the parent, on up to the root
|
||||
// namespace).
|
||||
optional string type_name = 6;
|
||||
|
||||
// For extensions, this is the name of the type being extended. It is
|
||||
// resolved in the same manner as type_name.
|
||||
optional string extendee = 2;
|
||||
|
||||
// For numeric types, contains the original text representation of the value.
|
||||
// For booleans, "true" or "false".
|
||||
// For strings, contains the default text contents (not escaped in any way).
|
||||
// For bytes, contains the C escaped value. All bytes >= 128 are escaped.
|
||||
// TODO(kenton): Base-64 encode?
|
||||
optional string default_value = 7;
|
||||
|
||||
// If set, gives the index of a oneof in the containing type's oneof_decl
|
||||
// list. This field is a member of that oneof.
|
||||
optional int32 oneof_index = 9;
|
||||
|
||||
// JSON name of this field. The value is set by protocol compiler. If the
|
||||
// user has set a "json_name" option on this field, that option's value
|
||||
// will be used. Otherwise, it's deduced from the field's name by converting
|
||||
// it to camelCase.
|
||||
optional string json_name = 10;
|
||||
|
||||
optional FieldOptions options = 8;
|
||||
|
||||
// If true, this is a proto3 "optional". When a proto3 field is optional, it
|
||||
// tracks presence regardless of field type.
|
||||
//
|
||||
// When proto3_optional is true, this field must be belong to a oneof to
|
||||
// signal to old proto3 clients that presence is tracked for this field. This
|
||||
// oneof is known as a "synthetic" oneof, and this field must be its sole
|
||||
// member (each proto3 optional field gets its own synthetic oneof). Synthetic
|
||||
// oneofs exist in the descriptor only, and do not generate any API. Synthetic
|
||||
// oneofs must be ordered after all "real" oneofs.
|
||||
//
|
||||
// For message fields, proto3_optional doesn't create any semantic change,
|
||||
// since non-repeated message fields always track presence. However it still
|
||||
// indicates the semantic detail of whether the user wrote "optional" or not.
|
||||
// This can be useful for round-tripping the .proto file. For consistency we
|
||||
// give message fields a synthetic oneof also, even though it is not required
|
||||
// to track presence. This is especially important because the parser can't
|
||||
// tell if a field is a message or an enum, so it must always create a
|
||||
// synthetic oneof.
|
||||
//
|
||||
// Proto2 optional fields do not set this flag, because they already indicate
|
||||
// optional with `LABEL_OPTIONAL`.
|
||||
optional bool proto3_optional = 17;
|
||||
}
|
||||
|
||||
// Describes a oneof.
|
||||
message OneofDescriptorProto {
|
||||
optional string name = 1;
|
||||
optional OneofOptions options = 2;
|
||||
}
|
||||
|
||||
// Describes an enum type.
|
||||
message EnumDescriptorProto {
|
||||
optional string name = 1;
|
||||
|
||||
repeated EnumValueDescriptorProto value = 2;
|
||||
|
||||
optional EnumOptions options = 3;
|
||||
|
||||
// Range of reserved numeric values. Reserved values may not be used by
|
||||
// entries in the same enum. Reserved ranges may not overlap.
|
||||
//
|
||||
// Note that this is distinct from DescriptorProto.ReservedRange in that it
|
||||
// is inclusive such that it can appropriately represent the entire int32
|
||||
// domain.
|
||||
message EnumReservedRange {
|
||||
optional int32 start = 1; // Inclusive.
|
||||
optional int32 end = 2; // Inclusive.
|
||||
}
|
||||
|
||||
// Range of reserved numeric values. Reserved numeric values may not be used
|
||||
// by enum values in the same enum declaration. Reserved ranges may not
|
||||
// overlap.
|
||||
repeated EnumReservedRange reserved_range = 4;
|
||||
|
||||
// Reserved enum value names, which may not be reused. A given name may only
|
||||
// be reserved once.
|
||||
repeated string reserved_name = 5;
|
||||
}
|
||||
|
||||
// Describes a value within an enum.
|
||||
message EnumValueDescriptorProto {
|
||||
optional string name = 1;
|
||||
optional int32 number = 2;
|
||||
|
||||
optional EnumValueOptions options = 3;
|
||||
}
|
||||
|
||||
// Describes a service.
|
||||
message ServiceDescriptorProto {
|
||||
optional string name = 1;
|
||||
repeated MethodDescriptorProto method = 2;
|
||||
|
||||
optional ServiceOptions options = 3;
|
||||
}
|
||||
|
||||
// Describes a method of a service.
|
||||
message MethodDescriptorProto {
|
||||
optional string name = 1;
|
||||
|
||||
// Input and output type names. These are resolved in the same way as
|
||||
// FieldDescriptorProto.type_name, but must refer to a message type.
|
||||
optional string input_type = 2;
|
||||
optional string output_type = 3;
|
||||
|
||||
optional MethodOptions options = 4;
|
||||
|
||||
// Identifies if client streams multiple client messages
|
||||
optional bool client_streaming = 5 [default = false];
|
||||
// Identifies if server streams multiple server messages
|
||||
optional bool server_streaming = 6 [default = false];
|
||||
}
|
||||
|
||||
|
||||
// ===================================================================
|
||||
// Options
|
||||
|
||||
// Each of the definitions above may have "options" attached. These are
|
||||
// just annotations which may cause code to be generated slightly differently
|
||||
// or may contain hints for code that manipulates protocol messages.
|
||||
//
|
||||
// Clients may define custom options as extensions of the *Options messages.
|
||||
// These extensions may not yet be known at parsing time, so the parser cannot
|
||||
// store the values in them. Instead it stores them in a field in the *Options
|
||||
// message called uninterpreted_option. This field must have the same name
|
||||
// across all *Options messages. We then use this field to populate the
|
||||
// extensions when we build a descriptor, at which point all protos have been
|
||||
// parsed and so all extensions are known.
|
||||
//
|
||||
// Extension numbers for custom options may be chosen as follows:
|
||||
// * For options which will only be used within a single application or
|
||||
// organization, or for experimental options, use field numbers 50000
|
||||
// through 99999. It is up to you to ensure that you do not use the
|
||||
// same number for multiple options.
|
||||
// * For options which will be published and used publicly by multiple
|
||||
// independent entities, e-mail protobuf-global-extension-registry@google.com
|
||||
// to reserve extension numbers. Simply provide your project name (e.g.
|
||||
// Objective-C plugin) and your project website (if available) -- there's no
|
||||
// need to explain how you intend to use them. Usually you only need one
|
||||
// extension number. You can declare multiple options with only one extension
|
||||
// number by putting them in a sub-message. See the Custom Options section of
|
||||
// the docs for examples:
|
||||
// https://developers.google.com/protocol-buffers/docs/proto#options
|
||||
// If this turns out to be popular, a web service will be set up
|
||||
// to automatically assign option numbers.
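//
// As a minimal sketch (hypothetical option name and number, not part of
// this file), a custom option in the 50000 to 99999 range described above
// could be declared and applied like this:
//
//   import "google/protobuf/descriptor.proto";
//
//   extend google.protobuf.FileOptions {
//     optional string my_file_banner = 50001;
//   }
//
//   // in any .proto file that imports the extension above:
//   option (my_file_banner) = "built by team-x";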
|
||||
|
||||
message FileOptions {
|
||||
|
||||
// Sets the Java package where classes generated from this .proto will be
|
||||
// placed. By default, the proto package is used, but this is often
|
||||
// inappropriate because proto packages do not normally start with backwards
|
||||
// domain names.
|
||||
optional string java_package = 1;
|
||||
|
||||
|
||||
// If set, all the classes from the .proto file are wrapped in a single
|
||||
// outer class with the given name. This applies to both Proto1
|
||||
// (equivalent to the old "--one_java_file" option) and Proto2 (where
|
||||
// a .proto always translates to a single class, but you may want to
|
||||
// explicitly choose the class name).
|
||||
optional string java_outer_classname = 8;
|
||||
|
||||
// If set true, then the Java code generator will generate a separate .java
|
||||
// file for each top-level message, enum, and service defined in the .proto
|
||||
// file. Thus, these types will *not* be nested inside the outer class
|
||||
// named by java_outer_classname. However, the outer class will still be
|
||||
// generated to contain the file's getDescriptor() method as well as any
|
||||
// top-level extensions defined in the file.
|
||||
optional bool java_multiple_files = 10 [default = false];
|
||||
|
||||
// This option does nothing.
|
||||
optional bool java_generate_equals_and_hash = 20 [deprecated=true];
|
||||
|
||||
// If set true, then the Java2 code generator will generate code that
|
||||
// throws an exception whenever an attempt is made to assign a non-UTF-8
|
||||
// byte sequence to a string field.
|
||||
// Message reflection will do the same.
|
||||
// However, an extension field still accepts non-UTF-8 byte sequences.
|
||||
// This option has no effect when used with the lite runtime.
|
||||
optional bool java_string_check_utf8 = 27 [default = false];
|
||||
|
||||
|
||||
// Generated classes can be optimized for speed or code size.
|
||||
enum OptimizeMode {
|
||||
SPEED = 1; // Generate complete code for parsing, serialization,
|
||||
// etc.
|
||||
CODE_SIZE = 2; // Use ReflectionOps to implement these methods.
|
||||
LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
|
||||
}
|
||||
optional OptimizeMode optimize_for = 9 [default = SPEED];
|
||||
|
||||
// Sets the Go package where structs generated from this .proto will be
|
||||
// placed. If omitted, the Go package will be derived from the following:
|
||||
// - The basename of the package import path, if provided.
|
||||
// - Otherwise, the package statement in the .proto file, if present.
|
||||
// - Otherwise, the basename of the .proto file, without extension.
|
||||
optional string go_package = 11;
|
||||
|
||||
|
||||
|
||||
|
||||
// Should generic services be generated in each language? "Generic" services
|
||||
// are not specific to any particular RPC system. They are generated by the
|
||||
// main code generators in each language (without additional plugins).
|
||||
// Generic services were the only kind of service generation supported by
|
||||
// early versions of google.protobuf.
|
||||
//
|
||||
// Generic services are now considered deprecated in favor of using plugins
|
||||
// that generate code specific to your particular RPC system. Therefore,
|
||||
// these default to false. Old code which depends on generic services should
|
||||
// explicitly set them to true.
|
||||
optional bool cc_generic_services = 16 [default = false];
|
||||
optional bool java_generic_services = 17 [default = false];
|
||||
optional bool py_generic_services = 18 [default = false];
|
||||
optional bool php_generic_services = 42 [default = false];
|
||||
|
||||
// Is this file deprecated?
|
||||
// Depending on the target platform, this can emit Deprecated annotations
|
||||
// for everything in the file, or it will be completely ignored; in the very
|
||||
// least, this is a formalization for deprecating files.
|
||||
optional bool deprecated = 23 [default = false];
|
||||
|
||||
// Enables the use of arenas for the proto messages in this file. This applies
|
||||
// only to generated classes for C++.
|
||||
optional bool cc_enable_arenas = 31 [default = true];
|
||||
|
||||
|
||||
// Sets the objective c class prefix which is prepended to all objective c
|
||||
// generated classes from this .proto. There is no default.
|
||||
optional string objc_class_prefix = 36;
|
||||
|
||||
// Namespace for generated classes; defaults to the package.
|
||||
optional string csharp_namespace = 37;
|
||||
|
||||
// By default Swift generators will take the proto package and CamelCase it
|
||||
// replacing '.' with underscore and use that to prefix the types/symbols
|
||||
// defined. When this option is provided, they will use this value instead
|
||||
// to prefix the types/symbols defined.
|
||||
optional string swift_prefix = 39;
|
||||
|
||||
// Sets the php class prefix which is prepended to all php generated classes
|
||||
// from this .proto. Default is empty.
|
||||
optional string php_class_prefix = 40;
|
||||
|
||||
// Use this option to change the namespace of php generated classes. Default
|
||||
// is empty. When this option is empty, the package name will be used for
|
||||
// determining the namespace.
|
||||
optional string php_namespace = 41;
|
||||
|
||||
// Use this option to change the namespace of php generated metadata classes.
|
||||
// Default is empty. When this option is empty, the proto file name will be
|
||||
// used for determining the namespace.
|
||||
optional string php_metadata_namespace = 44;
|
||||
|
||||
// Use this option to change the package of ruby generated classes. Default
|
||||
// is empty. When this option is not set, the package name will be used for
|
||||
// determining the ruby package.
|
||||
optional string ruby_package = 45;
|
||||
|
||||
|
||||
// The parser stores options it doesn't recognize here.
|
||||
// See the documentation for the "Options" section above.
|
||||
repeated UninterpretedOption uninterpreted_option = 999;
|
||||
|
||||
// Clients can define custom options in extensions of this message.
|
||||
// See the documentation for the "Options" section above.
|
||||
extensions 1000 to max;
|
||||
|
||||
reserved 38;
|
||||
}
|
||||
|
||||
message MessageOptions {
|
||||
// Set true to use the old proto1 MessageSet wire format for extensions.
|
||||
// This is provided for backwards-compatibility with the MessageSet wire
|
||||
// format. You should not use this for any other reason: It's less
|
||||
// efficient, has fewer features, and is more complicated.
|
||||
//
|
||||
// The message must be defined exactly as follows:
|
||||
// message Foo {
|
||||
// option message_set_wire_format = true;
|
||||
// extensions 4 to max;
|
||||
// }
|
||||
// Note that the message cannot have any defined fields; MessageSets only
|
||||
// have extensions.
|
||||
//
|
||||
// All extensions of your type must be singular messages; e.g. they cannot
|
||||
// be int32s, enums, or repeated messages.
|
||||
//
|
||||
// Because this is an option, the above two restrictions are not enforced by
|
||||
// the protocol compiler.
|
||||
optional bool message_set_wire_format = 1 [default = false];
|
||||
|
||||
// Disables the generation of the standard "descriptor()" accessor, which can
|
||||
// conflict with a field of the same name. This is meant to make migration
|
||||
// from proto1 easier; new code should avoid fields named "descriptor".
|
||||
optional bool no_standard_descriptor_accessor = 2 [default = false];
|
||||
|
||||
// Is this message deprecated?
|
||||
// Depending on the target platform, this can emit Deprecated annotations
|
||||
// for the message, or it will be completely ignored; in the very least,
|
||||
// this is a formalization for deprecating messages.
|
||||
optional bool deprecated = 3 [default = false];
|
||||
|
||||
// Whether the message is an automatically generated map entry type for the
|
||||
// maps field.
|
||||
//
|
||||
// For maps fields:
|
||||
// map<KeyType, ValueType> map_field = 1;
|
||||
// The parsed descriptor looks like:
|
||||
// message MapFieldEntry {
|
||||
// option map_entry = true;
|
||||
// optional KeyType key = 1;
|
||||
// optional ValueType value = 2;
|
||||
// }
|
||||
// repeated MapFieldEntry map_field = 1;
|
||||
//
|
||||
// Implementations may choose not to generate the map_entry=true message, but
|
||||
// use a native map in the target language to hold the keys and values.
|
||||
// The reflection APIs in such implementations still need to work as
|
||||
// if the field is a repeated message field.
|
||||
//
|
||||
// NOTE: Do not set the option in .proto files. Always use the maps syntax
|
||||
// instead. The option should only be implicitly set by the proto compiler
|
||||
// parser.
|
||||
optional bool map_entry = 7;
|
||||
|
||||
reserved 8; // javalite_serializable
|
||||
reserved 9; // javanano_as_lite
|
||||
|
||||
|
||||
// The parser stores options it doesn't recognize here. See above.
|
||||
repeated UninterpretedOption uninterpreted_option = 999;
|
||||
|
||||
// Clients can define custom options in extensions of this message. See above.
|
||||
extensions 1000 to max;
|
||||
}
|
||||
|
||||
message FieldOptions {
|
||||
// The ctype option instructs the C++ code generator to use a different
|
||||
// representation of the field than it normally would. See the specific
|
||||
// options below. This option is not yet implemented in the open source
|
||||
// release -- sorry, we'll try to include it in a future version!
|
||||
optional CType ctype = 1 [default = STRING];
|
||||
enum CType {
|
||||
// Default mode.
|
||||
STRING = 0;
|
||||
|
||||
CORD = 1;
|
||||
|
||||
STRING_PIECE = 2;
|
||||
}
|
||||
// The packed option can be enabled for repeated primitive fields to enable
|
||||
// a more efficient representation on the wire. Rather than repeatedly
|
||||
// writing the tag and type for each element, the entire array is encoded as
|
||||
// a single length-delimited blob. In proto3, only explicitly setting it to
|
||||
// false will avoid using packed encoding.
|
||||
optional bool packed = 2;
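// A minimal sketch (hypothetical field, not part of this file) of opting out
// of packed encoding in proto3, as described above:
//   repeated int32 samples = 4 [packed = false];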
|
||||
|
||||
// The jstype option determines the JavaScript type used for values of the
|
||||
// field. The option is permitted only for 64 bit integral and fixed types
|
||||
// (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
|
||||
// is represented as JavaScript string, which avoids loss of precision that
|
||||
// can happen when a large value is converted to a floating point JavaScript.
|
||||
// Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
|
||||
// use the JavaScript "number" type. The behavior of the default option
|
||||
// JS_NORMAL is implementation dependent.
|
||||
//
|
||||
// This option is an enum to permit additional types to be added, e.g.
|
||||
// goog.math.Integer.
|
||||
optional JSType jstype = 6 [default = JS_NORMAL];
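// A minimal sketch (hypothetical field, not part of this file) of requesting
// string representation for a 64-bit field in JavaScript:
//   fixed64 user_id = 1 [jstype = JS_STRING];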
|
||||
enum JSType {
|
||||
// Use the default type.
|
||||
JS_NORMAL = 0;
|
||||
|
||||
// Use JavaScript strings.
|
||||
JS_STRING = 1;
|
||||
|
||||
// Use JavaScript numbers.
|
||||
JS_NUMBER = 2;
|
||||
}
|
||||
|
||||
// Should this field be parsed lazily? Lazy applies only to message-type
|
||||
// fields. It means that when the outer message is initially parsed, the
|
||||
// inner message's contents will not be parsed but instead stored in encoded
|
||||
// form. The inner message will actually be parsed when it is first accessed.
|
||||
//
|
||||
// This is only a hint. Implementations are free to choose whether to use
|
||||
// eager or lazy parsing regardless of the value of this option. However,
|
||||
// setting this option true suggests that the protocol author believes that
|
||||
// using lazy parsing on this field is worth the additional bookkeeping
|
||||
// overhead typically needed to implement it.
|
||||
//
|
||||
// This option does not affect the public interface of any generated code;
|
||||
// all method signatures remain the same. Furthermore, thread-safety of the
|
||||
// interface is not affected by this option; const methods remain safe to
|
||||
// call from multiple threads concurrently, while non-const methods continue
|
||||
// to require exclusive access.
|
||||
//
|
||||
//
|
||||
// Note that implementations may choose not to check required fields within
|
||||
// a lazy sub-message. That is, calling IsInitialized() on the outer message
|
||||
// may return true even if the inner message has missing required fields.
|
||||
// This is necessary because otherwise the inner message would have to be
|
||||
// parsed in order to perform the check, defeating the purpose of lazy
|
||||
// parsing. An implementation which chooses not to check required fields
|
||||
// must be consistent about it. That is, for any particular sub-message, the
|
||||
// implementation must either *always* check its required fields, or *never*
|
||||
// check its required fields, regardless of whether or not the message has
|
||||
// been parsed.
|
||||
optional bool lazy = 5 [default = false];
|
||||
|
||||
// Is this field deprecated?
|
||||
// Depending on the target platform, this can emit Deprecated annotations
|
||||
// for accessors, or it will be completely ignored; in the very least, this
|
||||
// is a formalization for deprecating fields.
|
||||
optional bool deprecated = 3 [default = false];
|
||||
|
||||
// For Google-internal migration only. Do not use.
|
||||
optional bool weak = 10 [default = false];
|
||||
|
||||
|
||||
// The parser stores options it doesn't recognize here. See above.
|
||||
repeated UninterpretedOption uninterpreted_option = 999;
|
||||
|
||||
// Clients can define custom options in extensions of this message. See above.
|
||||
extensions 1000 to max;
|
||||
|
||||
reserved 4; // removed jtype
|
||||
}
|
||||
|
||||
message OneofOptions {
|
||||
// The parser stores options it doesn't recognize here. See above.
|
||||
repeated UninterpretedOption uninterpreted_option = 999;
|
||||
|
||||
// Clients can define custom options in extensions of this message. See above.
|
||||
extensions 1000 to max;
|
||||
}
|
||||
|
||||
message EnumOptions {
|
||||
|
||||
// Set this option to true to allow mapping different tag names to the same
|
||||
// value.
|
||||
optional bool allow_alias = 2;
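// A minimal sketch (hypothetical enum, not part of this file) of the aliasing
// this option enables:
//   enum Status {
//     option allow_alias = true;
//     STATUS_UNKNOWN = 0;
//     STATUS_STARTED = 1;
//     STATUS_RUNNING = 1;  // alias of STATUS_STARTED
//   }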
|
||||
|
||||
// Is this enum deprecated?
|
||||
// Depending on the target platform, this can emit Deprecated annotations
|
||||
// for the enum, or it will be completely ignored; in the very least, this
|
||||
// is a formalization for deprecating enums.
|
||||
optional bool deprecated = 3 [default = false];
|
||||
|
||||
reserved 5; // javanano_as_lite
|
||||
|
||||
// The parser stores options it doesn't recognize here. See above.
|
||||
repeated UninterpretedOption uninterpreted_option = 999;
|
||||
|
||||
// Clients can define custom options in extensions of this message. See above.
|
||||
extensions 1000 to max;
|
||||
}
|
||||
|
||||
message EnumValueOptions {
|
||||
// Is this enum value deprecated?
|
||||
// Depending on the target platform, this can emit Deprecated annotations
|
||||
// for the enum value, or it will be completely ignored; in the very least,
|
||||
// this is a formalization for deprecating enum values.
|
||||
optional bool deprecated = 1 [default = false];
|
||||
|
||||
// The parser stores options it doesn't recognize here. See above.
|
||||
repeated UninterpretedOption uninterpreted_option = 999;
|
||||
|
||||
// Clients can define custom options in extensions of this message. See above.
|
||||
extensions 1000 to max;
|
||||
}
|
||||
|
||||
message ServiceOptions {
|
||||
|
||||
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
|
||||
// framework. We apologize for hoarding these numbers to ourselves, but
|
||||
// we were already using them long before we decided to release Protocol
|
||||
// Buffers.
|
||||
|
||||
// Is this service deprecated?
|
||||
// Depending on the target platform, this can emit Deprecated annotations
|
||||
// for the service, or it will be completely ignored; in the very least,
|
||||
// this is a formalization for deprecating services.
|
||||
optional bool deprecated = 33 [default = false];
|
||||
|
||||
// The parser stores options it doesn't recognize here. See above.
|
||||
repeated UninterpretedOption uninterpreted_option = 999;
|
||||
|
||||
// Clients can define custom options in extensions of this message. See above.
|
||||
extensions 1000 to max;
|
||||
}
|
||||
|
||||
message MethodOptions {
|
||||
|
||||
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
|
||||
// framework. We apologize for hoarding these numbers to ourselves, but
|
||||
// we were already using them long before we decided to release Protocol
|
||||
// Buffers.
|
||||
|
||||
// Is this method deprecated?
|
||||
// Depending on the target platform, this can emit Deprecated annotations
|
||||
// for the method, or it will be completely ignored; in the very least,
|
||||
// this is a formalization for deprecating methods.
|
||||
optional bool deprecated = 33 [default = false];
|
||||
|
||||
// Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
|
||||
// or neither? HTTP based RPC implementation may choose GET verb for safe
|
||||
// methods, and PUT verb for idempotent methods instead of the default POST.
|
||||
enum IdempotencyLevel {
|
||||
IDEMPOTENCY_UNKNOWN = 0;
|
||||
NO_SIDE_EFFECTS = 1; // implies idempotent
|
||||
IDEMPOTENT = 2; // idempotent, but may have side effects
|
||||
}
|
||||
optional IdempotencyLevel idempotency_level = 34
|
||||
[default = IDEMPOTENCY_UNKNOWN];
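// A minimal sketch (hypothetical method, not part of this file) of setting
// this option on an rpc:
//   rpc GetStatus (StatusRequest) returns (StatusReply) {
//     option idempotency_level = NO_SIDE_EFFECTS;
//   }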
|
||||
|
||||
// The parser stores options it doesn't recognize here. See above.
|
||||
repeated UninterpretedOption uninterpreted_option = 999;
|
||||
|
||||
// Clients can define custom options in extensions of this message. See above.
|
||||
extensions 1000 to max;
|
||||
}
|
||||
|
||||
|
||||
// A message representing an option the parser does not recognize. This only
|
||||
// appears in options protos created by the compiler::Parser class.
|
||||
// DescriptorPool resolves these when building Descriptor objects. Therefore,
|
||||
// options protos in descriptor objects (e.g. returned by Descriptor::options(),
|
||||
// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
|
||||
// in them.
|
||||
message UninterpretedOption {
|
||||
// The name of the uninterpreted option. Each string represents a segment in
|
||||
// a dot-separated name. is_extension is true iff a segment represents an
|
||||
// extension (denoted with parentheses in options specs in .proto files).
|
||||
// E.g., { ["foo", false], ["bar.baz", true], ["qux", false] } represents
|
||||
// "foo.(bar.baz).qux".
|
||||
message NamePart {
|
||||
required string name_part = 1;
|
||||
required bool is_extension = 2;
|
||||
}
|
||||
repeated NamePart name = 2;
|
||||
|
||||
// The value of the uninterpreted option, in whatever type the tokenizer
|
||||
// identified it as during parsing. Exactly one of these should be set.
|
||||
optional string identifier_value = 3;
|
||||
optional uint64 positive_int_value = 4;
|
||||
optional int64 negative_int_value = 5;
|
||||
optional double double_value = 6;
|
||||
optional bytes string_value = 7;
|
||||
optional string aggregate_value = 8;
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
// Optional source code info
|
||||
|
||||
// Encapsulates information about the original source file from which a
|
||||
// FileDescriptorProto was generated.
|
||||
message SourceCodeInfo {
|
||||
// A Location identifies a piece of source code in a .proto file which
|
||||
// corresponds to a particular definition. This information is intended
|
||||
// to be useful to IDEs, code indexers, documentation generators, and similar
|
||||
// tools.
|
||||
//
|
||||
// For example, say we have a file like:
|
||||
// message Foo {
|
||||
// optional string foo = 1;
|
||||
// }
|
||||
// Let's look at just the field definition:
|
||||
// optional string foo = 1;
|
||||
// ^ ^^ ^^ ^ ^^^
|
||||
// a bc de f ghi
|
||||
// We have the following locations:
|
||||
// span path represents
|
||||
// [a,i) [ 4, 0, 2, 0 ] The whole field definition.
|
||||
// [a,b) [ 4, 0, 2, 0, 4 ] The label (optional).
|
||||
// [c,d) [ 4, 0, 2, 0, 5 ] The type (string).
|
||||
// [e,f) [ 4, 0, 2, 0, 1 ] The name (foo).
|
||||
// [g,h) [ 4, 0, 2, 0, 3 ] The number (1).
|
||||
//
|
||||
// Notes:
|
||||
// - A location may refer to a repeated field itself (i.e. not to any
|
||||
// particular index within it). This is used whenever a set of elements are
|
||||
// logically enclosed in a single code segment. For example, an entire
|
||||
// extend block (possibly containing multiple extension definitions) will
|
||||
// have an outer location whose path refers to the "extensions" repeated
|
||||
// field without an index.
|
||||
// - Multiple locations may have the same path. This happens when a single
|
||||
// logical declaration is spread out across multiple places. The most
|
||||
// obvious example is the "extend" block again -- there may be multiple
|
||||
// extend blocks in the same scope, each of which will have the same path.
|
||||
// - A location's span is not always a subset of its parent's span. For
|
||||
// example, the "extendee" of an extension declaration appears at the
|
||||
// beginning of the "extend" block and is shared by all extensions within
|
||||
// the block.
|
||||
// - Just because a location's span is a subset of some other location's span
|
||||
// does not mean that it is a descendant. For example, a "group" defines
|
||||
// both a type and a field in a single declaration. Thus, the locations
|
||||
// corresponding to the type and field and their components will overlap.
|
||||
// - Code which tries to interpret locations should probably be designed to
|
||||
// ignore those that it doesn't understand, as more types of locations could
|
||||
// be recorded in the future.
|
||||
repeated Location location = 1;
|
||||
message Location {
|
||||
// Identifies which part of the FileDescriptorProto was defined at this
|
||||
// location.
|
||||
//
|
||||
// Each element is a field number or an index. They form a path from
|
||||
// the root FileDescriptorProto to the place where the definition appears. For
|
||||
// example, this path:
|
||||
// [ 4, 3, 2, 7, 1 ]
|
||||
// refers to:
|
||||
// file.message_type(3) // 4, 3
|
||||
// .field(7) // 2, 7
|
||||
// .name() // 1
|
||||
// This is because FileDescriptorProto.message_type has field number 4:
|
||||
// repeated DescriptorProto message_type = 4;
|
||||
// and DescriptorProto.field has field number 2:
|
||||
// repeated FieldDescriptorProto field = 2;
|
||||
// and FieldDescriptorProto.name has field number 1:
|
||||
// optional string name = 1;
|
||||
//
|
||||
// Thus, the above path gives the location of a field name. If we removed
|
||||
// the last element:
|
||||
// [ 4, 3, 2, 7 ]
|
||||
// this path refers to the whole field declaration (from the beginning
|
||||
// of the label to the terminating semicolon).
|
||||
repeated int32 path = 1 [packed = true];
|
||||
|
||||
// Always has exactly three or four elements: start line, start column,
|
||||
// end line (optional, otherwise assumed same as start line), end column.
|
||||
// These are packed into a single field for efficiency. Note that line
|
||||
// and column numbers are zero-based -- typically you will want to add
|
||||
// 1 to each before displaying to a user.
|
||||
repeated int32 span = 2 [packed = true];
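// For illustration (hypothetical values, not part of this file): a span of
// [ 11, 2, 22 ] covers line 11 from column 2 to column 22, while
// [ 11, 2, 14, 3 ] runs from line 11, column 2 through line 14, column 3
// (all zero-based).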
|
||||
|
||||
// If this SourceCodeInfo represents a complete declaration, these are any
|
||||
// comments appearing before and after the declaration which appear to be
|
||||
// attached to the declaration.
|
||||
//
|
||||
// A series of line comments appearing on consecutive lines, with no other
|
||||
// tokens appearing on those lines, will be treated as a single comment.
|
||||
//
|
||||
// leading_detached_comments will keep paragraphs of comments that appear
|
||||
// before (but not connected to) the current element. Each paragraph,
|
||||
// separated by empty lines, will be one comment element in the repeated
|
||||
// field.
|
||||
//
|
||||
// Only the comment content is provided; comment markers (e.g. //) are
|
||||
// stripped out. For block comments, leading whitespace and an asterisk
|
||||
// will be stripped from the beginning of each line other than the first.
|
||||
// Newlines are included in the output.
|
||||
//
|
||||
// Examples:
|
||||
//
|
||||
// optional int32 foo = 1; // Comment attached to foo.
|
||||
// // Comment attached to bar.
|
||||
// optional int32 bar = 2;
|
||||
//
|
||||
// optional string baz = 3;
|
||||
// // Comment attached to baz.
|
||||
// // Another line attached to baz.
|
||||
//
|
||||
// // Comment attached to qux.
|
||||
// //
|
||||
// // Another line attached to qux.
|
||||
// optional double qux = 4;
|
||||
//
|
||||
// // Detached comment for corge. This is not leading or trailing comments
|
||||
// // to qux or corge because there are blank lines separating it from
|
||||
// // both.
|
||||
//
|
||||
// // Detached comment for corge paragraph 2.
|
||||
//
|
||||
// optional string corge = 5;
|
||||
// /* Block comment attached
|
||||
// * to corge. Leading asterisks
|
||||
// * will be removed. */
|
||||
// /* Block comment attached to
|
||||
// * grault. */
|
||||
// optional int32 grault = 6;
|
||||
//
|
||||
// // ignored detached comments.
|
||||
optional string leading_comments = 3;
|
||||
optional string trailing_comments = 4;
|
||||
repeated string leading_detached_comments = 6;
|
||||
}
|
||||
}
|
||||
|
||||
// Describes the relationship between generated code and its original source
|
||||
// file. A GeneratedCodeInfo message is associated with only one generated
|
||||
// source file, but may contain references to different source .proto files.
|
||||
message GeneratedCodeInfo {
|
||||
// An Annotation connects some span of text in generated code to an element
|
||||
// of its generating .proto file.
|
||||
repeated Annotation annotation = 1;
|
||||
message Annotation {
|
||||
// Identifies the element in the original source .proto file. This field
|
||||
// is formatted the same as SourceCodeInfo.Location.path.
|
||||
repeated int32 path = 1 [packed = true];
|
||||
|
||||
// Identifies the filesystem path to the original source .proto.
|
||||
optional string source_file = 2;
|
||||
|
||||
// Identifies the starting offset in bytes in the generated code
|
||||
// that relates to the identified object.
|
||||
optional int32 begin = 3;
|
||||
|
||||
// Identifies the ending offset in bytes in the generated code that
|
||||
// relates to the identified offset. The end offset should be one past
|
||||
// the last relevant byte (so the length of the text = end - begin).
|
||||
optional int32 end = 4;
|
||||
}
|
||||
}
|
20
tests/inputs/example_service/example_service.proto
Normal file
|
@ -0,0 +1,20 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package example_service;
|
||||
|
||||
service Test {
|
||||
rpc ExampleUnaryUnary(ExampleRequest) returns (ExampleResponse);
|
||||
rpc ExampleUnaryStream(ExampleRequest) returns (stream ExampleResponse);
|
||||
rpc ExampleStreamUnary(stream ExampleRequest) returns (ExampleResponse);
|
||||
rpc ExampleStreamStream(stream ExampleRequest) returns (stream ExampleResponse);
|
||||
}
|
||||
|
||||
message ExampleRequest {
|
||||
string example_string = 1;
|
||||
int64 example_integer = 2;
|
||||
}
|
||||
|
||||
message ExampleResponse {
|
||||
string example_string = 1;
|
||||
int64 example_integer = 2;
|
||||
}
|
86
tests/inputs/example_service/test_example_service.py
Normal file
|
@ -0,0 +1,86 @@
|
|||
from typing import (
|
||||
AsyncIterable,
|
||||
AsyncIterator,
|
||||
)
|
||||
|
||||
import pytest
|
||||
from grpclib.testing import ChannelFor
|
||||
|
||||
from tests.output_aristaproto.example_service import (
|
||||
ExampleRequest,
|
||||
ExampleResponse,
|
||||
TestBase,
|
||||
TestStub,
|
||||
)
|
||||
|
||||
|
||||
class ExampleService(TestBase):
|
||||
async def example_unary_unary(
|
||||
self, example_request: ExampleRequest
|
||||
) -> "ExampleResponse":
|
||||
return ExampleResponse(
|
||||
example_string=example_request.example_string,
|
||||
example_integer=example_request.example_integer,
|
||||
)
|
||||
|
||||
async def example_unary_stream(
|
||||
self, example_request: ExampleRequest
|
||||
) -> AsyncIterator["ExampleResponse"]:
|
||||
response = ExampleResponse(
|
||||
example_string=example_request.example_string,
|
||||
example_integer=example_request.example_integer,
|
||||
)
|
||||
yield response
|
||||
yield response
|
||||
yield response
|
||||
|
||||
async def example_stream_unary(
|
||||
self, example_request_iterator: AsyncIterator["ExampleRequest"]
|
||||
) -> "ExampleResponse":
|
||||
async for example_request in example_request_iterator:
|
||||
return ExampleResponse(
|
||||
example_string=example_request.example_string,
|
||||
example_integer=example_request.example_integer,
|
||||
)
|
||||
|
||||
async def example_stream_stream(
|
||||
self, example_request_iterator: AsyncIterator["ExampleRequest"]
|
||||
) -> AsyncIterator["ExampleResponse"]:
|
||||
async for example_request in example_request_iterator:
|
||||
yield ExampleResponse(
|
||||
example_string=example_request.example_string,
|
||||
example_integer=example_request.example_integer,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_calls_with_different_cardinalities():
|
||||
example_request = ExampleRequest("test string", 42)
|
||||
|
||||
async with ChannelFor([ExampleService()]) as channel:
|
||||
stub = TestStub(channel)
|
||||
|
||||
# unary unary
|
||||
response = await stub.example_unary_unary(example_request)
|
||||
assert response.example_string == example_request.example_string
|
||||
assert response.example_integer == example_request.example_integer
|
||||
|
||||
# unary stream
|
||||
async for response in stub.example_unary_stream(example_request):
|
||||
assert response.example_string == example_request.example_string
|
||||
assert response.example_integer == example_request.example_integer
|
||||
|
||||
# stream unary
|
||||
async def request_iterator():
|
||||
yield example_request
|
||||
yield example_request
|
||||
yield example_request
|
||||
|
||||
response = await stub.example_stream_unary(request_iterator())
|
||||
assert response.example_string == example_request.example_string
|
||||
assert response.example_integer == example_request.example_integer
|
||||
|
||||
# stream stream
|
||||
async for response in stub.example_stream_stream(request_iterator()):
|
||||
assert response.example_string == example_request.example_string
|
||||
assert response.example_integer == example_request.example_integer
|
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"int": 26,
|
||||
"float": 26.0,
|
||||
"str": "value-for-str",
|
||||
"bytes": "001a",
|
||||
"bool": true
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package field_name_identical_to_type;
|
||||
|
||||
// Tests that messages may contain fields with names that are identical to their python types (PR #294)
|
||||
|
||||
message Test {
|
||||
int32 int = 1;
|
||||
float float = 2;
|
||||
string str = 3;
|
||||
bytes bytes = 4;
|
||||
bool bool = 5;
|
||||
}
|
6
tests/inputs/fixed/fixed.json
Normal file
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"foo": 4294967295,
|
||||
"bar": -2147483648,
|
||||
"baz": "18446744073709551615",
|
||||
"qux": "-9223372036854775808"
|
||||
}
|
10
tests/inputs/fixed/fixed.proto
Normal file
|
@ -0,0 +1,10 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package fixed;
|
||||
|
||||
message Test {
|
||||
fixed32 foo = 1;
|
||||
sfixed32 bar = 2;
|
||||
fixed64 baz = 3;
|
||||
sfixed64 qux = 4;
|
||||
}
|
9
tests/inputs/float/float.json
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"positive": "Infinity",
|
||||
"negative": "-Infinity",
|
||||
"nan": "NaN",
|
||||
"three": 3.0,
|
||||
"threePointOneFour": 3.14,
|
||||
"negThree": -3.0,
|
||||
"negThreePointOneFour": -3.14
|
||||
}
|
14
tests/inputs/float/float.proto
Normal file
|
@ -0,0 +1,14 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package float;
|
||||
|
||||
// Some documentation about the Test message.
|
||||
message Test {
|
||||
double positive = 1;
|
||||
double negative = 2;
|
||||
double nan = 3;
|
||||
double three = 4;
|
||||
double three_point_one_four = 5;
|
||||
double neg_three = 6;
|
||||
double neg_three_point_one_four = 7;
|
||||
}
|
|
@ -0,0 +1,22 @@
|
|||
syntax = "proto3";
|
||||
|
||||
import "google/protobuf/timestamp.proto";
|
||||
package google_impl_behavior_equivalence;
|
||||
|
||||
message Foo { int64 bar = 1; }
|
||||
|
||||
message Test {
|
||||
oneof group {
|
||||
string string = 1;
|
||||
int64 integer = 2;
|
||||
Foo foo = 3;
|
||||
}
|
||||
}
|
||||
|
||||
message Spam {
|
||||
google.protobuf.Timestamp ts = 1;
|
||||
}
|
||||
|
||||
message Request { Empty foo = 1; }
|
||||
|
||||
message Empty {}
|
|
@ -0,0 +1,93 @@
|
|||
from datetime import (
|
||||
datetime,
|
||||
timezone,
|
||||
)
|
||||
|
||||
import pytest
|
||||
from google.protobuf import json_format
|
||||
from google.protobuf.timestamp_pb2 import Timestamp
|
||||
|
||||
import aristaproto
|
||||
from tests.output_aristaproto.google_impl_behavior_equivalence import (
|
||||
Empty,
|
||||
Foo,
|
||||
Request,
|
||||
Spam,
|
||||
Test,
|
||||
)
|
||||
from tests.output_reference.google_impl_behavior_equivalence.google_impl_behavior_equivalence_pb2 import (
|
||||
Empty as ReferenceEmpty,
|
||||
Foo as ReferenceFoo,
|
||||
Request as ReferenceRequest,
|
||||
Spam as ReferenceSpam,
|
||||
Test as ReferenceTest,
|
||||
)
|
||||
|
||||
|
||||
def test_oneof_serializes_similar_to_google_oneof():
|
||||
tests = [
|
||||
(Test(string="abc"), ReferenceTest(string="abc")),
|
||||
(Test(integer=2), ReferenceTest(integer=2)),
|
||||
(Test(foo=Foo(bar=1)), ReferenceTest(foo=ReferenceFoo(bar=1))),
|
||||
# Default values should also behave the same within oneofs
|
||||
(Test(string=""), ReferenceTest(string="")),
|
||||
(Test(integer=0), ReferenceTest(integer=0)),
|
||||
(Test(foo=Foo(bar=0)), ReferenceTest(foo=ReferenceFoo(bar=0))),
|
||||
]
|
||||
for message, message_reference in tests:
|
||||
# NOTE: As of July 2020, MessageToJson inserts newlines in the output string so,
|
||||
# just compare dicts
|
||||
assert message.to_dict() == json_format.MessageToDict(message_reference)
|
||||
|
||||
|
||||
def test_bytes_are_the_same_for_oneof():
|
||||
message = Test(string="")
|
||||
message_reference = ReferenceTest(string="")
|
||||
|
||||
message_bytes = bytes(message)
|
||||
message_reference_bytes = message_reference.SerializeToString()
|
||||
|
||||
assert message_bytes == message_reference_bytes
|
||||
|
||||
message2 = Test().parse(message_reference_bytes)
|
||||
message_reference2 = ReferenceTest()
|
||||
message_reference2.ParseFromString(message_reference_bytes)
|
||||
|
||||
assert message == message2
|
||||
assert message_reference == message_reference2
|
||||
|
||||
# None of these fields were explicitly set BUT they should not actually be null
|
||||
# themselves
|
||||
assert not hasattr(message, "foo")
|
||||
assert object.__getattribute__(message, "foo") == aristaproto.PLACEHOLDER
|
||||
assert not hasattr(message2, "foo")
|
||||
assert object.__getattribute__(message2, "foo") == aristaproto.PLACEHOLDER
|
||||
|
||||
assert isinstance(message_reference.foo, ReferenceFoo)
|
||||
assert isinstance(message_reference2.foo, ReferenceFoo)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("dt", (datetime.min.replace(tzinfo=timezone.utc),))
|
||||
def test_datetime_clamping(dt): # see #407
|
||||
ts = Timestamp()
|
||||
ts.FromDatetime(dt)
|
||||
assert bytes(Spam(dt)) == ReferenceSpam(ts=ts).SerializeToString()
|
||||
message_bytes = bytes(Spam(dt))
|
||||
|
||||
assert (
|
||||
Spam().parse(message_bytes).ts.timestamp()
|
||||
== ReferenceSpam.FromString(message_bytes).ts.seconds
|
||||
)
|
||||
|
||||
|
||||
def test_empty_message_field():
|
||||
message = Request()
|
||||
reference_message = ReferenceRequest()
|
||||
|
||||
message.foo = Empty()
|
||||
reference_message.foo.CopyFrom(ReferenceEmpty())
|
||||
|
||||
assert aristaproto.serialized_on_wire(message.foo)
|
||||
assert reference_message.HasField("foo")
|
||||
|
||||
assert bytes(message) == reference_message.SerializeToString()
|
1
tests/inputs/googletypes/googletypes-missing.json
Normal file
|
@ -0,0 +1 @@
|
|||
{}
|
7
tests/inputs/googletypes/googletypes.json
Normal file
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"maybe": false,
|
||||
"ts": "1972-01-01T10:00:20.021Z",
|
||||
"duration": "1.200s",
|
||||
"important": 10,
|
||||
"empty": {}
|
||||
}
|
16
tests/inputs/googletypes/googletypes.proto
Normal file
|
@ -0,0 +1,16 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package googletypes;
|
||||
|
||||
import "google/protobuf/duration.proto";
|
||||
import "google/protobuf/timestamp.proto";
|
||||
import "google/protobuf/wrappers.proto";
|
||||
import "google/protobuf/empty.proto";
|
||||
|
||||
message Test {
|
||||
google.protobuf.BoolValue maybe = 1;
|
||||
google.protobuf.Timestamp ts = 2;
|
||||
google.protobuf.Duration duration = 3;
|
||||
google.protobuf.Int32Value important = 4;
|
||||
google.protobuf.Empty empty = 5;
|
||||
}
|
29
tests/inputs/googletypes_request/googletypes_request.proto
Normal file
|
@ -0,0 +1,29 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package googletypes_request;
|
||||
|
||||
import "google/protobuf/duration.proto";
|
||||
import "google/protobuf/empty.proto";
|
||||
import "google/protobuf/timestamp.proto";
|
||||
import "google/protobuf/wrappers.proto";
|
||||
|
||||
// Tests that google types can be used as params
|
||||
|
||||
service Test {
|
||||
rpc SendDouble (google.protobuf.DoubleValue) returns (Input);
|
||||
rpc SendFloat (google.protobuf.FloatValue) returns (Input);
|
||||
rpc SendInt64 (google.protobuf.Int64Value) returns (Input);
|
||||
rpc SendUInt64 (google.protobuf.UInt64Value) returns (Input);
|
||||
rpc SendInt32 (google.protobuf.Int32Value) returns (Input);
|
||||
rpc SendUInt32 (google.protobuf.UInt32Value) returns (Input);
|
||||
rpc SendBool (google.protobuf.BoolValue) returns (Input);
|
||||
rpc SendString (google.protobuf.StringValue) returns (Input);
|
||||
rpc SendBytes (google.protobuf.BytesValue) returns (Input);
|
||||
rpc SendDatetime (google.protobuf.Timestamp) returns (Input);
|
||||
rpc SendTimedelta (google.protobuf.Duration) returns (Input);
|
||||
rpc SendEmpty (google.protobuf.Empty) returns (Input);
|
||||
}
|
||||
|
||||
message Input {
|
||||
|
||||
}
|
47
tests/inputs/googletypes_request/test_googletypes_request.py
Normal file
|
@ -0,0 +1,47 @@
|
|||
from datetime import (
|
||||
datetime,
|
||||
timedelta,
|
||||
)
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
)
|
||||
|
||||
import pytest
|
||||
|
||||
import aristaproto.lib.google.protobuf as protobuf
|
||||
from tests.mocks import MockChannel
|
||||
from tests.output_aristaproto.googletypes_request import (
|
||||
Input,
|
||||
TestStub,
|
||||
)
|
||||
|
||||
|
||||
test_cases = [
|
||||
(TestStub.send_double, protobuf.DoubleValue, 2.5),
|
||||
(TestStub.send_float, protobuf.FloatValue, 2.5),
|
||||
(TestStub.send_int64, protobuf.Int64Value, -64),
|
||||
(TestStub.send_u_int64, protobuf.UInt64Value, 64),
|
||||
(TestStub.send_int32, protobuf.Int32Value, -32),
|
||||
(TestStub.send_u_int32, protobuf.UInt32Value, 32),
|
||||
(TestStub.send_bool, protobuf.BoolValue, True),
|
||||
(TestStub.send_string, protobuf.StringValue, "string"),
|
||||
(TestStub.send_bytes, protobuf.BytesValue, bytes(0xFF)[0:4]),
|
||||
(TestStub.send_datetime, protobuf.Timestamp, datetime(2038, 1, 19, 3, 14, 8)),
|
||||
(TestStub.send_timedelta, protobuf.Duration, timedelta(seconds=123456)),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases)
|
||||
async def test_channel_receives_wrapped_type(
|
||||
service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value
|
||||
):
|
||||
wrapped_value = wrapper_class()
|
||||
wrapped_value.value = value
|
||||
channel = MockChannel(responses=[Input()])
|
||||
service = TestStub(channel)
|
||||
|
||||
await service_method(service, wrapped_value)
|
||||
|
||||
assert channel.requests[0]["request"] == type(wrapped_value)
|
23
tests/inputs/googletypes_response/googletypes_response.proto
Normal file
|
@ -0,0 +1,23 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package googletypes_response;
|
||||
|
||||
import "google/protobuf/wrappers.proto";
|
||||
|
||||
// Tests that wrapped values can be used directly as return values
|
||||
|
||||
service Test {
|
||||
rpc GetDouble (Input) returns (google.protobuf.DoubleValue);
|
||||
rpc GetFloat (Input) returns (google.protobuf.FloatValue);
|
||||
rpc GetInt64 (Input) returns (google.protobuf.Int64Value);
|
||||
rpc GetUInt64 (Input) returns (google.protobuf.UInt64Value);
|
||||
rpc GetInt32 (Input) returns (google.protobuf.Int32Value);
|
||||
rpc GetUInt32 (Input) returns (google.protobuf.UInt32Value);
|
||||
rpc GetBool (Input) returns (google.protobuf.BoolValue);
|
||||
rpc GetString (Input) returns (google.protobuf.StringValue);
|
||||
rpc GetBytes (Input) returns (google.protobuf.BytesValue);
|
||||
}
|
||||
|
||||
message Input {
|
||||
|
||||
}
|
|
@ -0,0 +1,64 @@
|
|||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Optional,
|
||||
)
|
||||
|
||||
import pytest
|
||||
|
||||
import aristaproto.lib.google.protobuf as protobuf
|
||||
from tests.mocks import MockChannel
|
||||
from tests.output_aristaproto.googletypes_response import (
|
||||
Input,
|
||||
TestStub,
|
||||
)
|
||||
|
||||
|
||||
test_cases = [
|
||||
(TestStub.get_double, protobuf.DoubleValue, 2.5),
|
||||
(TestStub.get_float, protobuf.FloatValue, 2.5),
|
||||
(TestStub.get_int64, protobuf.Int64Value, -64),
|
||||
(TestStub.get_u_int64, protobuf.UInt64Value, 64),
|
||||
(TestStub.get_int32, protobuf.Int32Value, -32),
|
||||
(TestStub.get_u_int32, protobuf.UInt32Value, 32),
|
||||
(TestStub.get_bool, protobuf.BoolValue, True),
|
||||
(TestStub.get_string, protobuf.StringValue, "string"),
|
||||
(TestStub.get_bytes, protobuf.BytesValue, bytes(0xFF)[0:4]),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases)
|
||||
async def test_channel_receives_wrapped_type(
|
||||
service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value
|
||||
):
|
||||
wrapped_value = wrapper_class()
|
||||
wrapped_value.value = value
|
||||
channel = MockChannel(responses=[wrapped_value])
|
||||
service = TestStub(channel)
|
||||
method_param = Input()
|
||||
|
||||
await service_method(service, method_param)
|
||||
|
||||
assert channel.requests[0]["response_type"] != Optional[type(value)]
|
||||
assert channel.requests[0]["response_type"] == type(wrapped_value)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.xfail
|
||||
@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases)
|
||||
async def test_service_unwraps_response(
|
||||
service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value
|
||||
):
|
||||
"""
|
||||
grpclib does not unwrap wrapper values returned by services
|
||||
"""
|
||||
wrapped_value = wrapper_class()
|
||||
wrapped_value.value = value
|
||||
service = TestStub(MockChannel(responses=[wrapped_value]))
|
||||
method_param = Input()
|
||||
|
||||
response_value = await service_method(service, method_param)
|
||||
|
||||
assert response_value == value
|
||||
assert type(response_value) == type(value)
|
|
@ -0,0 +1,26 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package googletypes_response_embedded;
|
||||
|
||||
import "google/protobuf/wrappers.proto";
|
||||
|
||||
// Tests that wrapped values are supported as part of output message
|
||||
service Test {
|
||||
rpc getOutput (Input) returns (Output);
|
||||
}
|
||||
|
||||
message Input {
|
||||
|
||||
}
|
||||
|
||||
message Output {
|
||||
google.protobuf.DoubleValue double_value = 1;
|
||||
google.protobuf.FloatValue float_value = 2;
|
||||
google.protobuf.Int64Value int64_value = 3;
|
||||
google.protobuf.UInt64Value uint64_value = 4;
|
||||
google.protobuf.Int32Value int32_value = 5;
|
||||
google.protobuf.UInt32Value uint32_value = 6;
|
||||
google.protobuf.BoolValue bool_value = 7;
|
||||
google.protobuf.StringValue string_value = 8;
|
||||
google.protobuf.BytesValue bytes_value = 9;
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
import pytest
|
||||
|
||||
from tests.mocks import MockChannel
|
||||
from tests.output_aristaproto.googletypes_response_embedded import (
|
||||
Input,
|
||||
Output,
|
||||
TestStub,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_service_passes_through_unwrapped_values_embedded_in_response():
|
||||
"""
|
||||
We do not need to implement value unwrapping for embedded well-known types,
|
||||
as this is already handled by grpclib. This test merely shows that this is the case.
|
||||
"""
|
||||
output = Output(
|
||||
double_value=10.0,
|
||||
float_value=12.0,
|
||||
int64_value=-13,
|
||||
uint64_value=14,
|
||||
int32_value=-15,
|
||||
uint32_value=16,
|
||||
bool_value=True,
|
||||
string_value="string",
|
||||
bytes_value=bytes(0xFF)[0:4],
|
||||
)
|
||||
|
||||
service = TestStub(MockChannel(responses=[output]))
|
||||
response = await service.get_output(Input())
|
||||
|
||||
assert response.double_value == 10.0
|
||||
assert response.float_value == 12.0
|
||||
assert response.int64_value == -13
|
||||
assert response.uint64_value == 14
|
||||
assert response.int32_value == -15
|
||||
assert response.uint32_value == 16
|
||||
assert response.bool_value
|
||||
assert response.string_value == "string"
|
||||
assert response.bytes_value == bytes(0xFF)[0:4]
|
|
@ -0,0 +1,13 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package googletypes_service_returns_empty;
|
||||
|
||||
import "google/protobuf/empty.proto";
|
||||
|
||||
service Test {
|
||||
rpc Send (RequestMessage) returns (google.protobuf.Empty) {
|
||||
}
|
||||
}
|
||||
|
||||
message RequestMessage {
|
||||
}
|
|
@ -0,0 +1,18 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package googletypes_service_returns_googletype;
|
||||
|
||||
import "google/protobuf/empty.proto";
|
||||
import "google/protobuf/struct.proto";
|
||||
|
||||
// Tests that imports are generated correctly when returning Google well-known types
|
||||
|
||||
service Test {
|
||||
rpc GetEmpty (RequestMessage) returns (google.protobuf.Empty);
|
||||
rpc GetStruct (RequestMessage) returns (google.protobuf.Struct);
|
||||
rpc GetListValue (RequestMessage) returns (google.protobuf.ListValue);
|
||||
rpc GetValue (RequestMessage) returns (google.protobuf.Value);
|
||||
}
|
||||
|
||||
message RequestMessage {
|
||||
}
|
5
tests/inputs/googletypes_struct/googletypes_struct.json
Normal file
|
@ -0,0 +1,5 @@
|
|||
{
|
||||
"struct": {
|
||||
"key": true
|
||||
}
|
||||
}
|
9
tests/inputs/googletypes_struct/googletypes_struct.proto
Normal file
|
@ -0,0 +1,9 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package googletypes_struct;
|
||||
|
||||
import "google/protobuf/struct.proto";
|
||||
|
||||
message Test {
|
||||
google.protobuf.Struct struct = 1;
|
||||
}
|
11
tests/inputs/googletypes_value/googletypes_value.json
Normal file
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
"value1": "hello world",
|
||||
"value2": true,
|
||||
"value3": 1,
|
||||
"value4": null,
|
||||
"value5": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
}
|
15
tests/inputs/googletypes_value/googletypes_value.proto
Normal file
|
@ -0,0 +1,15 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package googletypes_value;
|
||||
|
||||
import "google/protobuf/struct.proto";
|
||||
|
||||
// Tests that fields of type google.protobuf.Value can contain arbitrary JSON-values.
|
||||
|
||||
message Test {
|
||||
google.protobuf.Value value1 = 1;
|
||||
google.protobuf.Value value2 = 2;
|
||||
google.protobuf.Value value3 = 3;
|
||||
google.protobuf.Value value4 = 4;
|
||||
google.protobuf.Value value5 = 5;
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
syntax = "proto3";
|
||||
|
||||
|
||||
package import_capitalized_package.Capitalized;
|
||||
|
||||
message Message {
|
||||
|
||||
}
|
11
tests/inputs/import_capitalized_package/test.proto
Normal file
|
@ -0,0 +1,11 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_capitalized_package;
|
||||
|
||||
import "capitalized.proto";
|
||||
|
||||
// Tests that we can import from a package with a capital name, that looks like a nested type, but isn't.
|
||||
|
||||
message Test {
|
||||
Capitalized.Message message = 1;
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_child_package_from_package.package.childpackage;
|
||||
|
||||
message ChildMessage {
|
||||
|
||||
}
|
|
@ -0,0 +1,11 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_child_package_from_package;
|
||||
|
||||
import "package_message.proto";
|
||||
|
||||
// Tests generated imports when a message in a package refers to a message in a nested child package.
|
||||
|
||||
message Test {
|
||||
package.PackageMessage message = 1;
|
||||
}
|
|
@ -0,0 +1,9 @@
|
|||
syntax = "proto3";
|
||||
|
||||
import "child.proto";
|
||||
|
||||
package import_child_package_from_package.package;
|
||||
|
||||
message PackageMessage {
|
||||
package.childpackage.ChildMessage c = 1;
|
||||
}
|
7
tests/inputs/import_child_package_from_root/child.proto
Normal file
|
@ -0,0 +1,7 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_child_package_from_root.childpackage;
|
||||
|
||||
message Message {
|
||||
|
||||
}
|
|
@ -0,0 +1,11 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_child_package_from_root;
|
||||
|
||||
import "child.proto";
|
||||
|
||||
// Tests generated imports when a message in root refers to a message in a child package.
|
||||
|
||||
message Test {
|
||||
childpackage.Message child = 1;
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_circular_dependency;
|
||||
|
||||
import "root.proto";
|
||||
import "other.proto";
|
||||
|
||||
// This test-case verifies support for circular dependencies in the generated python files.
|
||||
//
|
||||
// This is important because we generate 1 python file/module per package, rather than 1 file per proto file.
|
||||
//
|
||||
// Scenario:
|
||||
//
|
||||
// The proto messages depend on each other in a non-circular way:
|
||||
//
|
||||
// Test -------> RootPackageMessage <--------------.
|
||||
// `------------------------------------> OtherPackageMessage
|
||||
//
|
||||
// Test and RootPackageMessage are in different files, but belong to the same package (root):
|
||||
//
|
||||
// (Test -------> RootPackageMessage) <------------.
|
||||
// `------------------------------------> OtherPackageMessage
|
||||
//
|
||||
// After grouping the packages into single files or modules, a circular dependency is created:
|
||||
//
|
||||
// (root: Test & RootPackageMessage) <-------> (other: OtherPackageMessage)
|
||||
message Test {
|
||||
RootPackageMessage message = 1;
|
||||
other.OtherPackageMessage other = 2;
|
||||
}
|
8
tests/inputs/import_circular_dependency/other.proto
Normal file
|
@ -0,0 +1,8 @@
|
|||
syntax = "proto3";
|
||||
|
||||
import "root.proto";
|
||||
package import_circular_dependency.other;
|
||||
|
||||
message OtherPackageMessage {
|
||||
RootPackageMessage rootPackageMessage = 1;
|
||||
}
|
7
tests/inputs/import_circular_dependency/root.proto
Normal file
|
@ -0,0 +1,7 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_circular_dependency;
|
||||
|
||||
message RootPackageMessage {
|
||||
|
||||
}
|
6
tests/inputs/import_cousin_package/cousin.proto
Normal file
|
@ -0,0 +1,6 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_cousin_package.cousin.cousin_subpackage;
|
||||
|
||||
message CousinMessage {
|
||||
}
|
11
tests/inputs/import_cousin_package/test.proto
Normal file
|
@ -0,0 +1,11 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_cousin_package.test.subpackage;
|
||||
|
||||
import "cousin.proto";
|
||||
|
||||
// Verify that we can import a message unrelated to us
|
||||
|
||||
message Test {
|
||||
cousin.cousin_subpackage.CousinMessage message = 1;
|
||||
}
|
|
@ -0,0 +1,6 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_cousin_package_same_name.cousin.subpackage;
|
||||
|
||||
message CousinMessage {
|
||||
}
|
11
tests/inputs/import_cousin_package_same_name/test.proto
Normal file
|
@ -0,0 +1,11 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_cousin_package_same_name.test.subpackage;
|
||||
|
||||
import "cousin.proto";
|
||||
|
||||
// Verify that we can import a message unrelated to us, in a subpackage with the same name as us.
|
||||
|
||||
message Test {
|
||||
cousin.subpackage.CousinMessage message = 1;
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_packages_same_name;
|
||||
|
||||
import "users_v1.proto";
|
||||
import "posts_v1.proto";
|
||||
|
||||
// Tests generated message can correctly reference two packages with the same leaf-name
|
||||
|
||||
message Test {
|
||||
users.v1.User user = 1;
|
||||
posts.v1.Post post = 2;
|
||||
}
|
7
tests/inputs/import_packages_same_name/posts_v1.proto
Normal file
|
@ -0,0 +1,7 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_packages_same_name.posts.v1;
|
||||
|
||||
message Post {
|
||||
|
||||
}
|
7
tests/inputs/import_packages_same_name/users_v1.proto
Normal file
|
@ -0,0 +1,7 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_packages_same_name.users.v1;
|
||||
|
||||
message User {
|
||||
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
syntax = "proto3";
|
||||
|
||||
import "parent_package_message.proto";
|
||||
|
||||
package import_parent_package_from_child.parent.child;
|
||||
|
||||
// Tests generated imports when a message refers to a message defined in its parent package
|
||||
|
||||
message Test {
|
||||
ParentPackageMessage message_implicit = 1;
|
||||
parent.ParentPackageMessage message_explicit = 2;
|
||||
}
|
|
@ -0,0 +1,6 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_parent_package_from_child.parent;
|
||||
|
||||
message ParentPackageMessage {
|
||||
}
|
11
tests/inputs/import_root_package_from_child/child.proto
Normal file
|
@ -0,0 +1,11 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_root_package_from_child.child;
|
||||
|
||||
import "root.proto";
|
||||
|
||||
// Verify that we can import root message from child package
|
||||
|
||||
message Test {
|
||||
RootMessage message = 1;
|
||||
}
|
7
tests/inputs/import_root_package_from_child/root.proto
Normal file
|
@ -0,0 +1,7 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package import_root_package_from_child;
|
||||
|
||||
|
||||
message RootMessage {
|
||||
}
|
11
tests/inputs/import_root_sibling/import_root_sibling.proto
Normal file
@@ -0,0 +1,11 @@
syntax = "proto3";

package import_root_sibling;

import "sibling.proto";

// Tests generated imports when a message in the root package refers to another message in the root package

message Test {
  SiblingMessage sibling = 1;
}
7
tests/inputs/import_root_sibling/sibling.proto
Normal file
@@ -0,0 +1,7 @@
syntax = "proto3";

package import_root_sibling;

message SiblingMessage {

}
@@ -0,0 +1,7 @@
syntax = "proto3";

package import_service_input_message.child;

message ChildRequestMessage {
  int32 child_argument = 1;
}
@@ -0,0 +1,25 @@
syntax = "proto3";

package import_service_input_message;

import "request_message.proto";
import "child_package_request_message.proto";

// Tests that the generated service correctly imports the RequestMessage

service Test {
  rpc DoThing (RequestMessage) returns (RequestResponse);
  rpc DoThing2 (child.ChildRequestMessage) returns (RequestResponse);
  rpc DoThing3 (Nested.RequestMessage) returns (RequestResponse);
}


message RequestResponse {
  int32 value = 1;
}

message Nested {
  message RequestMessage {
    int32 nestedArgument = 1;
  }
}
@@ -0,0 +1,7 @@
syntax = "proto3";

package import_service_input_message;

message RequestMessage {
  int32 argument = 1;
}
@@ -0,0 +1,36 @@
import pytest

from tests.mocks import MockChannel
from tests.output_aristaproto.import_service_input_message import (
    NestedRequestMessage,
    RequestMessage,
    RequestResponse,
    TestStub,
)
from tests.output_aristaproto.import_service_input_message.child import (
    ChildRequestMessage,
)


@pytest.mark.asyncio
async def test_service_correctly_imports_reference_message():
    mock_response = RequestResponse(value=10)
    service = TestStub(MockChannel([mock_response]))
    response = await service.do_thing(RequestMessage(1))
    assert mock_response == response


@pytest.mark.asyncio
async def test_service_correctly_imports_reference_message_from_child_package():
    mock_response = RequestResponse(value=10)
    service = TestStub(MockChannel([mock_response]))
    response = await service.do_thing2(ChildRequestMessage(1))
    assert mock_response == response


@pytest.mark.asyncio
async def test_service_correctly_imports_nested_reference():
    mock_response = RequestResponse(value=10)
    service = TestStub(MockChannel([mock_response]))
    response = await service.do_thing3(NestedRequestMessage(1))
    assert mock_response == response
4
tests/inputs/int32/int32.json
Normal file
@@ -0,0 +1,4 @@
{
  "positive": 150,
  "negative": -150
}
10
tests/inputs/int32/int32.proto
Normal file
@@ -0,0 +1,10 @@
syntax = "proto3";

package int32;

// Some documentation about the Test message.
message Test {
  // Some documentation about the count.
  int32 positive = 1;
  int32 negative = 2;
}
7
tests/inputs/map/map.json
Normal file
@@ -0,0 +1,7 @@
{
  "counts": {
    "item1": 1,
    "item2": 2,
    "item3": 3
  }
}
7
tests/inputs/map/map.proto
Normal file
@@ -0,0 +1,7 @@
syntax = "proto3";

package map;

message Test {
  map<string, int32> counts = 1;
}
10
tests/inputs/mapmessage/mapmessage.json
Normal file
@@ -0,0 +1,10 @@
{
  "items": {
    "foo": {
      "count": 1
    },
    "bar": {
      "count": 2
    }
  }
}
11
tests/inputs/mapmessage/mapmessage.proto
Normal file
@@ -0,0 +1,11 @@
syntax = "proto3";

package mapmessage;

message Test {
  map<string, Nested> items = 1;
}

message Nested {
  int32 count = 1;
}
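A short usage sketch (illustrative, not part of the inputs), assuming betterproto-style output where a `map<string, Nested>` field is exposed as a plain `Dict[str, Nested]` and messages round-trip via `bytes()` and `parse()`:

```python
# Illustrative only: construct a message-valued map, serialize, and parse it back.
from tests.output_aristaproto.mapmessage import Nested, Test


def test_map_of_messages_round_trip():
    original = Test(items={"foo": Nested(count=1), "bar": Nested(count=2)})
    decoded = Test().parse(bytes(original))
    assert decoded.items["foo"].count == 1
    assert decoded.items["bar"].count == 2
```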
@@ -0,0 +1,16 @@
{
  "int": "value-for-int",
  "float": "value-for-float",
  "complex": "value-for-complex",
  "list": "value-for-list",
  "tuple": "value-for-tuple",
  "range": "value-for-range",
  "str": "value-for-str",
  "bytearray": "value-for-bytearray",
  "bytes": "value-for-bytes",
  "memoryview": "value-for-memoryview",
  "set": "value-for-set",
  "frozenset": "value-for-frozenset",
  "map": "value-for-map",
  "bool": "value-for-bool"
}
@@ -0,0 +1,40 @@
syntax = "proto3";

package namespace_builtin_types;

// Tests that messages may contain fields with names that are python types

message Test {
  // https://docs.python.org/2/library/stdtypes.html#numeric-types-int-float-long-complex
  string int = 1;
  string float = 2;
  string complex = 3;

  // https://docs.python.org/3/library/stdtypes.html#sequence-types-list-tuple-range
  string list = 4;
  string tuple = 5;
  string range = 6;

  // https://docs.python.org/3/library/stdtypes.html#str
  string str = 7;

  // https://docs.python.org/3/library/stdtypes.html#bytearray-objects
  string bytearray = 8;

  // https://docs.python.org/3/library/stdtypes.html#bytes-and-bytearray-operations
  string bytes = 9;

  // https://docs.python.org/3/library/stdtypes.html#memory-views
  string memoryview = 10;

  // https://docs.python.org/3/library/stdtypes.html#set-types-set-frozenset
  string set = 11;
  string frozenset = 12;

  // https://docs.python.org/3/library/stdtypes.html#dict
  string map = 13;
  string dict = 14;

  // https://docs.python.org/3/library/stdtypes.html#boolean-values
  string bool = 15;
}
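Because names like `int` or `bool` are ordinary identifiers (they only shadow builtins), the generated dataclass can keep them as-is; the sketch below assumes exactly that and is illustrative rather than part of the inputs.

```python
# Illustrative only: assumes builtin-shadowing field names are kept unchanged.
from tests.output_aristaproto.namespace_builtin_types import Test


def test_builtin_named_fields_are_plain_attributes():
    message = Test(int="value-for-int", map="value-for-map", bool="value-for-bool")
    assert message.int == "value-for-int"
    assert message.map == "value-for-map"
    assert message.bool == "value-for-bool"
```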
37
tests/inputs/namespace_keywords/namespace_keywords.json
Normal file
@@ -0,0 +1,37 @@
{
  "False": 1,
  "None": 2,
  "True": 3,
  "and": 4,
  "as": 5,
  "assert": 6,
  "async": 7,
  "await": 8,
  "break": 9,
  "class": 10,
  "continue": 11,
  "def": 12,
  "del": 13,
  "elif": 14,
  "else": 15,
  "except": 16,
  "finally": 17,
  "for": 18,
  "from": 19,
  "global": 20,
  "if": 21,
  "import": 22,
  "in": 23,
  "is": 24,
  "lambda": 25,
  "nonlocal": 26,
  "not": 27,
  "or": 28,
  "pass": 29,
  "raise": 30,
  "return": 31,
  "try": 32,
  "while": 33,
  "with": 34,
  "yield": 35
}
46
tests/inputs/namespace_keywords/namespace_keywords.proto
Normal file
@@ -0,0 +1,46 @@
syntax = "proto3";

package namespace_keywords;

// Tests that messages may contain fields that are Python keywords
//
// Generated with Python 3.7.6
// print('\n'.join(f'string {k} = {i+1};' for i,k in enumerate(keyword.kwlist)))

message Test {
  string False = 1;
  string None = 2;
  string True = 3;
  string and = 4;
  string as = 5;
  string assert = 6;
  string async = 7;
  string await = 8;
  string break = 9;
  string class = 10;
  string continue = 11;
  string def = 12;
  string del = 13;
  string elif = 14;
  string else = 15;
  string except = 16;
  string finally = 17;
  string for = 18;
  string from = 19;
  string global = 20;
  string if = 21;
  string import = 22;
  string in = 23;
  string is = 24;
  string lambda = 25;
  string nonlocal = 26;
  string not = 27;
  string or = 28;
  string pass = 29;
  string raise = 30;
  string return = 31;
  string try = 32;
  string while = 33;
  string with = 34;
  string yield = 35;
}
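Keywords, unlike builtin names, cannot appear as attribute names or keyword arguments, so the generator has to rename them in Python while the dict/JSON form keeps the original proto names. The sketch below is illustrative and assumes a trailing-underscore escaping convention (`for` → `for_`); the actual convention is whatever the plugin emits.

```python
# Illustrative only: the `for_` / `class_` attribute names are an assumption.
from tests.output_aristaproto.namespace_keywords import Test


def test_keyword_named_fields():
    message = Test().from_dict({"for": 18, "class": 10})
    assert getattr(message, "for_") == 18
    assert getattr(message, "class_") == 10
```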
7
tests/inputs/nested/nested.json
Normal file
@@ -0,0 +1,7 @@
{
  "nested": {
    "count": 150
  },
  "sibling": {},
  "msg": "THIS"
}
26
tests/inputs/nested/nested.proto
Normal file
@@ -0,0 +1,26 @@
syntax = "proto3";

package nested;

// A test message with a nested message inside of it.
message Test {
  // This is the nested type.
  message Nested {
    // Stores a simple counter.
    int32 count = 1;
  }
  // This is the nested enum.
  enum Msg {
    NONE = 0;
    THIS = 1;
  }

  Nested nested = 1;
  Sibling sibling = 2;
  Sibling sibling2 = 3;
  Msg msg = 4;
}

message Sibling {
  int32 foo = 1;
}
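An illustrative sketch (not part of the inputs) of how the nested declarations might surface, assuming the flattened naming scheme (`Test.Nested` → `TestNested`, `Test.Msg` → `TestMsg`) that the `NestedRequestMessage` import in the service test above also relies on:

```python
# Illustrative only: TestNested / TestMsg names assume flattened nested types.
from tests.output_aristaproto.nested import Sibling, Test, TestMsg, TestNested


def test_nested_message_and_enum():
    message = Test(nested=TestNested(count=150), sibling=Sibling(), msg=TestMsg.THIS)
    assert message.nested.count == 150
    assert message.msg == TestMsg.THIS
```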
21
tests/inputs/nested2/nested2.proto
Normal file
@@ -0,0 +1,21 @@
syntax = "proto3";

package nested2;

import "package.proto";

message Game {
  message Player {
    enum Race {
      human = 0;
      orc = 1;
    }
  }
}

message Test {
  Game game = 1;
  Game.Player GamePlayer = 2;
  Game.Player.Race GamePlayerRace = 3;
  equipment.Weapon Weapon = 4;
}
7
tests/inputs/nested2/package.proto
Normal file
@@ -0,0 +1,7 @@
syntax = "proto3";

package nested2.equipment;

message Weapon {

}
Some files were not shown because too many files have changed in this diff.