Diffstat (limited to 'src/aristaproto')
-rw-r--r--  src/aristaproto/__init__.py  2038
-rw-r--r--  src/aristaproto/_types.py  14
-rw-r--r--  src/aristaproto/_version.py  4
-rw-r--r--  src/aristaproto/casing.py  143
-rw-r--r--  src/aristaproto/compile/__init__.py  0
-rw-r--r--  src/aristaproto/compile/importing.py  176
-rw-r--r--  src/aristaproto/compile/naming.py  21
-rw-r--r--  src/aristaproto/enum.py  195
-rw-r--r--  src/aristaproto/grpc/__init__.py  0
-rw-r--r--  src/aristaproto/grpc/grpclib_client.py  177
-rw-r--r--  src/aristaproto/grpc/grpclib_server.py  33
-rw-r--r--  src/aristaproto/grpc/util/__init__.py  0
-rw-r--r--  src/aristaproto/grpc/util/async_channel.py  193
-rw-r--r--  src/aristaproto/lib/__init__.py  0
-rw-r--r--  src/aristaproto/lib/google/__init__.py  0
-rw-r--r--  src/aristaproto/lib/google/protobuf/__init__.py  1
-rw-r--r--  src/aristaproto/lib/google/protobuf/compiler/__init__.py  1
-rw-r--r--  src/aristaproto/lib/pydantic/__init__.py  0
-rw-r--r--  src/aristaproto/lib/pydantic/google/__init__.py  0
-rw-r--r--  src/aristaproto/lib/pydantic/google/protobuf/__init__.py  2589
-rw-r--r--  src/aristaproto/lib/pydantic/google/protobuf/compiler/__init__.py  210
-rw-r--r--  src/aristaproto/lib/std/__init__.py  0
-rw-r--r--  src/aristaproto/lib/std/google/__init__.py  0
-rw-r--r--  src/aristaproto/lib/std/google/protobuf/__init__.py  2526
-rw-r--r--  src/aristaproto/lib/std/google/protobuf/compiler/__init__.py  198
-rw-r--r--  src/aristaproto/plugin/__init__.py  1
-rw-r--r--  src/aristaproto/plugin/__main__.py  4
-rw-r--r--  src/aristaproto/plugin/compiler.py  50
-rwxr-xr-x  src/aristaproto/plugin/main.py  52
-rw-r--r--  src/aristaproto/plugin/models.py  851
-rw-r--r--  src/aristaproto/plugin/parser.py  221
-rw-r--r--  src/aristaproto/plugin/plugin.bat  2
-rw-r--r--  src/aristaproto/py.typed  0
-rw-r--r--  src/aristaproto/templates/template.py.j2  257
-rw-r--r--  src/aristaproto/utils.py  56
35 files changed, 10013 insertions, 0 deletions
diff --git a/src/aristaproto/__init__.py b/src/aristaproto/__init__.py
new file mode 100644
index 0000000..79d71c5
--- /dev/null
+++ b/src/aristaproto/__init__.py
@@ -0,0 +1,2038 @@
+from __future__ import annotations
+
+import dataclasses
+import enum as builtin_enum
+import json
+import math
+import struct
+import sys
+import typing
+import warnings
+from abc import ABC
+from base64 import (
+ b64decode,
+ b64encode,
+)
+from copy import deepcopy
+from datetime import (
+ datetime,
+ timedelta,
+ timezone,
+)
+from io import BytesIO
+from itertools import count
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ ClassVar,
+ Dict,
+ Generator,
+ Iterable,
+ Mapping,
+ Optional,
+ Set,
+ Tuple,
+ Type,
+ Union,
+ get_type_hints,
+)
+
+from dateutil.parser import isoparse
+from typing_extensions import Self
+
+from ._types import T
+from ._version import __version__
+from .casing import (
+ camel_case,
+ safe_snake_case,
+ snake_case,
+)
+from .enum import Enum as Enum
+from .grpc.grpclib_client import ServiceStub as ServiceStub
+from .utils import (
+ classproperty,
+ hybridmethod,
+)
+
+
+if TYPE_CHECKING:
+ from _typeshed import (
+ SupportsRead,
+ SupportsWrite,
+ )
+
+
+# Proto 3 data types
+TYPE_ENUM = "enum"
+TYPE_BOOL = "bool"
+TYPE_INT32 = "int32"
+TYPE_INT64 = "int64"
+TYPE_UINT32 = "uint32"
+TYPE_UINT64 = "uint64"
+TYPE_SINT32 = "sint32"
+TYPE_SINT64 = "sint64"
+TYPE_FLOAT = "float"
+TYPE_DOUBLE = "double"
+TYPE_FIXED32 = "fixed32"
+TYPE_SFIXED32 = "sfixed32"
+TYPE_FIXED64 = "fixed64"
+TYPE_SFIXED64 = "sfixed64"
+TYPE_STRING = "string"
+TYPE_BYTES = "bytes"
+TYPE_MESSAGE = "message"
+TYPE_MAP = "map"
+
+# Fields that use a fixed amount of space (4 or 8 bytes)
+FIXED_TYPES = [
+ TYPE_FLOAT,
+ TYPE_DOUBLE,
+ TYPE_FIXED32,
+ TYPE_SFIXED32,
+ TYPE_FIXED64,
+ TYPE_SFIXED64,
+]
+
+# Fields that are numerical 64-bit types
+INT_64_TYPES = [TYPE_INT64, TYPE_UINT64, TYPE_SINT64, TYPE_FIXED64, TYPE_SFIXED64]
+
+# Fields that are efficiently packed when encoded as repeated fields
+PACKED_TYPES = [
+ TYPE_ENUM,
+ TYPE_BOOL,
+ TYPE_INT32,
+ TYPE_INT64,
+ TYPE_UINT32,
+ TYPE_UINT64,
+ TYPE_SINT32,
+ TYPE_SINT64,
+ TYPE_FLOAT,
+ TYPE_DOUBLE,
+ TYPE_FIXED32,
+ TYPE_SFIXED32,
+ TYPE_FIXED64,
+ TYPE_SFIXED64,
+]
+
+# Wire types
+# https://developers.google.com/protocol-buffers/docs/encoding#structure
+WIRE_VARINT = 0
+WIRE_FIXED_64 = 1
+WIRE_LEN_DELIM = 2
+WIRE_FIXED_32 = 5
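+
+# On the wire, every field starts with a varint "key" that combines the field
+# number and wire type: key = (field_number << 3) | wire_type. e.g. field
+# number 1 encoded as a varint (wire type 0) has the single key byte 0x08.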
+
+# Mappings of which Proto 3 types correspond to which wire types.
+WIRE_VARINT_TYPES = [
+ TYPE_ENUM,
+ TYPE_BOOL,
+ TYPE_INT32,
+ TYPE_INT64,
+ TYPE_UINT32,
+ TYPE_UINT64,
+ TYPE_SINT32,
+ TYPE_SINT64,
+]
+
+WIRE_FIXED_32_TYPES = [TYPE_FLOAT, TYPE_FIXED32, TYPE_SFIXED32]
+WIRE_FIXED_64_TYPES = [TYPE_DOUBLE, TYPE_FIXED64, TYPE_SFIXED64]
+WIRE_LEN_DELIM_TYPES = [TYPE_STRING, TYPE_BYTES, TYPE_MESSAGE, TYPE_MAP]
+
+# Indicator of message delimitation in streams
+SIZE_DELIMITED = -1
+
+
+class _DateTime(datetime):
+ """Subclass of datetime with an attribute to store the original nanos value from a Timestamp field"""
+
+ __slots__ = "_nanos"
+
+ @property
+ def nanos(self):
+ return self._nanos
+
+
+# Protobuf datetimes start at the Unix Epoch in 1970 in UTC.
+def datetime_default_gen() -> _DateTime:
+ return _DateTime(1970, 1, 1, tzinfo=timezone.utc)
+
+
+DATETIME_ZERO = datetime_default_gen()
+
+# Special protobuf json doubles
+INFINITY = "Infinity"
+NEG_INFINITY = "-Infinity"
+NAN = "NaN"
+
+
+class Casing(builtin_enum.Enum):
+ """Casing constants for serialization."""
+
+ CAMEL = camel_case #: A camelCase serialization function.
+ SNAKE = snake_case #: A snake_case serialization function.
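+
+ # e.g. Casing.SNAKE("myFieldName") returns "my_field_name" and
+ # Casing.CAMEL("my_field_name") returns "myFieldName".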
+
+
+PLACEHOLDER: Any = object()
+
+
+@dataclasses.dataclass(frozen=True)
+class FieldMetadata:
+ """Stores internal metadata used for parsing & serialization."""
+
+ # Protobuf field number
+ number: int
+ # Protobuf type name
+ proto_type: str
+ # Map information if the proto_type is a map
+ map_types: Optional[Tuple[str, str]] = None
+ # Groups several "one-of" fields together
+ group: Optional[str] = None
+ # Describes the wrapped type (e.g. when using google.protobuf.BoolValue)
+ wraps: Optional[str] = None
+ # Is the field optional
+ optional: Optional[bool] = False
+
+ @staticmethod
+ def get(field: dataclasses.Field) -> "FieldMetadata":
+ """Returns the field metadata for a dataclass field."""
+ return field.metadata["aristaproto"]
+
+
+def dataclass_field(
+ number: int,
+ proto_type: str,
+ *,
+ map_types: Optional[Tuple[str, str]] = None,
+ group: Optional[str] = None,
+ wraps: Optional[str] = None,
+ optional: bool = False,
+) -> dataclasses.Field:
+ """Creates a dataclass field with attached protobuf metadata."""
+ return dataclasses.field(
+ default=None if optional else PLACEHOLDER,
+ metadata={
+ "aristaproto": FieldMetadata(
+ number, proto_type, map_types, group, wraps, optional
+ )
+ },
+ )
+
+
+# Note: the fields below return `Any` to prevent type errors in the generated
+# data classes since the types won't match with `Field` and they get swapped
+# out at runtime. The generated dataclass variables are still typed correctly.
+
+
+def enum_field(number: int, group: Optional[str] = None, optional: bool = False) -> Any:
+ return dataclass_field(number, TYPE_ENUM, group=group, optional=optional)
+
+
+def bool_field(number: int, group: Optional[str] = None, optional: bool = False) -> Any:
+ return dataclass_field(number, TYPE_BOOL, group=group, optional=optional)
+
+
+def int32_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_INT32, group=group, optional=optional)
+
+
+def int64_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_INT64, group=group, optional=optional)
+
+
+def uint32_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_UINT32, group=group, optional=optional)
+
+
+def uint64_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_UINT64, group=group, optional=optional)
+
+
+def sint32_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_SINT32, group=group, optional=optional)
+
+
+def sint64_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_SINT64, group=group, optional=optional)
+
+
+def float_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_FLOAT, group=group, optional=optional)
+
+
+def double_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_DOUBLE, group=group, optional=optional)
+
+
+def fixed32_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_FIXED32, group=group, optional=optional)
+
+
+def fixed64_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_FIXED64, group=group, optional=optional)
+
+
+def sfixed32_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_SFIXED32, group=group, optional=optional)
+
+
+def sfixed64_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_SFIXED64, group=group, optional=optional)
+
+
+def string_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_STRING, group=group, optional=optional)
+
+
+def bytes_field(
+ number: int, group: Optional[str] = None, optional: bool = False
+) -> Any:
+ return dataclass_field(number, TYPE_BYTES, group=group, optional=optional)
+
+
+def message_field(
+ number: int,
+ group: Optional[str] = None,
+ wraps: Optional[str] = None,
+ optional: bool = False,
+) -> Any:
+ return dataclass_field(
+ number, TYPE_MESSAGE, group=group, wraps=wraps, optional=optional
+ )
+
+
+def map_field(
+ number: int, key_type: str, value_type: str, group: Optional[str] = None
+) -> Any:
+ return dataclass_field(
+ number, TYPE_MAP, map_types=(key_type, value_type), group=group
+ )
+
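+# For illustration, generated code combines these helpers with a dataclass
+# declaration roughly like the following (hypothetical message, shown as a
+# sketch rather than actual generated output):
+#
+#     @dataclasses.dataclass(eq=False, repr=False)
+#     class Example(Message):
+#         count: int = int32_field(1)
+#         name: str = string_field(2)
+#         tags: Dict[str, str] = map_field(3, TYPE_STRING, TYPE_STRING)
+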
+
+def _pack_fmt(proto_type: str) -> str:
+ """Returns a little-endian format string for reading/writing binary."""
+ return {
+ TYPE_DOUBLE: "<d",
+ TYPE_FLOAT: "<f",
+ TYPE_FIXED32: "<I",
+ TYPE_FIXED64: "<Q",
+ TYPE_SFIXED32: "<i",
+ TYPE_SFIXED64: "<q",
+ }[proto_type]
+
+
+def dump_varint(value: int, stream: "SupportsWrite[bytes]") -> None:
+ """Encodes a single varint and dumps it into the provided stream."""
+ if value < -(1 << 63):
+ raise ValueError(
+ "Negative value is not representable as a 64-bit integer - unable to encode a varint within 10 bytes."
+ )
+ elif value < 0:
+ value += 1 << 64
+
+ bits = value & 0x7F
+ value >>= 7
+ while value:
+ stream.write((0x80 | bits).to_bytes(1, "little"))
+ bits = value & 0x7F
+ value >>= 7
+ stream.write(bits.to_bytes(1, "little"))
+
+
+def encode_varint(value: int) -> bytes:
+ """Encodes a single varint value for serialization."""
+ with BytesIO() as stream:
+ dump_varint(value, stream)
+ return stream.getvalue()
+
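+# Varints pack 7 bits per byte, least-significant group first, with the high
+# bit of each byte acting as a continuation flag. e.g.:
+#     >>> encode_varint(300)
+#     b'\xac\x02'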
+
+def size_varint(value: int) -> int:
+ """Calculates the size in bytes that a value would take as a varint."""
+ if value < -(1 << 63):
+ raise ValueError(
+ "Negative value is not representable as a 64-bit integer - unable to encode a varint within 10 bytes."
+ )
+ elif value < 0:
+ return 10
+ elif value == 0:
+ return 1
+ else:
+ return math.ceil(value.bit_length() / 7)
+
+
+def _preprocess_single(proto_type: str, wraps: str, value: Any) -> bytes:
+ """Adjusts values before serialization."""
+ if proto_type in (
+ TYPE_ENUM,
+ TYPE_BOOL,
+ TYPE_INT32,
+ TYPE_INT64,
+ TYPE_UINT32,
+ TYPE_UINT64,
+ ):
+ return encode_varint(value)
+ elif proto_type in (TYPE_SINT32, TYPE_SINT64):
+ # Handle zig-zag encoding.
+ return encode_varint(value << 1 if value >= 0 else (value << 1) ^ (~0))
+ elif proto_type in FIXED_TYPES:
+ return struct.pack(_pack_fmt(proto_type), value)
+ elif proto_type == TYPE_STRING:
+ return value.encode("utf-8")
+ elif proto_type == TYPE_MESSAGE:
+ if isinstance(value, datetime):
+ # Convert the `datetime` to a timestamp message.
+ value = _Timestamp.from_datetime(value)
+ elif isinstance(value, timedelta):
+ # Convert the `timedelta` to a duration message.
+ value = _Duration.from_timedelta(value)
+ elif wraps:
+ if value is None:
+ return b""
+ value = _get_wrapper(wraps)(value=value)
+
+ return bytes(value)
+
+ return value
+
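+# Zig-zag encoding (sint32/sint64) maps signed values to unsigned ones so that
+# small magnitudes stay small on the wire: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3.
+# e.g. _preprocess_single(TYPE_SINT32, "", -1) == b'\x01'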
+
+def _len_preprocessed_single(proto_type: str, wraps: str, value: Any) -> int:
+ """Calculate the size of adjusted values for serialization without fully serializing them."""
+ if proto_type in (
+ TYPE_ENUM,
+ TYPE_BOOL,
+ TYPE_INT32,
+ TYPE_INT64,
+ TYPE_UINT32,
+ TYPE_UINT64,
+ ):
+ return size_varint(value)
+ elif proto_type in (TYPE_SINT32, TYPE_SINT64):
+ # Handle zig-zag encoding.
+ return size_varint(value << 1 if value >= 0 else (value << 1) ^ (~0))
+ elif proto_type in FIXED_TYPES:
+ return len(struct.pack(_pack_fmt(proto_type), value))
+ elif proto_type == TYPE_STRING:
+ return len(value.encode("utf-8"))
+ elif proto_type == TYPE_MESSAGE:
+ if isinstance(value, datetime):
+ # Convert the `datetime` to a timestamp message.
+ value = _Timestamp.from_datetime(value)
+ elif isinstance(value, timedelta):
+ # Convert the `timedelta` to a duration message.
+ value = _Duration.from_timedelta(value)
+ elif wraps:
+ if value is None:
+ return 0
+ value = _get_wrapper(wraps)(value=value)
+
+ return len(bytes(value))
+
+ return len(value)
+
+
+def _serialize_single(
+ field_number: int,
+ proto_type: str,
+ value: Any,
+ *,
+ serialize_empty: bool = False,
+ wraps: str = "",
+) -> bytes:
+ """Serializes a single field and value."""
+ value = _preprocess_single(proto_type, wraps, value)
+
+ output = bytearray()
+ if proto_type in WIRE_VARINT_TYPES:
+ key = encode_varint(field_number << 3)
+ output += key + value
+ elif proto_type in WIRE_FIXED_32_TYPES:
+ key = encode_varint((field_number << 3) | 5)
+ output += key + value
+ elif proto_type in WIRE_FIXED_64_TYPES:
+ key = encode_varint((field_number << 3) | 1)
+ output += key + value
+ elif proto_type in WIRE_LEN_DELIM_TYPES:
+ if len(value) or serialize_empty or wraps:
+ key = encode_varint((field_number << 3) | 2)
+ output += key + encode_varint(len(value)) + value
+ else:
+ raise NotImplementedError(proto_type)
+
+ return bytes(output)
+
+
+def _len_single(
+ field_number: int,
+ proto_type: str,
+ value: Any,
+ *,
+ serialize_empty: bool = False,
+ wraps: str = "",
+) -> int:
+ """Calculates the size of a serialized single field and value."""
+ size = _len_preprocessed_single(proto_type, wraps, value)
+ if proto_type in WIRE_VARINT_TYPES:
+ size += size_varint(field_number << 3)
+ elif proto_type in WIRE_FIXED_32_TYPES:
+ size += size_varint((field_number << 3) | 5)
+ elif proto_type in WIRE_FIXED_64_TYPES:
+ size += size_varint((field_number << 3) | 1)
+ elif proto_type in WIRE_LEN_DELIM_TYPES:
+ if size or serialize_empty or wraps:
+ size += size_varint((field_number << 3) | 2) + size_varint(size)
+ else:
+ raise NotImplementedError(proto_type)
+
+ return size
+
+
+def _parse_float(value: Any) -> float:
+ """Parse the given value to a float
+
+ Parameters
+ ----------
+ value: Any
+ Value to parse
+
+ Returns
+ -------
+ float
+ Parsed value
+ """
+ if value == INFINITY:
+ return float("inf")
+ if value == NEG_INFINITY:
+ return -float("inf")
+ if value == NAN:
+ return float("nan")
+ return float(value)
+
+
+def _dump_float(value: float) -> Union[float, str]:
+ """Dump the given float to JSON
+
+ Parameters
+ ----------
+ value: float
+ Value to dump
+
+ Returns
+ -------
+ Union[float, str]
+ Dumped value, either the float itself or one of the special strings
+ ``"Infinity"``, ``"-Infinity"`` or ``"NaN"``
+ """
+ if value == float("inf"):
+ return INFINITY
+ if value == -float("inf"):
+ return NEG_INFINITY
+ if isinstance(value, float) and math.isnan(value):
+ return NAN
+ return value
+
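+# _parse_float and _dump_float are inverses for the special values, e.g.
+# _dump_float(float("inf")) == "Infinity" and _parse_float("Infinity")
+# returns float("inf").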
+
+def load_varint(stream: "SupportsRead[bytes]") -> Tuple[int, bytes]:
+ """
+ Load a single varint value from a stream. Returns the value and the raw bytes read.
+ """
+ result = 0
+ raw = b""
+ for shift in count(0, 7):
+ if shift >= 64:
+ raise ValueError("Too many bytes when decoding varint.")
+ b = stream.read(1)
+ if not b:
+ raise EOFError("Stream ended unexpectedly while attempting to load varint.")
+ raw += b
+ b_int = int.from_bytes(b, byteorder="little")
+ result |= (b_int & 0x7F) << shift
+ if not (b_int & 0x80):
+ return result, raw
+
+
+def decode_varint(buffer: bytes, pos: int) -> Tuple[int, int]:
+ """
+ Decode a single varint value from a byte buffer. Returns the value and the
+ new position in the buffer.
+ """
+ with BytesIO(buffer) as stream:
+ stream.seek(pos)
+ value, raw = load_varint(stream)
+ return value, pos + len(raw)
+
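+# decode_varint is the buffer-based counterpart of encode_varint, e.g.:
+#     >>> decode_varint(b'\xac\x02', 0)
+#     (300, 2)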
+
+@dataclasses.dataclass(frozen=True)
+class ParsedField:
+ number: int
+ wire_type: int
+ value: Any
+ raw: bytes
+
+
+def load_fields(stream: "SupportsRead[bytes]") -> Generator[ParsedField, None, None]:
+ while True:
+ try:
+ num_wire, raw = load_varint(stream)
+ except EOFError:
+ return
+ number = num_wire >> 3
+ wire_type = num_wire & 0x7
+
+ decoded: Any = None
+ if wire_type == WIRE_VARINT:
+ decoded, r = load_varint(stream)
+ raw += r
+ elif wire_type == WIRE_FIXED_64:
+ decoded = stream.read(8)
+ raw += decoded
+ elif wire_type == WIRE_LEN_DELIM:
+ length, r = load_varint(stream)
+ decoded = stream.read(length)
+ raw += r
+ raw += decoded
+ elif wire_type == WIRE_FIXED_32:
+ decoded = stream.read(4)
+ raw += decoded
+
+ yield ParsedField(number=number, wire_type=wire_type, value=decoded, raw=raw)
+
+
+def parse_fields(value: bytes) -> Generator[ParsedField, None, None]:
+ i = 0
+ while i < len(value):
+ start = i
+ num_wire, i = decode_varint(value, i)
+ number = num_wire >> 3
+ wire_type = num_wire & 0x7
+
+ decoded: Any = None
+ if wire_type == WIRE_VARINT:
+ decoded, i = decode_varint(value, i)
+ elif wire_type == WIRE_FIXED_64:
+ decoded, i = value[i : i + 8], i + 8
+ elif wire_type == WIRE_LEN_DELIM:
+ length, i = decode_varint(value, i)
+ decoded = value[i : i + length]
+ i += length
+ elif wire_type == WIRE_FIXED_32:
+ decoded, i = value[i : i + 4], i + 4
+
+ yield ParsedField(
+ number=number, wire_type=wire_type, value=decoded, raw=value[start:i]
+ )
+
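+# e.g. the classic example from the protobuf encoding docs (field 1 set to the
+# varint 150) parses as:
+#     >>> next(parse_fields(b'\x08\x96\x01'))
+#     ParsedField(number=1, wire_type=0, value=150, raw=b'\x08\x96\x01')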
+
+class ProtoClassMetadata:
+ __slots__ = (
+ "oneof_group_by_field",
+ "oneof_field_by_group",
+ "default_gen",
+ "cls_by_field",
+ "field_name_by_number",
+ "meta_by_field_name",
+ "sorted_field_names",
+ )
+
+ oneof_group_by_field: Dict[str, str]
+ oneof_field_by_group: Dict[str, Set[dataclasses.Field]]
+ field_name_by_number: Dict[int, str]
+ meta_by_field_name: Dict[str, FieldMetadata]
+ sorted_field_names: Tuple[str, ...]
+ default_gen: Dict[str, Callable[[], Any]]
+ cls_by_field: Dict[str, Type]
+
+ def __init__(self, cls: Type["Message"]):
+ by_field = {}
+ by_group: Dict[str, Set] = {}
+ by_field_name = {}
+ by_field_number = {}
+
+ fields = dataclasses.fields(cls)
+ for field in fields:
+ meta = FieldMetadata.get(field)
+
+ if meta.group:
+ # This is part of a one-of group.
+ by_field[field.name] = meta.group
+
+ by_group.setdefault(meta.group, set()).add(field)
+
+ by_field_name[field.name] = meta
+ by_field_number[meta.number] = field.name
+
+ self.oneof_group_by_field = by_field
+ self.oneof_field_by_group = by_group
+ self.field_name_by_number = by_field_number
+ self.meta_by_field_name = by_field_name
+ self.sorted_field_names = tuple(
+ by_field_number[number] for number in sorted(by_field_number)
+ )
+ self.default_gen = self._get_default_gen(cls, fields)
+ self.cls_by_field = self._get_cls_by_field(cls, fields)
+
+ @staticmethod
+ def _get_default_gen(
+ cls: Type["Message"], fields: Iterable[dataclasses.Field]
+ ) -> Dict[str, Callable[[], Any]]:
+ return {field.name: cls._get_field_default_gen(field) for field in fields}
+
+ @staticmethod
+ def _get_cls_by_field(
+ cls: Type["Message"], fields: Iterable[dataclasses.Field]
+ ) -> Dict[str, Type]:
+ field_cls = {}
+
+ for field in fields:
+ meta = FieldMetadata.get(field)
+ if meta.proto_type == TYPE_MAP:
+ assert meta.map_types
+ kt = cls._cls_for(field, index=0)
+ vt = cls._cls_for(field, index=1)
+ field_cls[field.name] = dataclasses.make_dataclass(
+ "Entry",
+ [
+ ("key", kt, dataclass_field(1, meta.map_types[0])),
+ ("value", vt, dataclass_field(2, meta.map_types[1])),
+ ],
+ bases=(Message,),
+ )
+ field_cls[f"{field.name}.value"] = vt
+ else:
+ field_cls[field.name] = cls._cls_for(field)
+
+ return field_cls
+
+
+class Message(ABC):
+ """
+ The base class for protobuf messages, all generated messages will inherit from
+ this. This class registers the message fields which are used by the serializers and
+ parsers to go between the Python, binary and JSON representations of the message.
+
+ .. container:: operations
+
+ .. describe:: bytes(x)
+
+ Calls :meth:`__bytes__`.
+
+ .. describe:: bool(x)
+
+ Calls :meth:`__bool__`.
+ """
+
+ _serialized_on_wire: bool
+ _unknown_fields: bytes
+ _group_current: Dict[str, str]
+ _aristaproto_meta: ClassVar[ProtoClassMetadata]
+
+ def __post_init__(self) -> None:
+ # Keep track of whether every field was default
+ all_sentinel = True
+
+ # Set current field of each group after `__init__` has already been run.
+ group_current: Dict[str, Optional[str]] = {}
+ for field_name, meta in self._aristaproto.meta_by_field_name.items():
+ if meta.group:
+ group_current.setdefault(meta.group)
+
+ value = self.__raw_get(field_name)
+ if value is not PLACEHOLDER and not (meta.optional and value is None):
+ # Found a non-sentinel value
+ all_sentinel = False
+
+ if meta.group:
+ # This was set, so make it the selected value of the one-of.
+ group_current[meta.group] = field_name
+
+ # Now that all the defaults are set, reset it!
+ self.__dict__["_serialized_on_wire"] = not all_sentinel
+ self.__dict__["_unknown_fields"] = b""
+ self.__dict__["_group_current"] = group_current
+
+ def __raw_get(self, name: str) -> Any:
+ return super().__getattribute__(name)
+
+ def __eq__(self, other) -> bool:
+ if type(self) is not type(other):
+ return NotImplemented
+
+ for field_name in self._aristaproto.meta_by_field_name:
+ self_val = self.__raw_get(field_name)
+ other_val = other.__raw_get(field_name)
+ if self_val is PLACEHOLDER:
+ if other_val is PLACEHOLDER:
+ continue
+ self_val = self._get_field_default(field_name)
+ elif other_val is PLACEHOLDER:
+ other_val = other._get_field_default(field_name)
+
+ if self_val != other_val:
+ # We consider two nan values to be the same for the
+ # purposes of comparing messages (otherwise a message
+ # is not equal to itself)
+ if (
+ isinstance(self_val, float)
+ and isinstance(other_val, float)
+ and math.isnan(self_val)
+ and math.isnan(other_val)
+ ):
+ continue
+ else:
+ return False
+
+ return True
+
+ def __repr__(self) -> str:
+ parts = [
+ f"{field_name}={value!r}"
+ for field_name in self._aristaproto.sorted_field_names
+ for value in (self.__raw_get(field_name),)
+ if value is not PLACEHOLDER
+ ]
+ return f"{self.__class__.__name__}({', '.join(parts)})"
+
+ def __rich_repr__(self) -> Iterable[Tuple[str, Any, Any]]:
+ for field_name in self._aristaproto.sorted_field_names:
+ yield field_name, self.__raw_get(field_name), PLACEHOLDER
+
+ if not TYPE_CHECKING:
+
+ def __getattribute__(self, name: str) -> Any:
+ """
+ Lazily initialize default values to avoid infinite recursion for recursive
+ message types.
+ Raise :class:`AttributeError` on attempts to access unset ``oneof`` fields.
+ """
+ try:
+ group_current = super().__getattribute__("_group_current")
+ except AttributeError:
+ pass
+ else:
+ if name not in {"__class__", "_aristaproto"}:
+ group = self._aristaproto.oneof_group_by_field.get(name)
+ if group is not None and group_current[group] != name:
+ if sys.version_info < (3, 10):
+ raise AttributeError(
+ f"{group!r} is set to {group_current[group]!r}, not {name!r}"
+ )
+ else:
+ raise AttributeError(
+ f"{group!r} is set to {group_current[group]!r}, not {name!r}",
+ name=name,
+ obj=self,
+ )
+
+ value = super().__getattribute__(name)
+ if value is not PLACEHOLDER:
+ return value
+
+ value = self._get_field_default(name)
+ super().__setattr__(name, value)
+ return value
+
+ def __setattr__(self, attr: str, value: Any) -> None:
+ if (
+ isinstance(value, Message)
+ and hasattr(value, "_aristaproto")
+ and not value._aristaproto.meta_by_field_name
+ ):
+ value._serialized_on_wire = True
+
+ if attr != "_serialized_on_wire":
+ # Track when a field has been set.
+ self.__dict__["_serialized_on_wire"] = True
+
+ if hasattr(self, "_group_current"): # __post_init__ had already run
+ if attr in self._aristaproto.oneof_group_by_field:
+ group = self._aristaproto.oneof_group_by_field[attr]
+ for field in self._aristaproto.oneof_field_by_group[group]:
+ if field.name == attr:
+ self._group_current[group] = field.name
+ else:
+ super().__setattr__(field.name, PLACEHOLDER)
+
+ super().__setattr__(attr, value)
+
+ def __bool__(self) -> bool:
+ """True if the Message has any fields with non-default values."""
+ return any(
+ self.__raw_get(field_name)
+ not in (PLACEHOLDER, self._get_field_default(field_name))
+ for field_name in self._aristaproto.meta_by_field_name
+ )
+
+ def __deepcopy__(self: T, _: Any = {}) -> T:
+ kwargs = {}
+ for name in self._aristaproto.sorted_field_names:
+ value = self.__raw_get(name)
+ if value is not PLACEHOLDER:
+ kwargs[name] = deepcopy(value)
+ return self.__class__(**kwargs) # type: ignore
+
+ def __copy__(self: T, _: Any = {}) -> T:
+ kwargs = {}
+ for name in self._aristaproto.sorted_field_names:
+ value = self.__raw_get(name)
+ if value is not PLACEHOLDER:
+ kwargs[name] = value
+ return self.__class__(**kwargs) # type: ignore
+
+ @classproperty
+ def _aristaproto(cls: type[Self]) -> ProtoClassMetadata: # type: ignore
+ """
+ Lazy initialize metadata for each protobuf class.
+ It may be initialized multiple times in a multi-threaded environment,
+ but that won't affect the correctness.
+ """
+ try:
+ return cls._aristaproto_meta
+ except AttributeError:
+ cls._aristaproto_meta = meta = ProtoClassMetadata(cls)
+ return meta
+
+ def dump(self, stream: "SupportsWrite[bytes]", delimit: bool = False) -> None:
+ """
+ Dumps the binary encoded Protobuf message to the stream.
+
+ Parameters
+ -----------
+ stream: :class:`BinaryIO`
+ The stream to dump the message to.
+ delimit:
+ Whether to prefix the message with a varint declaring its size.
+ Pass ``SIZE_DELIMITED`` to write the size prefix.
+ """
+ if delimit == SIZE_DELIMITED:
+ dump_varint(len(self), stream)
+
+ for field_name, meta in self._aristaproto.meta_by_field_name.items():
+ try:
+ value = getattr(self, field_name)
+ except AttributeError:
+ continue
+
+ if value is None:
+ # Optional items should be skipped. This is used for the Google
+ # wrapper types and proto3 field presence/optional fields.
+ continue
+
+ # Being selected in a group means this field is the one that is
+ # currently set in a `oneof` group, so it must be serialized even
+ # if the value is the default zero value.
+ #
+ # Note that proto3 field presence/optional fields are put in a
+ # synthetic single-item oneof by protoc, which helps us ensure we
+ # send the value even if the value is the default zero value.
+ selected_in_group = bool(meta.group) or meta.optional
+
+ # Empty messages can still be sent on the wire if they were
+ # set (or received empty).
+ serialize_empty = isinstance(value, Message) and value._serialized_on_wire
+
+ include_default_value_for_oneof = self._include_default_value_for_oneof(
+ field_name=field_name, meta=meta
+ )
+
+ if value == self._get_field_default(field_name) and not (
+ selected_in_group or serialize_empty or include_default_value_for_oneof
+ ):
+ # Default (zero) values are not serialized. Two exceptions are
+ # if this is the selected oneof item or if we know we have to
+ # serialize an empty message (i.e. zero value was explicitly
+ # set by the user).
+ continue
+
+ if isinstance(value, list):
+ if meta.proto_type in PACKED_TYPES:
+ # Packed lists look like a length-delimited field. First,
+ # preprocess/encode each value into a buffer and then
+ # treat it like a field of raw bytes.
+ buf = bytearray()
+ for item in value:
+ buf += _preprocess_single(meta.proto_type, "", item)
+ stream.write(_serialize_single(meta.number, TYPE_BYTES, buf))
+ else:
+ for item in value:
+ stream.write(
+ _serialize_single(
+ meta.number,
+ meta.proto_type,
+ item,
+ wraps=meta.wraps or "",
+ serialize_empty=True,
+ )
+ # if it's an empty message it still needs to be represented
+ # as an item in the repeated list
+ or b"\n\x00"
+ )
+
+ elif isinstance(value, dict):
+ for k, v in value.items():
+ assert meta.map_types
+ sk = _serialize_single(1, meta.map_types[0], k)
+ sv = _serialize_single(2, meta.map_types[1], v)
+ stream.write(
+ _serialize_single(meta.number, meta.proto_type, sk + sv)
+ )
+ else:
+ # If we have an empty string and we're including the default value for
+ # a oneof, make sure we serialize it. This ensures that the byte string
+ # output isn't simply an empty string. This also ensures that round trip
+ # serialization will keep `which_one_of` calls consistent.
+ if (
+ isinstance(value, str)
+ and value == ""
+ and include_default_value_for_oneof
+ ):
+ serialize_empty = True
+
+ stream.write(
+ _serialize_single(
+ meta.number,
+ meta.proto_type,
+ value,
+ serialize_empty=serialize_empty or bool(selected_in_group),
+ wraps=meta.wraps or "",
+ )
+ )
+
+ stream.write(self._unknown_fields)
+
+ def __bytes__(self) -> bytes:
+ """
+ Get the binary encoded Protobuf representation of this message instance.
+ """
+ with BytesIO() as stream:
+ self.dump(stream)
+ return stream.getvalue()
+
+ def __len__(self) -> int:
+ """
+ Get the size of the encoded Protobuf representation of this message instance.
+ """
+ size = 0
+ for field_name, meta in self._aristaproto.meta_by_field_name.items():
+ try:
+ value = getattr(self, field_name)
+ except AttributeError:
+ continue
+
+ if value is None:
+ # Optional items should be skipped. This is used for the Google
+ # wrapper types and proto3 field presence/optional fields.
+ continue
+
+ # Being selected in a group means this field is the one that is
+ # currently set in a `oneof` group, so it must be serialized even
+ # if the value is the default zero value.
+ #
+ # Note that proto3 field presence/optional fields are put in a
+ # synthetic single-item oneof by protoc, which helps us ensure we
+ # send the value even if the value is the default zero value.
+ selected_in_group = bool(meta.group) or meta.optional
+
+ # Empty messages can still be sent on the wire if they were
+ # set (or received empty).
+ serialize_empty = isinstance(value, Message) and value._serialized_on_wire
+
+ include_default_value_for_oneof = self._include_default_value_for_oneof(
+ field_name=field_name, meta=meta
+ )
+
+ if value == self._get_field_default(field_name) and not (
+ selected_in_group or serialize_empty or include_default_value_for_oneof
+ ):
+ # Default (zero) values are not serialized. Two exceptions are
+ # if this is the selected oneof item or if we know we have to
+ # serialize an empty message (i.e. zero value was explicitly
+ # set by the user).
+ continue
+
+ if isinstance(value, list):
+ if meta.proto_type in PACKED_TYPES:
+ # Packed lists look like a length-delimited field. First,
+ # preprocess/encode each value into a buffer and then
+ # treat it like a field of raw bytes.
+ buf = bytearray()
+ for item in value:
+ buf += _preprocess_single(meta.proto_type, "", item)
+ size += _len_single(meta.number, TYPE_BYTES, buf)
+ else:
+ for item in value:
+ size += (
+ _len_single(
+ meta.number,
+ meta.proto_type,
+ item,
+ wraps=meta.wraps or "",
+ serialize_empty=True,
+ )
+ # if it's an empty message it still needs to be represented
+ # as an item in the repeated list
+ or 2
+ )
+
+ elif isinstance(value, dict):
+ for k, v in value.items():
+ assert meta.map_types
+ sk = _serialize_single(1, meta.map_types[0], k)
+ sv = _serialize_single(2, meta.map_types[1], v)
+ size += _len_single(meta.number, meta.proto_type, sk + sv)
+ else:
+ # If we have an empty string and we're including the default value for
+ # a oneof, make sure we serialize it. This ensures that the byte string
+ # output isn't simply an empty string. This also ensures that round trip
+ # serialization will keep `which_one_of` calls consistent.
+ if (
+ isinstance(value, str)
+ and value == ""
+ and include_default_value_for_oneof
+ ):
+ serialize_empty = True
+
+ size += _len_single(
+ meta.number,
+ meta.proto_type,
+ value,
+ serialize_empty=serialize_empty or bool(selected_in_group),
+ wraps=meta.wraps or "",
+ )
+
+ size += len(self._unknown_fields)
+ return size
+
+ # For compatibility with other libraries
+ def SerializeToString(self: T) -> bytes:
+ """
+ Get the binary encoded Protobuf representation of this message instance.
+
+ .. note::
+ This is a method for compatibility with other libraries,
+ you should really use ``bytes(x)``.
+
+ Returns
+ --------
+ :class:`bytes`
+ The binary encoded Protobuf representation of this message instance
+ """
+ return bytes(self)
+
+ def __getstate__(self) -> bytes:
+ return bytes(self)
+
+ def __setstate__(self: T, pickled_bytes: bytes) -> T:
+ return self.parse(pickled_bytes)
+
+ def __reduce__(self) -> Tuple[Any, ...]:
+ return (self.__class__.FromString, (bytes(self),))
+
+ @classmethod
+ def _type_hint(cls, field_name: str) -> Type:
+ return cls._type_hints()[field_name]
+
+ @classmethod
+ def _type_hints(cls) -> Dict[str, Type]:
+ module = sys.modules[cls.__module__]
+ return get_type_hints(cls, module.__dict__, {})
+
+ @classmethod
+ def _cls_for(cls, field: dataclasses.Field, index: int = 0) -> Type:
+ """Get the message class for a field from the type hints."""
+ field_cls = cls._type_hint(field.name)
+ if hasattr(field_cls, "__args__") and index >= 0:
+ if field_cls.__args__ is not None:
+ field_cls = field_cls.__args__[index]
+ return field_cls
+
+ def _get_field_default(self, field_name: str) -> Any:
+ with warnings.catch_warnings():
+ # ignore warnings when initialising deprecated field defaults
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
+ return self._aristaproto.default_gen[field_name]()
+
+ @classmethod
+ def _get_field_default_gen(cls, field: dataclasses.Field) -> Any:
+ t = cls._type_hint(field.name)
+
+ if hasattr(t, "__origin__"):
+ if t.__origin__ is dict:
+ # This is some kind of map (dict in Python).
+ return dict
+ elif t.__origin__ is list:
+ # This is some kind of list (repeated) field.
+ return list
+ elif t.__origin__ is Union and t.__args__[1] is type(None):
+ # This is an optional field (either wrapped, or using proto3
+ # field presence). For setting the default we really don't care
+ # what kind of field it is.
+ return type(None)
+ else:
+ return t
+ elif issubclass(t, Enum):
+ # Enums always default to zero.
+ return t.try_value
+ elif t is datetime:
+ # Offsets are relative to 1970-01-01T00:00:00Z
+ return datetime_default_gen
+ else:
+ # This is either a primitive scalar or another message type. Calling
+ # it should result in its zero value.
+ return t
+
+ def _postprocess_single(
+ self, wire_type: int, meta: FieldMetadata, field_name: str, value: Any
+ ) -> Any:
+ """Adjusts values after parsing."""
+ if wire_type == WIRE_VARINT:
+ if meta.proto_type in (TYPE_INT32, TYPE_INT64):
+ bits = int(meta.proto_type[3:])
+ value = value & ((1 << bits) - 1)
+ signbit = 1 << (bits - 1)
+ value = int((value ^ signbit) - signbit)
+ elif meta.proto_type in (TYPE_SINT32, TYPE_SINT64):
+ # Undo zig-zag encoding
+ value = (value >> 1) ^ (-(value & 1))
+ elif meta.proto_type == TYPE_BOOL:
+ # Booleans use a varint encoding, so convert it to true/false.
+ value = value > 0
+ elif meta.proto_type == TYPE_ENUM:
+ # Convert enum ints to python enum instances
+ value = self._aristaproto.cls_by_field[field_name].try_value(value)
+ elif wire_type in (WIRE_FIXED_32, WIRE_FIXED_64):
+ fmt = _pack_fmt(meta.proto_type)
+ value = struct.unpack(fmt, value)[0]
+ elif wire_type == WIRE_LEN_DELIM:
+ if meta.proto_type == TYPE_STRING:
+ value = str(value, "utf-8")
+ elif meta.proto_type == TYPE_MESSAGE:
+ cls = self._aristaproto.cls_by_field[field_name]
+
+ if cls == datetime:
+ value = _Timestamp().parse(value).to_datetime()
+ elif cls == timedelta:
+ value = _Duration().parse(value).to_timedelta()
+ elif meta.wraps:
+ # This is a Google wrapper value message around a single
+ # scalar type.
+ value = _get_wrapper(meta.wraps)().parse(value).value
+ else:
+ value = cls().parse(value)
+ value._serialized_on_wire = True
+ elif meta.proto_type == TYPE_MAP:
+ value = self._aristaproto.cls_by_field[field_name]().parse(value)
+
+ return value
+
+ def _include_default_value_for_oneof(
+ self, field_name: str, meta: FieldMetadata
+ ) -> bool:
+ return (
+ meta.group is not None and self._group_current.get(meta.group) == field_name
+ )
+
+ def load(
+ self: T,
+ stream: "SupportsRead[bytes]",
+ size: Optional[int] = None,
+ ) -> T:
+ """
+ Load the binary encoded Protobuf from a stream into this message instance. This
+ returns the instance itself and is therefore assignable and chainable.
+
+ Parameters
+ -----------
+ stream: :class:`BinaryIO`
+ The stream to load the message from.
+ size: :class:`Optional[int]`
+ The size of the message in the stream.
+ Reads the stream until EOF if ``None`` is given.
+ Reads a size-prefix varint first if ``SIZE_DELIMITED`` is given.
+
+ Returns
+ --------
+ :class:`Message`
+ The initialized message.
+ """
+ # If the message is delimited, parse the message delimiter
+ if size == SIZE_DELIMITED:
+ size, _ = load_varint(stream)
+
+ # Got some data over the wire
+ self._serialized_on_wire = True
+ proto_meta = self._aristaproto
+ read = 0
+ for parsed in load_fields(stream):
+ field_name = proto_meta.field_name_by_number.get(parsed.number)
+ if not field_name:
+ self._unknown_fields += parsed.raw
+ continue
+
+ meta = proto_meta.meta_by_field_name[field_name]
+
+ value: Any
+ if parsed.wire_type == WIRE_LEN_DELIM and meta.proto_type in PACKED_TYPES:
+ # This is a packed repeated field.
+ pos = 0
+ value = []
+ while pos < len(parsed.value):
+ if meta.proto_type in (TYPE_FLOAT, TYPE_FIXED32, TYPE_SFIXED32):
+ decoded, pos = parsed.value[pos : pos + 4], pos + 4
+ wire_type = WIRE_FIXED_32
+ elif meta.proto_type in (TYPE_DOUBLE, TYPE_FIXED64, TYPE_SFIXED64):
+ decoded, pos = parsed.value[pos : pos + 8], pos + 8
+ wire_type = WIRE_FIXED_64
+ else:
+ decoded, pos = decode_varint(parsed.value, pos)
+ wire_type = WIRE_VARINT
+ decoded = self._postprocess_single(
+ wire_type, meta, field_name, decoded
+ )
+ value.append(decoded)
+ else:
+ value = self._postprocess_single(
+ parsed.wire_type, meta, field_name, parsed.value
+ )
+
+ try:
+ current = getattr(self, field_name)
+ except AttributeError:
+ current = self._get_field_default(field_name)
+ setattr(self, field_name, current)
+
+ if meta.proto_type == TYPE_MAP:
+ # Value represents a single key/value pair entry in the map.
+ current[value.key] = value.value
+ elif isinstance(current, list) and not isinstance(value, list):
+ current.append(value)
+ else:
+ setattr(self, field_name, value)
+
+ # If we have now loaded the expected length of the message, stop
+ if size is not None:
+ prev = read
+ read += len(parsed.raw)
+ if read == size:
+ break
+ elif read > size:
+ raise ValueError(
+ f"Expected message of size {size}, can only read "
+ f"either {prev} or {read} bytes - there is no "
+ "message of the expected size in the stream."
+ )
+
+ if size is not None and read < size:
+ raise ValueError(
+ f"Expected message of size {size}, but was only able to "
+ f"read {read} bytes - the stream may have ended too soon,"
+ " or the expected size may have been incorrect."
+ )
+
+ return self
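+
+ # Streaming usage sketch, where ``Msg`` stands for any generated message
+ # class (illustrative names, not part of this module):
+ #     buf = BytesIO()
+ #     first.dump(buf, delimit=SIZE_DELIMITED)
+ #     second.dump(buf, delimit=SIZE_DELIMITED)
+ #     buf.seek(0)
+ #     assert Msg().load(buf, size=SIZE_DELIMITED) == first
+ #     assert Msg().load(buf, size=SIZE_DELIMITED) == second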
+
+ def parse(self: T, data: bytes) -> T:
+ """
+ Parse the binary encoded Protobuf into this message instance. This
+ returns the instance itself and is therefore assignable and chainable.
+
+ Parameters
+ -----------
+ data: :class:`bytes`
+ The data to parse the message from.
+
+ Returns
+ --------
+ :class:`Message`
+ The initialized message.
+ """
+ with BytesIO(data) as stream:
+ return self.load(stream)
+
+ # For compatibility with other libraries.
+ @classmethod
+ def FromString(cls: Type[T], data: bytes) -> T:
+ """
+ Parse the binary encoded Protobuf into this message instance. This
+ returns the instance itself and is therefore assignable and chainable.
+
+ .. note::
+ This is a method for compatibility with other libraries,
+ you should really use :meth:`parse`.
+
+
+ Parameters
+ -----------
+ data: :class:`bytes`
+ The data to parse the protobuf from.
+
+ Returns
+ --------
+ :class:`Message`
+ The initialized message.
+ """
+ return cls().parse(data)
+
+ def to_dict(
+ self, casing: Casing = Casing.CAMEL, include_default_values: bool = False
+ ) -> Dict[str, Any]:
+ """
+ Returns a JSON serializable dict representation of this object.
+
+ Parameters
+ -----------
+ casing: :class:`Casing`
+ The casing to use for key values. Default is :attr:`Casing.CAMEL` for
+ compatibility purposes.
+ include_default_values: :class:`bool`
+ If ``True`` will include the default values of fields. Default is ``False``.
+ E.g. an ``int32`` field will be included with a value of ``0`` if this is
+ set to ``True``, otherwise this would be ignored.
+
+ Returns
+ --------
+ Dict[:class:`str`, Any]
+ The JSON serializable dict representation of this object.
+ """
+ output: Dict[str, Any] = {}
+ field_types = self._type_hints()
+ defaults = self._aristaproto.default_gen
+ for field_name, meta in self._aristaproto.meta_by_field_name.items():
+ field_is_repeated = defaults[field_name] is list
+ try:
+ value = getattr(self, field_name)
+ except AttributeError:
+ value = self._get_field_default(field_name)
+ cased_name = casing(field_name).rstrip("_") # type: ignore
+ if meta.proto_type == TYPE_MESSAGE:
+ if isinstance(value, datetime):
+ if (
+ value != DATETIME_ZERO
+ or include_default_values
+ or self._include_default_value_for_oneof(
+ field_name=field_name, meta=meta
+ )
+ ):
+ output[cased_name] = _Timestamp.timestamp_to_json(value)
+ elif isinstance(value, timedelta):
+ if (
+ value != timedelta(0)
+ or include_default_values
+ or self._include_default_value_for_oneof(
+ field_name=field_name, meta=meta
+ )
+ ):
+ output[cased_name] = _Duration.delta_to_json(value)
+ elif meta.wraps:
+ if value is not None or include_default_values:
+ output[cased_name] = value
+ elif field_is_repeated:
+ # Convert each item.
+ cls = self._aristaproto.cls_by_field[field_name]
+ if cls == datetime:
+ value = [_Timestamp.timestamp_to_json(i) for i in value]
+ elif cls == timedelta:
+ value = [_Duration.delta_to_json(i) for i in value]
+ else:
+ value = [
+ i.to_dict(casing, include_default_values) for i in value
+ ]
+ if value or include_default_values:
+ output[cased_name] = value
+ elif value is None:
+ if include_default_values:
+ output[cased_name] = value
+ elif (
+ value._serialized_on_wire
+ or include_default_values
+ or self._include_default_value_for_oneof(
+ field_name=field_name, meta=meta
+ )
+ ):
+ output[cased_name] = value.to_dict(casing, include_default_values)
+ elif meta.proto_type == TYPE_MAP:
+ output_map = {**value}
+ for k in value:
+ if hasattr(value[k], "to_dict"):
+ output_map[k] = value[k].to_dict(casing, include_default_values)
+
+ if value or include_default_values:
+ output[cased_name] = output_map
+ elif (
+ value != self._get_field_default(field_name)
+ or include_default_values
+ or self._include_default_value_for_oneof(
+ field_name=field_name, meta=meta
+ )
+ ):
+ if meta.proto_type in INT_64_TYPES:
+ if field_is_repeated:
+ output[cased_name] = [str(n) for n in value]
+ elif value is None:
+ if include_default_values:
+ output[cased_name] = value
+ else:
+ output[cased_name] = str(value)
+ elif meta.proto_type == TYPE_BYTES:
+ if field_is_repeated:
+ output[cased_name] = [
+ b64encode(b).decode("utf8") for b in value
+ ]
+ elif value is None and include_default_values:
+ output[cased_name] = value
+ else:
+ output[cased_name] = b64encode(value).decode("utf8")
+ elif meta.proto_type == TYPE_ENUM:
+ if field_is_repeated:
+ enum_class = field_types[field_name].__args__[0]
+ if isinstance(value, typing.Iterable) and not isinstance(
+ value, str
+ ):
+ output[cased_name] = [enum_class(el).name for el in value]
+ else:
+ # transparently upgrade single value to repeated
+ output[cased_name] = [enum_class(value).name]
+ elif value is None:
+ if include_default_values:
+ output[cased_name] = value
+ elif meta.optional:
+ enum_class = field_types[field_name].__args__[0]
+ output[cased_name] = enum_class(value).name
+ else:
+ enum_class = field_types[field_name] # noqa
+ output[cased_name] = enum_class(value).name
+ elif meta.proto_type in (TYPE_FLOAT, TYPE_DOUBLE):
+ if field_is_repeated:
+ output[cased_name] = [_dump_float(n) for n in value]
+ else:
+ output[cased_name] = _dump_float(value)
+ else:
+ output[cased_name] = value
+ return output
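+
+ # e.g. round-tripping a message instance ``msg`` through a JSON-compatible
+ # dict (illustrative sketch):
+ #     d = msg.to_dict(casing=Casing.SNAKE, include_default_values=True)
+ #     restored = type(msg).from_dict(d)  # equal to msg for typical messages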
+
+ @classmethod
+ def _from_dict_init(cls, mapping: Mapping[str, Any]) -> Mapping[str, Any]:
+ init_kwargs: Dict[str, Any] = {}
+ for key, value in mapping.items():
+ field_name = safe_snake_case(key)
+ try:
+ meta = cls._aristaproto.meta_by_field_name[field_name]
+ except KeyError:
+ continue
+ if value is None:
+ continue
+
+ if meta.proto_type == TYPE_MESSAGE:
+ sub_cls = cls._aristaproto.cls_by_field[field_name]
+ if sub_cls == datetime:
+ value = (
+ [isoparse(item) for item in value]
+ if isinstance(value, list)
+ else isoparse(value)
+ )
+ elif sub_cls == timedelta:
+ value = (
+ [timedelta(seconds=float(item[:-1])) for item in value]
+ if isinstance(value, list)
+ else timedelta(seconds=float(value[:-1]))
+ )
+ elif not meta.wraps:
+ value = (
+ [sub_cls.from_dict(item) for item in value]
+ if isinstance(value, list)
+ else sub_cls.from_dict(value)
+ )
+ elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE:
+ sub_cls = cls._aristaproto.cls_by_field[f"{field_name}.value"]
+ value = {k: sub_cls.from_dict(v) for k, v in value.items()}
+ else:
+ if meta.proto_type in INT_64_TYPES:
+ value = (
+ [int(n) for n in value]
+ if isinstance(value, list)
+ else int(value)
+ )
+ elif meta.proto_type == TYPE_BYTES:
+ value = (
+ [b64decode(n) for n in value]
+ if isinstance(value, list)
+ else b64decode(value)
+ )
+ elif meta.proto_type == TYPE_ENUM:
+ enum_cls = cls._aristaproto.cls_by_field[field_name]
+ if isinstance(value, list):
+ value = [enum_cls.from_string(e) for e in value]
+ elif isinstance(value, str):
+ value = enum_cls.from_string(value)
+ elif meta.proto_type in (TYPE_FLOAT, TYPE_DOUBLE):
+ value = (
+ [_parse_float(n) for n in value]
+ if isinstance(value, list)
+ else _parse_float(value)
+ )
+
+ init_kwargs[field_name] = value
+ return init_kwargs
+
+ @hybridmethod
+ def from_dict(cls: type[Self], value: Mapping[str, Any]) -> Self: # type: ignore
+ """
+ Parse the key/value pairs into a new message instance.
+
+ Parameters
+ -----------
+ value: Dict[:class:`str`, Any]
+ The dictionary to parse from.
+
+ Returns
+ --------
+ :class:`Message`
+ The initialized message.
+ """
+ self = cls(**cls._from_dict_init(value))
+ self._serialized_on_wire = True
+ return self
+
+ @from_dict.instancemethod
+ def from_dict(self, value: Mapping[str, Any]) -> Self:
+ """
+ Parse the key/value pairs into the current message instance. This returns the
+ instance itself and is therefore assignable and chainable.
+
+ Parameters
+ -----------
+ value: Dict[:class:`str`, Any]
+ The dictionary to parse from.
+
+ Returns
+ --------
+ :class:`Message`
+ The initialized message.
+ """
+ self._serialized_on_wire = True
+ for field, value in self._from_dict_init(value).items():
+ setattr(self, field, value)
+ return self
+
+ def to_json(
+ self,
+ indent: Union[None, int, str] = None,
+ include_default_values: bool = False,
+ casing: Casing = Casing.CAMEL,
+ ) -> str:
+ """A helper function to parse the message instance into its JSON
+ representation.
+
+ This is equivalent to::
+
+ json.dumps(message.to_dict(), indent=indent)
+
+ Parameters
+ -----------
+ indent: Optional[Union[:class:`int`, :class:`str`]]
+ The indent to pass to :func:`json.dumps`.
+
+ include_default_values: :class:`bool`
+ If ``True`` will include the default values of fields. Default is ``False``.
+ E.g. an ``int32`` field will be included with a value of ``0`` if this is
+ set to ``True``, otherwise this would be ignored.
+
+ casing: :class:`Casing`
+ The casing to use for key values. Default is :attr:`Casing.CAMEL` for
+ compatibility purposes.
+
+ Returns
+ --------
+ :class:`str`
+ The JSON representation of the message.
+ """
+ return json.dumps(
+ self.to_dict(include_default_values=include_default_values, casing=casing),
+ indent=indent,
+ )
+
+ def from_json(self: T, value: Union[str, bytes]) -> T:
+ """A helper function to return the message instance from its JSON
+ representation. This returns the instance itself and is therefore assignable
+ and chainable.
+
+ This is equivalent to::
+
+ return message.from_dict(json.loads(value))
+
+ Parameters
+ -----------
+ value: Union[:class:`str`, :class:`bytes`]
+ The value to pass to :func:`json.loads`.
+
+ Returns
+ --------
+ :class:`Message`
+ The initialized message.
+ """
+ return self.from_dict(json.loads(value))
+
+ def to_pydict(
+ self, casing: Casing = Casing.CAMEL, include_default_values: bool = False
+ ) -> Dict[str, Any]:
+ """
+ Returns a Python dict representation of this object.
+
+ Parameters
+ -----------
+ casing: :class:`Casing`
+ The casing to use for key values. Default is :attr:`Casing.CAMEL` for
+ compatibility purposes.
+ include_default_values: :class:`bool`
+ If ``True`` will include the default values of fields. Default is ``False``.
+ E.g. an ``int32`` field will be included with a value of ``0`` if this is
+ set to ``True``, otherwise this would be ignored.
+
+ Returns
+ --------
+ Dict[:class:`str`, Any]
+ The Python dict representation of this object.
+ """
+ output: Dict[str, Any] = {}
+ defaults = self._aristaproto.default_gen
+ for field_name, meta in self._aristaproto.meta_by_field_name.items():
+ field_is_repeated = defaults[field_name] is list
+ value = getattr(self, field_name)
+ cased_name = casing(field_name).rstrip("_") # type: ignore
+ if meta.proto_type == TYPE_MESSAGE:
+ if isinstance(value, datetime):
+ if (
+ value != DATETIME_ZERO
+ or include_default_values
+ or self._include_default_value_for_oneof(
+ field_name=field_name, meta=meta
+ )
+ ):
+ output[cased_name] = value
+ elif isinstance(value, timedelta):
+ if (
+ value != timedelta(0)
+ or include_default_values
+ or self._include_default_value_for_oneof(
+ field_name=field_name, meta=meta
+ )
+ ):
+ output[cased_name] = value
+ elif meta.wraps:
+ if value is not None or include_default_values:
+ output[cased_name] = value
+ elif field_is_repeated:
+ # Convert each item.
+ value = [i.to_pydict(casing, include_default_values) for i in value]
+ if value or include_default_values:
+ output[cased_name] = value
+ elif value is None:
+ if include_default_values:
+ output[cased_name] = None
+ elif (
+ value._serialized_on_wire
+ or include_default_values
+ or self._include_default_value_for_oneof(
+ field_name=field_name, meta=meta
+ )
+ ):
+ output[cased_name] = value.to_pydict(casing, include_default_values)
+ elif meta.proto_type == TYPE_MAP:
+ for k in value:
+ if hasattr(value[k], "to_pydict"):
+ value[k] = value[k].to_pydict(casing, include_default_values)
+
+ if value or include_default_values:
+ output[cased_name] = value
+ elif (
+ value != self._get_field_default(field_name)
+ or include_default_values
+ or self._include_default_value_for_oneof(
+ field_name=field_name, meta=meta
+ )
+ ):
+ output[cased_name] = value
+ return output
+
+ def from_pydict(self: T, value: Mapping[str, Any]) -> T:
+ """
+ Parse the key/value pairs into the current message instance. This returns the
+ instance itself and is therefore assignable and chainable.
+
+ Parameters
+ -----------
+ value: Dict[:class:`str`, Any]
+ The dictionary to parse from.
+
+ Returns
+ --------
+ :class:`Message`
+ The initialized message.
+ """
+ self._serialized_on_wire = True
+ for key in value:
+ field_name = safe_snake_case(key)
+ meta = self._aristaproto.meta_by_field_name.get(field_name)
+ if not meta:
+ continue
+
+ if value[key] is not None:
+ if meta.proto_type == TYPE_MESSAGE:
+ v = getattr(self, field_name)
+ if isinstance(v, list):
+ cls = self._aristaproto.cls_by_field[field_name]
+ for item in value[key]:
+ v.append(cls().from_pydict(item))
+ elif isinstance(v, datetime):
+ v = value[key]
+ elif isinstance(v, timedelta):
+ v = value[key]
+ elif meta.wraps:
+ v = value[key]
+ else:
+ # NOTE: `from_pydict` mutates the underlying message, so no
+ # assignment here is necessary.
+ v.from_pydict(value[key])
+ elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE:
+ v = getattr(self, field_name)
+ cls = self._aristaproto.cls_by_field[f"{field_name}.value"]
+ for k in value[key]:
+ v[k] = cls().from_pydict(value[key][k])
+ else:
+ v = value[key]
+
+ if v is not None:
+ setattr(self, field_name, v)
+ return self
+
+ def is_set(self, name: str) -> bool:
+ """
+ Check if field with the given name has been set.
+
+ Parameters
+ -----------
+ name: :class:`str`
+ The name of the field to check for.
+
+ Returns
+ --------
+ :class:`bool`
+ `True` if field has been set, otherwise `False`.
+ """
+ default = (
+ PLACEHOLDER
+ if not self._aristaproto.meta_by_field_name[name].optional
+ else None
+ )
+ return self.__raw_get(name) is not default
+
+ @classmethod
+ def _validate_field_groups(cls, values):
+ group_to_one_ofs = cls._aristaproto.oneof_field_by_group
+ field_name_to_meta = cls._aristaproto.meta_by_field_name
+
+ for group, field_set in group_to_one_ofs.items():
+ if len(field_set) == 1:
+ (field,) = field_set
+ field_name = field.name
+ meta = field_name_to_meta[field_name]
+
+ # This is a synthetic oneof; we should ignore its presence and not consider it a real oneof.
+ if meta.optional:
+ continue
+
+ set_fields = [
+ field.name for field in field_set if values[field.name] is not None
+ ]
+
+ if not set_fields:
+ raise ValueError(f"Group {group} has no value; all fields are None")
+ elif len(set_fields) > 1:
+ set_fields_str = ", ".join(set_fields)
+ raise ValueError(
+ f"Group {group} has more than one value; fields {set_fields_str} are not None"
+ )
+
+ return values
+
+
+Message.__annotations__ = {} # HACK to avoid typing.get_type_hints breaking :)
+
+# monkey patch (de-)serialization functions of class `Message`
+# with functions from `betterproto-rust-codec` if available
+try:
+ import betterproto_rust_codec
+
+ def __parse_patch(self: T, data: bytes) -> T:
+ betterproto_rust_codec.deserialize(self, data)
+ return self
+
+ def __bytes_patch(self) -> bytes:
+ return betterproto_rust_codec.serialize(self)
+
+ Message.parse = __parse_patch
+ Message.__bytes__ = __bytes_patch
+except ModuleNotFoundError:
+ pass
+
+
+def serialized_on_wire(message: Message) -> bool:
+ """
+ If this message was or should be serialized on the wire. This can be used to detect
+ presence (e.g. optional wrapper message) and is used internally during
+ parsing/serialization.
+
+ Returns
+ --------
+ :class:`bool`
+ Whether this message was or should be serialized on the wire.
+ """
+ return message._serialized_on_wire
+
+
+def which_one_of(message: Message, group_name: str) -> Tuple[str, Optional[Any]]:
+ """
+ Return the name and value of a message's one-of field group.
+
+ Returns
+ --------
+ Tuple[:class:`str`, Any]
+ The field name and the value for that field.
+ """
+ field_name = message._group_current.get(group_name)
+ if not field_name:
+ return "", None
+ return field_name, getattr(message, field_name)
+
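+# e.g. for a message with ``oneof shape { Circle circle = 1; Square square = 2; }``
+# where the ``circle`` field was set (names purely illustrative):
+#     >>> which_one_of(msg, "shape")
+#     ('circle', Circle(radius=1.0))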
+
+# Circular import workaround: google.protobuf depends on base classes defined above.
+from .lib.google.protobuf import ( # noqa
+ BoolValue,
+ BytesValue,
+ DoubleValue,
+ Duration,
+ EnumValue,
+ FloatValue,
+ Int32Value,
+ Int64Value,
+ StringValue,
+ Timestamp,
+ UInt32Value,
+ UInt64Value,
+)
+
+
+class _Duration(Duration):
+ @classmethod
+ def from_timedelta(
+ cls, delta: timedelta, *, _1_microsecond: timedelta = timedelta(microseconds=1)
+ ) -> "_Duration":
+ total_us = delta // _1_microsecond
+ seconds = int(total_us / 1e6)
+ nanos = int((total_us % 1e6) * 1e3)
+ return cls(seconds, nanos)
+
+ def to_timedelta(self) -> timedelta:
+ return timedelta(seconds=self.seconds, microseconds=self.nanos / 1e3)
+
+ @staticmethod
+ def delta_to_json(delta: timedelta) -> str:
+ parts = str(delta.total_seconds()).split(".")
+ if len(parts) > 1:
+ while len(parts[1]) not in (3, 6, 9):
+ parts[1] = f"{parts[1]}0"
+ return f"{'.'.join(parts)}s"
+
+
+class _Timestamp(Timestamp):
+ @classmethod
+ def from_datetime(cls, dt: datetime) -> "_Timestamp":
+ # manual epoch offset calculation to avoid rounding errors,
+ # to support negative timestamps (before 1970) and skirt
+ # around datetime bugs (apparently 0 isn't a year in [0, 9999]??)
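+ # Worked example (assuming DATETIME_ZERO is the Unix epoch):
+ #   datetime(1970, 1, 1, 0, 0, 1, 250000) -> _Timestamp(seconds=1, nanos=250000000)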
+ offset = dt - DATETIME_ZERO
+ # below is the same as timedelta.total_seconds() but without dividing by 1e6
+ # so we end up with microseconds as integers instead of seconds as float
+ offset_us = (
+ offset.days * 24 * 60 * 60 + offset.seconds
+ ) * 10**6 + offset.microseconds
+ seconds, us = divmod(offset_us, 10**6)
+ # If the given datetime is our subclass containing nanos from the original
+ # Timestamp, we prefer those nanos over the datetime microseconds.
+ if isinstance(dt, _DateTime) and dt.nanos:
+ return cls(seconds, dt.nanos)
+ return cls(seconds, us * 1000)
+
+ def to_datetime(self) -> _DateTime:
+ # datetime.fromtimestamp() expects a timestamp in seconds, not microseconds
+ # if we pass it as a floating point number, we will run into rounding errors
+ # see also #407
+ offset = timedelta(seconds=self.seconds, microseconds=self.nanos // 1000)
+ dt = DATETIME_ZERO + offset
+ # Store the original nanos in our subclass of datetime.
+ setattr(dt, "_nanos", self.nanos)
+ return dt
+
+ @staticmethod
+ def timestamp_to_json(dt: datetime) -> str:
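+ # Illustrative outputs for naive datetimes without stored nanos:
+ #   datetime(2000, 1, 1) -> "2000-01-01T00:00:00Z"
+ #   datetime(2000, 1, 1, microsecond=500000) -> "2000-01-01T00:00:00.500Z"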
+ # If the given datetime is our subclass containing nanos from the original
+ # Timestamp, we prefer those nanos over the datetime microseconds.
+ if isinstance(dt, _DateTime) and dt.nanos:
+ nanos = dt.nanos
+ else:
+ nanos = dt.microsecond * 1e3
+ if dt.tzinfo is not None:
+ # change timezone aware datetime objects to utc
+ dt = dt.astimezone(timezone.utc)
+ copy = dt.replace(microsecond=0, tzinfo=None)
+ result = copy.isoformat()
+ if (nanos % 1e9) == 0:
+ # If there are 0 fractional digits, the fractional
+ # point '.' should be omitted when serializing.
+ return f"{result}Z"
+ if (nanos % 1e6) == 0:
+ # Serialize 3 fractional digits.
+ return f"{result}.{int(nanos // 1e6):03d}Z"
+ if (nanos % 1e3) == 0:
+ # Serialize 6 fractional digits.
+ return f"{result}.{int(nanos // 1e3):06d}Z"
+ # Serialize 9 fractional digits.
+ return f"{result}.{nanos:09d}Z"
+
+
+def _get_wrapper(proto_type: str) -> Type:
+ """Get the wrapper message class for a wrapped type."""
+
+ # TODO: include ListValue and NullValue?
+ return {
+ TYPE_BOOL: BoolValue,
+ TYPE_BYTES: BytesValue,
+ TYPE_DOUBLE: DoubleValue,
+ TYPE_FLOAT: FloatValue,
+ TYPE_ENUM: EnumValue,
+ TYPE_INT32: Int32Value,
+ TYPE_INT64: Int64Value,
+ TYPE_STRING: StringValue,
+ TYPE_UINT32: UInt32Value,
+ TYPE_UINT64: UInt64Value,
+ }[proto_type]
diff --git a/src/aristaproto/_types.py b/src/aristaproto/_types.py
new file mode 100644
index 0000000..616d550
--- /dev/null
+++ b/src/aristaproto/_types.py
@@ -0,0 +1,14 @@
+from typing import (
+ TYPE_CHECKING,
+ TypeVar,
+)
+
+
+if TYPE_CHECKING:
+ from grpclib._typing import IProtoMessage
+
+ from . import Message
+
+# Bound type variable to allow methods to return `self` of subclasses
+T = TypeVar("T", bound="Message")
+ST = TypeVar("ST", bound="IProtoMessage")
diff --git a/src/aristaproto/_version.py b/src/aristaproto/_version.py
new file mode 100644
index 0000000..347a391
--- /dev/null
+++ b/src/aristaproto/_version.py
@@ -0,0 +1,4 @@
+from importlib import metadata
+
+
+__version__ = metadata.version("aristaproto")
diff --git a/src/aristaproto/casing.py b/src/aristaproto/casing.py
new file mode 100644
index 0000000..f7d0832
--- /dev/null
+++ b/src/aristaproto/casing.py
@@ -0,0 +1,143 @@
+import keyword
+import re
+
+
+# Word delimiters and symbols that will not be preserved when re-casing.
+# language=PythonRegExp
+SYMBOLS = "[^a-zA-Z0-9]*"
+
+# Optionally capitalized word.
+# language=PythonRegExp
+WORD = "[A-Z]*[a-z]*[0-9]*"
+
+# Uppercase word, not followed by lowercase letters.
+# language=PythonRegExp
+WORD_UPPER = "[A-Z]+(?![a-z])[0-9]*"
+
+
+def safe_snake_case(value: str) -> str:
+ """Snake case a value taking into account Python keywords."""
+ value = snake_case(value)
+ value = sanitize_name(value)
+ return value
+
+
+def snake_case(value: str, strict: bool = True) -> str:
+ """
+ Join words with an underscore into lowercase and remove symbols.
+
+ Parameters
+ -----------
+ value: :class:`str`
+ The value to convert.
+ strict: :class:`bool`
+ Whether or not to force single underscores.
+
+ Returns
+ --------
+ :class:`str`
+ The value in snake_case.
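+
+ Example (illustrative):
+
+ .. code-block:: python
+
+ snake_case("HTTPResponse")  # -> "http_response"
+ snake_case("foo_bar")  # -> "foo_bar"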
+ """
+
+ def substitute_word(symbols: str, word: str, is_start: bool) -> str:
+ if not word:
+ return ""
+ if strict:
+ delimiter_count = 0 if is_start else 1 # Single underscore if strict.
+ elif is_start:
+ delimiter_count = len(symbols)
+ elif word.isupper() or word.islower():
+ delimiter_count = max(
+ 1, len(symbols)
+ ) # Preserve all delimiters if not strict.
+ else:
+ delimiter_count = len(symbols) + 1 # Extra underscore for leading capital.
+
+ return ("_" * delimiter_count) + word.lower()
+
+ snake = re.sub(
+ f"(^)?({SYMBOLS})({WORD_UPPER}|{WORD})",
+ lambda groups: substitute_word(groups[2], groups[3], groups[1] is not None),
+ value,
+ )
+ return snake
+
+
+def pascal_case(value: str, strict: bool = True) -> str:
+ """
+ Capitalize each word and remove symbols.
+
+ Parameters
+ -----------
+ value: :class:`str`
+ The value to convert.
+ strict: :class:`bool`
+ Whether or not to output only alphanumeric characters.
+
+ Returns
+ --------
+ :class:`str`
+ The value in PascalCase.
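+
+ Example (illustrative):
+
+ .. code-block:: python
+
+ pascal_case("get_user_name")  # -> "GetUserName"
+ pascal_case("HTTPResponse")  # -> "HttpResponse"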
+ """
+
+ def substitute_word(symbols, word):
+ if strict:
+ return word.capitalize() # Remove all delimiters
+
+ if word.islower():
+ delimiter_length = len(symbols[:-1]) # Lose one delimiter
+ else:
+ delimiter_length = len(symbols) # Preserve all delimiters
+
+ return ("_" * delimiter_length) + word.capitalize()
+
+ return re.sub(
+ f"({SYMBOLS})({WORD_UPPER}|{WORD})",
+ lambda groups: substitute_word(groups[1], groups[2]),
+ value,
+ )
+
+
+def camel_case(value: str, strict: bool = True) -> str:
+ """
+ Capitalize all words except first and remove symbols.
+
+ Parameters
+ -----------
+ value: :class:`str`
+ The value to convert.
+ strict: :class:`bool`
+ Whether or not to output only alphanumeric characters.
+
+ Returns
+ --------
+ :class:`str`
+ The value in camelCase.
+ """
+ return lowercase_first(pascal_case(value, strict=strict))
+
+
+def lowercase_first(value: str) -> str:
+ """
+ Lower cases the first character of the value.
+
+ Parameters
+ ----------
+ value: :class:`str`
+ The value to lower case.
+
+ Returns
+ -------
+ :class:`str`
+ The lower cased string.
+ """
+ return value[0:1].lower() + value[1:]
+
+
+def sanitize_name(value: str) -> str:
+ # https://www.python.org/dev/peps/pep-0008/#descriptive-naming-styles
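+ # Illustrative: "class" -> "class_" (keyword), "1field" -> "_1field" (not an identifier).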
+ if keyword.iskeyword(value):
+ return f"{value}_"
+ if not value.isidentifier():
+ return f"_{value}"
+ return value
diff --git a/src/aristaproto/compile/__init__.py b/src/aristaproto/compile/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/aristaproto/compile/__init__.py
diff --git a/src/aristaproto/compile/importing.py b/src/aristaproto/compile/importing.py
new file mode 100644
index 0000000..8486ddd
--- /dev/null
+++ b/src/aristaproto/compile/importing.py
@@ -0,0 +1,176 @@
+import os
+import re
+from typing import (
+ Dict,
+ List,
+ Set,
+ Tuple,
+ Type,
+)
+
+from ..casing import safe_snake_case
+from ..lib.google import protobuf as google_protobuf
+from .naming import pythonize_class_name
+
+
+WRAPPER_TYPES: Dict[str, Type] = {
+ ".google.protobuf.DoubleValue": google_protobuf.DoubleValue,
+ ".google.protobuf.FloatValue": google_protobuf.FloatValue,
+ ".google.protobuf.Int32Value": google_protobuf.Int32Value,
+ ".google.protobuf.Int64Value": google_protobuf.Int64Value,
+ ".google.protobuf.UInt32Value": google_protobuf.UInt32Value,
+ ".google.protobuf.UInt64Value": google_protobuf.UInt64Value,
+ ".google.protobuf.BoolValue": google_protobuf.BoolValue,
+ ".google.protobuf.StringValue": google_protobuf.StringValue,
+ ".google.protobuf.BytesValue": google_protobuf.BytesValue,
+}
+
+
+def parse_source_type_name(field_type_name: str) -> Tuple[str, str]:
+ """
+ Split full source type name into package and type name.
+ E.g. 'root.package.Message' -> ('root.package', 'Message')
+ 'root.Message.SomeEnum' -> ('root', 'Message.SomeEnum')
+ """
+ package_match = re.match(r"^\.?([^A-Z]+)\.(.+)", field_type_name)
+ if package_match:
+ package = package_match.group(1)
+ name = package_match.group(2)
+ else:
+ package = ""
+ name = field_type_name.lstrip(".")
+ return package, name
+
+
+def get_type_reference(
+ *,
+ package: str,
+ imports: set,
+ source_type: str,
+ unwrap: bool = True,
+ pydantic: bool = False,
+) -> str:
+ """
+ Return a Python type name for a proto type reference. Adds the import if
+ necessary. Unwraps well-known types if required.
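+
+ For example (illustrative), referencing ``.foo.bar.Baz`` from package ``foo``
+ returns ``'"bar.Baz"'`` and adds ``from . import bar`` to ``imports``.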
+ """
+ if unwrap:
+ if source_type in WRAPPER_TYPES:
+ wrapped_type = type(WRAPPER_TYPES[source_type]().value)
+ return f"Optional[{wrapped_type.__name__}]"
+
+ if source_type == ".google.protobuf.Duration":
+ return "timedelta"
+
+ elif source_type == ".google.protobuf.Timestamp":
+ return "datetime"
+
+ source_package, source_type = parse_source_type_name(source_type)
+
+ current_package: List[str] = package.split(".") if package else []
+ py_package: List[str] = source_package.split(".") if source_package else []
+ py_type: str = pythonize_class_name(source_type)
+
+ compiling_google_protobuf = current_package == ["google", "protobuf"]
+ importing_google_protobuf = py_package == ["google", "protobuf"]
+ if importing_google_protobuf and not compiling_google_protobuf:
+ py_package = (
+ ["aristaproto", "lib"] + (["pydantic"] if pydantic else []) + py_package
+ )
+
+ if py_package[:1] == ["aristaproto"]:
+ return reference_absolute(imports, py_package, py_type)
+
+ if py_package == current_package:
+ return reference_sibling(py_type)
+
+ if py_package[: len(current_package)] == current_package:
+ return reference_descendent(current_package, imports, py_package, py_type)
+
+ if current_package[: len(py_package)] == py_package:
+ return reference_ancestor(current_package, imports, py_package, py_type)
+
+ return reference_cousin(current_package, imports, py_package, py_type)
+
+
+def reference_absolute(imports: Set[str], py_package: List[str], py_type: str) -> str:
+ """
+ Returns a reference to a python type located in the root, i.e. sys.path.
+ """
+ string_import = ".".join(py_package)
+ string_alias = safe_snake_case(string_import)
+ imports.add(f"import {string_import} as {string_alias}")
+ return f'"{string_alias}.{py_type}"'
+
+
+def reference_sibling(py_type: str) -> str:
+ """
+ Returns a reference to a python type within the same package as the current package.
+ """
+ return f'"{py_type}"'
+
+
+def reference_descendent(
+ current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
+) -> str:
+ """
+ Returns a reference to a python type in a package that is a descendent of the
+ current package, and adds the required import that is aliased to avoid name
+ conflicts.
+ """
+ importing_descendent = py_package[len(current_package) :]
+ string_from = ".".join(importing_descendent[:-1])
+ string_import = importing_descendent[-1]
+ if string_from:
+ string_alias = "_".join(importing_descendent)
+ imports.add(f"from .{string_from} import {string_import} as {string_alias}")
+ return f'"{string_alias}.{py_type}"'
+ else:
+ imports.add(f"from . import {string_import}")
+ return f'"{string_import}.{py_type}"'
+
+
+def reference_ancestor(
+ current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
+) -> str:
+ """
+ Returns a reference to a python type in a package which is an ancestor to the
+ current package, and adds the required import that is aliased (if possible) to avoid
+ name conflicts.
+
+ Adds trailing __ to avoid name mangling (python.org/dev/peps/pep-0008/#id34).
+ """
+ distance_up = len(current_package) - len(py_package)
+ if py_package:
+ string_import = py_package[-1]
+ string_alias = f"_{'_' * distance_up}{string_import}__"
+ string_from = f"..{'.' * distance_up}"
+ imports.add(f"from {string_from} import {string_import} as {string_alias}")
+ return f'"{string_alias}.{py_type}"'
+ else:
+ string_alias = f"{'_' * distance_up}{py_type}__"
+ imports.add(f"from .{'.' * distance_up} import {py_type} as {string_alias}")
+ return f'"{string_alias}"'
+
+
+def reference_cousin(
+ current_package: List[str], imports: Set[str], py_package: List[str], py_type: str
+) -> str:
+ """
+ Returns a reference to a python type in a package that is not descendent, ancestor
+ or sibling, and adds the required import that is aliased to avoid name conflicts.
+ """
+ shared_ancestry = os.path.commonprefix([current_package, py_package]) # type: ignore
+ distance_up = len(current_package) - len(shared_ancestry)
+ string_from = f".{'.' * distance_up}" + ".".join(
+ py_package[len(shared_ancestry) : -1]
+ )
+ string_import = py_package[-1]
+ # Add trailing __ to avoid name mangling (python.org/dev/peps/pep-0008/#id34)
+ string_alias = (
+ f"{'_' * distance_up}"
+ + safe_snake_case(".".join(py_package[len(shared_ancestry) :]))
+ + "__"
+ )
+ imports.add(f"from {string_from} import {string_import} as {string_alias}")
+ return f'"{string_alias}.{py_type}"'
diff --git a/src/aristaproto/compile/naming.py b/src/aristaproto/compile/naming.py
new file mode 100644
index 0000000..0c45dde
--- /dev/null
+++ b/src/aristaproto/compile/naming.py
@@ -0,0 +1,21 @@
+from aristaproto import casing
+
+
+def pythonize_class_name(name: str) -> str:
+ return casing.pascal_case(name)
+
+
+def pythonize_field_name(name: str) -> str:
+ return casing.safe_snake_case(name)
+
+
+def pythonize_method_name(name: str) -> str:
+ return casing.safe_snake_case(name)
+
+
+def pythonize_enum_member_name(name: str, enum_name: str) -> str:
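+ # Illustrative: ("COLOR_RED", "Color") -> "RED"; ("RED", "Color") -> "RED".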
+ enum_name = casing.snake_case(enum_name).upper()
+ find = name.find(enum_name)
+ if find != -1:
+ name = name[find + len(enum_name) :].strip("_")
+ return casing.sanitize_name(name)
diff --git a/src/aristaproto/enum.py b/src/aristaproto/enum.py
new file mode 100644
index 0000000..8535e86
--- /dev/null
+++ b/src/aristaproto/enum.py
@@ -0,0 +1,195 @@
+from __future__ import annotations
+
+import sys
+from enum import (
+ EnumMeta,
+ IntEnum,
+)
+from types import MappingProxyType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Optional,
+ Tuple,
+)
+
+
+if TYPE_CHECKING:
+ from collections.abc import (
+ Generator,
+ Mapping,
+ )
+
+ from typing_extensions import (
+ Never,
+ Self,
+ )
+
+
+def _is_descriptor(obj: object) -> bool:
+ return (
+ hasattr(obj, "__get__") or hasattr(obj, "__set__") or hasattr(obj, "__delete__")
+ )
+
+
+class EnumType(EnumMeta if TYPE_CHECKING else type):
+ _value_map_: Mapping[int, Enum]
+ _member_map_: Mapping[str, Enum]
+
+ def __new__(
+ mcs, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any]
+ ) -> Self:
+ value_map = {}
+ member_map = {}
+
+ new_mcs = type(
+ f"{name}Type",
+ tuple(
+ dict.fromkeys(
+ [base.__class__ for base in bases if base.__class__ is not type]
+ + [EnumType, type]
+ )
+ ), # reorder the bases so EnumType and type are last to avoid conflicts
+ {"_value_map_": value_map, "_member_map_": member_map},
+ )
+
+ members = {
+ name: value
+ for name, value in namespace.items()
+ if not _is_descriptor(value) and not name.startswith("__")
+ }
+
+ cls = type.__new__(
+ new_mcs,
+ name,
+ bases,
+ {key: value for key, value in namespace.items() if key not in members},
+ )
+ # this allows us to disallow member access from other members as
+ # members become proper class variables
+
+ for name, value in members.items():
+ member = value_map.get(value)
+ if member is None:
+ member = cls.__new__(cls, name=name, value=value) # type: ignore
+ value_map[value] = member
+ member_map[name] = member
+ type.__setattr__(new_mcs, name, member)
+
+ return cls
+
+ if not TYPE_CHECKING:
+
+ def __call__(cls, value: int) -> Enum:
+ try:
+ return cls._value_map_[value]
+ except (KeyError, TypeError):
+ raise ValueError(f"{value!r} is not a valid {cls.__name__}") from None
+
+ def __iter__(cls) -> Generator[Enum, None, None]:
+ yield from cls._member_map_.values()
+
+ def __reversed__(cls) -> Generator[Enum, None, None]:
+ yield from reversed(cls._member_map_.values())
+
+ def __getitem__(cls, key: str) -> Enum:
+ return cls._member_map_[key]
+
+ @property
+ def __members__(cls) -> MappingProxyType[str, Enum]:
+ return MappingProxyType(cls._member_map_)
+
+ def __repr__(cls) -> str:
+ return f"<enum {cls.__name__!r}>"
+
+ def __len__(cls) -> int:
+ return len(cls._member_map_)
+
+ def __setattr__(cls, name: str, value: Any) -> Never:
+ raise AttributeError(f"{cls.__name__}: cannot reassign Enum members.")
+
+ def __delattr__(cls, name: str) -> Never:
+ raise AttributeError(f"{cls.__name__}: cannot delete Enum members.")
+
+ def __contains__(cls, member: object) -> bool:
+ return isinstance(member, cls) and member.name in cls._member_map_
+
+
+class Enum(IntEnum if TYPE_CHECKING else int, metaclass=EnumType):
+ """
+ The base class for protobuf enumerations, all generated enumerations will
+ inherit from this. Emulates `enum.IntEnum`.
+ """
+
+ name: Optional[str]
+ value: int
+
+ if not TYPE_CHECKING:
+
+ def __new__(cls, *, name: Optional[str], value: int) -> Self:
+ self = super().__new__(cls, value)
+ super().__setattr__(self, "name", name)
+ super().__setattr__(self, "value", value)
+ return self
+
+ def __str__(self) -> str:
+ return self.name or "None"
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}.{self.name}"
+
+ def __setattr__(self, key: str, value: Any) -> Never:
+ raise AttributeError(
+ f"{self.__class__.__name__}: cannot reassign a member's attributes."
+ )
+
+ def __delattr__(self, item: Any) -> Never:
+ raise AttributeError(
+ f"{self.__class__.__name__}: cannot delete a member's attributes."
+ )
+
+ def __copy__(self) -> Self:
+ return self
+
+ def __deepcopy__(self, memo: Any) -> Self:
+ return self
+
+ @classmethod
+ def try_value(cls, value: int = 0) -> Self:
+ """Return the value which corresponds to the value.
+
+ Parameters
+ -----------
+ value: :class:`int`
+ The value of the enum member to get.
+
+ Returns
+ -------
+ :class:`Enum`
+ The corresponding member or a new instance of the enum if
+ ``value`` isn't actually a member.
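+
+ Example (illustrative; assumes a hypothetical enum ``Color`` with a
+ member ``RED = 1``):
+
+ .. code-block:: python
+
+ Color.try_value(1)  # Color.RED
+ Color.try_value(42)  # unnamed Color instance with value 42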
+ """
+ try:
+ return cls._value_map_[value]
+ except (KeyError, TypeError):
+ return cls.__new__(cls, name=None, value=value)
+
+ @classmethod
+ def from_string(cls, name: str) -> Self:
+ """Return the value which corresponds to the string name.
+
+ Parameters
+ -----------
+ name: :class:`str`
+ The name of the enum member to get.
+
+ Raises
+ -------
+ :exc:`ValueError`
+ The member was not found in the Enum.
+ """
+ try:
+ return cls._member_map_[name]
+ except KeyError as e:
+ raise ValueError(f"Unknown value {name} for enum {cls.__name__}") from e
diff --git a/src/aristaproto/grpc/__init__.py b/src/aristaproto/grpc/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/aristaproto/grpc/__init__.py
diff --git a/src/aristaproto/grpc/grpclib_client.py b/src/aristaproto/grpc/grpclib_client.py
new file mode 100644
index 0000000..b19e806
--- /dev/null
+++ b/src/aristaproto/grpc/grpclib_client.py
@@ -0,0 +1,177 @@
+import asyncio
+from abc import ABC
+from typing import (
+ TYPE_CHECKING,
+ AsyncIterable,
+ AsyncIterator,
+ Collection,
+ Iterable,
+ Mapping,
+ Optional,
+ Tuple,
+ Type,
+ Union,
+)
+
+import grpclib.const
+
+
+if TYPE_CHECKING:
+ from grpclib.client import Channel
+ from grpclib.metadata import Deadline
+
+ from .._types import (
+ ST,
+ IProtoMessage,
+ Message,
+ T,
+ )
+
+
+Value = Union[str, bytes]
+MetadataLike = Union[Mapping[str, Value], Collection[Tuple[str, Value]]]
+MessageSource = Union[Iterable["IProtoMessage"], AsyncIterable["IProtoMessage"]]
+
+
+class ServiceStub(ABC):
+ """
+ Base class for async gRPC clients.
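+
+ A minimal usage sketch (illustrative; ``GreeterStub`` and ``HelloRequest``
+ stand in for generated types):
+
+ .. code-block:: python
+
+ from grpclib.client import Channel
+
+ channel = Channel(host="127.0.0.1", port=50051)
+ greeter = GreeterStub(channel)
+ reply = await greeter.say_hello(HelloRequest(name="world"))
+ channel.close()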
+ """
+
+ def __init__(
+ self,
+ channel: "Channel",
+ *,
+ timeout: Optional[float] = None,
+ deadline: Optional["Deadline"] = None,
+ metadata: Optional[MetadataLike] = None,
+ ) -> None:
+ self.channel = channel
+ self.timeout = timeout
+ self.deadline = deadline
+ self.metadata = metadata
+
+ def __resolve_request_kwargs(
+ self,
+ timeout: Optional[float],
+ deadline: Optional["Deadline"],
+ metadata: Optional[MetadataLike],
+ ):
+ return {
+ "timeout": self.timeout if timeout is None else timeout,
+ "deadline": self.deadline if deadline is None else deadline,
+ "metadata": self.metadata if metadata is None else metadata,
+ }
+
+ async def _unary_unary(
+ self,
+ route: str,
+ request: "IProtoMessage",
+ response_type: Type["T"],
+ *,
+ timeout: Optional[float] = None,
+ deadline: Optional["Deadline"] = None,
+ metadata: Optional[MetadataLike] = None,
+ ) -> "T":
+ """Make a unary request and return the response."""
+ async with self.channel.request(
+ route,
+ grpclib.const.Cardinality.UNARY_UNARY,
+ type(request),
+ response_type,
+ **self.__resolve_request_kwargs(timeout, deadline, metadata),
+ ) as stream:
+ await stream.send_message(request, end=True)
+ response = await stream.recv_message()
+ assert response is not None
+ return response
+
+ async def _unary_stream(
+ self,
+ route: str,
+ request: "IProtoMessage",
+ response_type: Type["T"],
+ *,
+ timeout: Optional[float] = None,
+ deadline: Optional["Deadline"] = None,
+ metadata: Optional[MetadataLike] = None,
+ ) -> AsyncIterator["T"]:
+ """Make a unary request and return the stream response iterator."""
+ async with self.channel.request(
+ route,
+ grpclib.const.Cardinality.UNARY_STREAM,
+ type(request),
+ response_type,
+ **self.__resolve_request_kwargs(timeout, deadline, metadata),
+ ) as stream:
+ await stream.send_message(request, end=True)
+ async for message in stream:
+ yield message
+
+ async def _stream_unary(
+ self,
+ route: str,
+ request_iterator: MessageSource,
+ request_type: Type["IProtoMessage"],
+ response_type: Type["T"],
+ *,
+ timeout: Optional[float] = None,
+ deadline: Optional["Deadline"] = None,
+ metadata: Optional[MetadataLike] = None,
+ ) -> "T":
+ """Make a stream request and return the response."""
+ async with self.channel.request(
+ route,
+ grpclib.const.Cardinality.STREAM_UNARY,
+ request_type,
+ response_type,
+ **self.__resolve_request_kwargs(timeout, deadline, metadata),
+ ) as stream:
+ await stream.send_request()
+ await self._send_messages(stream, request_iterator)
+ response = await stream.recv_message()
+ assert response is not None
+ return response
+
+ async def _stream_stream(
+ self,
+ route: str,
+ request_iterator: MessageSource,
+ request_type: Type["IProtoMessage"],
+ response_type: Type["T"],
+ *,
+ timeout: Optional[float] = None,
+ deadline: Optional["Deadline"] = None,
+ metadata: Optional[MetadataLike] = None,
+ ) -> AsyncIterator["T"]:
+ """
+ Make a stream request and return an AsyncIterator to iterate over response
+ messages.
+ """
+ async with self.channel.request(
+ route,
+ grpclib.const.Cardinality.STREAM_STREAM,
+ request_type,
+ response_type,
+ **self.__resolve_request_kwargs(timeout, deadline, metadata),
+ ) as stream:
+ await stream.send_request()
+ sending_task = asyncio.ensure_future(
+ self._send_messages(stream, request_iterator)
+ )
+ try:
+ async for response in stream:
+ yield response
+ except BaseException:
+ sending_task.cancel()
+ raise
+
+ @staticmethod
+ async def _send_messages(stream, messages: MessageSource):
+ if isinstance(messages, AsyncIterable):
+ async for message in messages:
+ await stream.send_message(message)
+ else:
+ for message in messages:
+ await stream.send_message(message)
+ await stream.end()
diff --git a/src/aristaproto/grpc/grpclib_server.py b/src/aristaproto/grpc/grpclib_server.py
new file mode 100644
index 0000000..3e28031
--- /dev/null
+++ b/src/aristaproto/grpc/grpclib_server.py
@@ -0,0 +1,33 @@
+from abc import ABC
+from collections.abc import AsyncIterable
+from typing import (
+ Any,
+ Callable,
+ Dict,
+)
+
+import grpclib
+import grpclib.server
+
+
+class ServiceBase(ABC):
+ """
+ Base class for async gRPC servers.
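+
+ Server-streaming handlers are expected to be async generators. A sketch
+ (``ListRequest`` and ``Item`` are hypothetical):
+
+ .. code-block:: python
+
+ async def list_items(self, request: "ListRequest") -> AsyncIterator["Item"]:
+ for item in self._items:
+ yield item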
+ """
+
+ async def _call_rpc_handler_server_stream(
+ self,
+ handler: Callable,
+ stream: grpclib.server.Stream,
+ request: Any,
+ ) -> None:
+ response_iter = handler(request)
+ # Check if the response is actually an AsyncIterable. This will be
+ # False if the handler contains no `yield` at all (i.e. it is a plain
+ # coroutine), in which case we treat it as an empty iterator.
+ if isinstance(response_iter, AsyncIterable):
+ async for response_message in response_iter:
+ await stream.send_message(response_message)
+ else:
+ response_iter.close()
diff --git a/src/aristaproto/grpc/util/__init__.py b/src/aristaproto/grpc/util/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/aristaproto/grpc/util/__init__.py
diff --git a/src/aristaproto/grpc/util/async_channel.py b/src/aristaproto/grpc/util/async_channel.py
new file mode 100644
index 0000000..9f18dbf
--- /dev/null
+++ b/src/aristaproto/grpc/util/async_channel.py
@@ -0,0 +1,193 @@
+import asyncio
+from typing import (
+ AsyncIterable,
+ AsyncIterator,
+ Iterable,
+ Optional,
+ TypeVar,
+ Union,
+)
+
+
+T = TypeVar("T")
+
+
+class ChannelClosed(Exception):
+ """
+ An exception raised on an attempt to send through a closed channel.
+ """
+
+
+class ChannelDone(Exception):
+ """
+ An exception raised on an attempt to receive from a channel that is both
+ closed and empty.
+ """
+
+
+class AsyncChannel(AsyncIterable[T]):
+ """
+ A buffered async channel for sending items between coroutines with FIFO ordering.
+
+ This makes decoupled bidirectional streaming gRPC requests easy if used like:
+
+ .. code-block:: python
+
+ client = GeneratedStub(grpclib_chan)
+ request_channel = AsyncChannel()
+ # We can start by sending all the requests we already have
+ await request_channel.send_from([RequestObject(...), RequestObject(...)])
+ async for response in client.rpc_call(request_channel):
+ # The response iterator will remain active until the connection is closed
+ ...
+ # More items can be sent at any time
+ await request_channel.send(RequestObject(...))
+ ...
+ # The channel must be closed to complete the gRPC connection
+ request_channel.close()
+
+ Items can be sent through the channel by either:
+ - providing an iterable to the send_from method
+ - passing them to the send method one at a time
+
+ Items can be received from the channel by either:
+ - iterating over the channel with a for loop to get all items
+ - calling the receive method to get one item at a time
+
+ If the channel is empty then receivers will wait until either an item appears or the
+ channel is closed.
+
+ Once the channel is closed, subsequent attempts to send through the channel
+ will fail with a ChannelClosed exception.
+
+ When the channel is closed and empty then it is done, and further attempts to
+ receive from it will fail with a ChannelDone exception.
+
+ If multiple coroutines receive from the channel concurrently, each item sent
+ will be received by only one of the receivers.
+
+ :param buffer_limit:
+ Limit the number of items that can be buffered in the channel. A value less
+ than 1 implies no limit. If the channel is full then attempts to send more
+ items will result in the sender waiting until an item is received from the
+ channel.
+ :param close:
+ If set to True then the channel starts closed and will reject new items.
+ """
+
+ def __init__(self, *, buffer_limit: int = 0, close: bool = False):
+ self._queue: asyncio.Queue[T] = asyncio.Queue(buffer_limit)
+ self._closed = close  # honor the close flag rather than silently ignoring it
+ self._waiting_receivers: int = 0
+ # Track whether flush has been invoked so it can only happen once
+ self._flushed = False
+
+ def __aiter__(self) -> AsyncIterator[T]:
+ return self
+
+ async def __anext__(self) -> T:
+ if self.done():
+ raise StopAsyncIteration
+ self._waiting_receivers += 1
+ try:
+ result = await self._queue.get()
+ if result is self.__flush:
+ raise StopAsyncIteration
+ return result
+ finally:
+ self._waiting_receivers -= 1
+ self._queue.task_done()
+
+ def closed(self) -> bool:
+ """
+ Returns True if this channel is closed and no longer accepting new items
+ """
+ return self._closed
+
+ def done(self) -> bool:
+ """
+ Check if this channel is done.
+
+ :return: True if this channel is closed and has been drained of items, in
+ which case any further attempts to receive an item from this channel will
+ raise a ChannelDone exception.
+ """
+ # After close, the channel is only done once every remaining enqueued
+ # item has a waiting receiver ready to consume it.
+ return self._closed and self._queue.qsize() <= self._waiting_receivers
+
+ async def send_from(
+ self, source: Union[Iterable[T], AsyncIterable[T]], close: bool = False
+ ) -> "AsyncChannel[T]":
+ """
+ Iterates the given [Async]Iterable and sends all the resulting items.
+ If close is set to True then subsequent send calls will be rejected with a
+ ChannelClosed exception.
+ :param source: an iterable of items to send
+ :param close:
+ if True then the channel will be closed after the source has been exhausted
+
+ """
+ if self._closed:
+ raise ChannelClosed("Cannot send through a closed channel")
+ if isinstance(source, AsyncIterable):
+ async for item in source:
+ await self._queue.put(item)
+ else:
+ for item in source:
+ await self._queue.put(item)
+ if close:
+ # Complete the closing process
+ self.close()
+ return self
+
+ async def send(self, item: T) -> "AsyncChannel[T]":
+ """
+ Send a single item over this channel.
+ :param item: The item to send
+ """
+ if self._closed:
+ raise ChannelClosed("Cannot send through a closed channel")
+ await self._queue.put(item)
+ return self
+
+ async def receive(self) -> Optional[T]:
+ """
+ Returns the next item from this channel when it becomes available,
+ or None if the channel is closed before another item is sent.
+ :return: An item from the channel
+ """
+ if self.done():
+ raise ChannelDone("Cannot receive from a channel that is closed and empty")
+ self._waiting_receivers += 1
+ try:
+ result = await self._queue.get()
+ if result is self.__flush:
+ return None
+ return result
+ finally:
+ self._waiting_receivers -= 1
+ self._queue.task_done()
+
+ def close(self):
+ """
+ Close this channel to new items
+ """
+ self._closed = True
+ asyncio.ensure_future(self._flush_queue())
+
+ async def _flush_queue(self):
+ """
+ To be called after the channel is closed. Pushes a number of self.__flush
+ objects to the queue to ensure no waiting consumers get deadlocked.
+ """
+ if not self._flushed:
+ self._flushed = True
+ deadlocked_receivers = max(0, self._waiting_receivers - self._queue.qsize())
+ for _ in range(deadlocked_receivers):
+ await self._queue.put(self.__flush)
+
+ # A special signal object for flushing the queue when the channel is closed
+ __flush = object()
diff --git a/src/aristaproto/lib/__init__.py b/src/aristaproto/lib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/aristaproto/lib/__init__.py
diff --git a/src/aristaproto/lib/google/__init__.py b/src/aristaproto/lib/google/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/aristaproto/lib/google/__init__.py
diff --git a/src/aristaproto/lib/google/protobuf/__init__.py b/src/aristaproto/lib/google/protobuf/__init__.py
new file mode 100644
index 0000000..f8aad1e
--- /dev/null
+++ b/src/aristaproto/lib/google/protobuf/__init__.py
@@ -0,0 +1 @@
+from aristaproto.lib.std.google.protobuf import *
diff --git a/src/aristaproto/lib/google/protobuf/compiler/__init__.py b/src/aristaproto/lib/google/protobuf/compiler/__init__.py
new file mode 100644
index 0000000..cfa3855
--- /dev/null
+++ b/src/aristaproto/lib/google/protobuf/compiler/__init__.py
@@ -0,0 +1 @@
+from aristaproto.lib.std.google.protobuf.compiler import *
diff --git a/src/aristaproto/lib/pydantic/__init__.py b/src/aristaproto/lib/pydantic/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/aristaproto/lib/pydantic/__init__.py
diff --git a/src/aristaproto/lib/pydantic/google/__init__.py b/src/aristaproto/lib/pydantic/google/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/aristaproto/lib/pydantic/google/__init__.py
diff --git a/src/aristaproto/lib/pydantic/google/protobuf/__init__.py b/src/aristaproto/lib/pydantic/google/protobuf/__init__.py
new file mode 100644
index 0000000..3a7e8ac
--- /dev/null
+++ b/src/aristaproto/lib/pydantic/google/protobuf/__init__.py
@@ -0,0 +1,2589 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# sources: google/protobuf/any.proto, google/protobuf/api.proto, google/protobuf/descriptor.proto, google/protobuf/duration.proto, google/protobuf/empty.proto, google/protobuf/field_mask.proto, google/protobuf/source_context.proto, google/protobuf/struct.proto, google/protobuf/timestamp.proto, google/protobuf/type.proto, google/protobuf/wrappers.proto
+# plugin: python-aristaproto
+
+import warnings
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+ from dataclasses import dataclass
+else:
+ from pydantic.dataclasses import dataclass
+
+from typing import (
+ Dict,
+ List,
+ Mapping,
+ Optional,
+)
+
+from pydantic import root_validator
+from typing_extensions import Self
+
+import aristaproto
+from aristaproto.utils import hybridmethod
+
+
+class Syntax(aristaproto.Enum):
+ """The syntax in which a protocol buffer element is defined."""
+
+ PROTO2 = 0
+ """Syntax `proto2`."""
+
+ PROTO3 = 1
+ """Syntax `proto3`."""
+
+ EDITIONS = 2
+ """Syntax `editions`."""
+
+
+class FieldKind(aristaproto.Enum):
+ """Basic field types."""
+
+ TYPE_UNKNOWN = 0
+ """Field type unknown."""
+
+ TYPE_DOUBLE = 1
+ """Field type double."""
+
+ TYPE_FLOAT = 2
+ """Field type float."""
+
+ TYPE_INT64 = 3
+ """Field type int64."""
+
+ TYPE_UINT64 = 4
+ """Field type uint64."""
+
+ TYPE_INT32 = 5
+ """Field type int32."""
+
+ TYPE_FIXED64 = 6
+ """Field type fixed64."""
+
+ TYPE_FIXED32 = 7
+ """Field type fixed32."""
+
+ TYPE_BOOL = 8
+ """Field type bool."""
+
+ TYPE_STRING = 9
+ """Field type string."""
+
+ TYPE_GROUP = 10
+ """Field type group. Proto2 syntax only, and deprecated."""
+
+ TYPE_MESSAGE = 11
+ """Field type message."""
+
+ TYPE_BYTES = 12
+ """Field type bytes."""
+
+ TYPE_UINT32 = 13
+ """Field type uint32."""
+
+ TYPE_ENUM = 14
+ """Field type enum."""
+
+ TYPE_SFIXED32 = 15
+ """Field type sfixed32."""
+
+ TYPE_SFIXED64 = 16
+ """Field type sfixed64."""
+
+ TYPE_SINT32 = 17
+ """Field type sint32."""
+
+ TYPE_SINT64 = 18
+ """Field type sint64."""
+
+
+class FieldCardinality(aristaproto.Enum):
+ """Whether a field is optional, required, or repeated."""
+
+ CARDINALITY_UNKNOWN = 0
+ """For fields with unknown cardinality."""
+
+ CARDINALITY_OPTIONAL = 1
+ """For optional fields."""
+
+ CARDINALITY_REQUIRED = 2
+ """For required fields. Proto2 syntax only."""
+
+ CARDINALITY_REPEATED = 3
+ """For repeated fields."""
+
+
+class Edition(aristaproto.Enum):
+ """The full set of known editions."""
+
+ UNKNOWN = 0
+ """A placeholder for an unknown edition value."""
+
+ PROTO2 = 998
+ """
+ Legacy syntax "editions". These pre-date editions, but behave much like
+ distinct editions. These can't be used to specify the edition of proto
+ files, but feature definitions must supply proto2/proto3 defaults for
+ backwards compatibility.
+ """
+
+ PROTO3 = 999
+ _2023 = 1000
+ """
+ Editions that have been released. The specific values are arbitrary and
+ should not be depended on, but they will always be time-ordered for easy
+ comparison.
+ """
+
+ _2024 = 1001
+ _1_TEST_ONLY = 1
+ """
+ Placeholder editions for testing feature resolution. These should not be
+ used or relied on outside of tests.
+ """
+
+ _2_TEST_ONLY = 2
+ _99997_TEST_ONLY = 99997
+ _99998_TEST_ONLY = 99998
+ _99999_TEST_ONLY = 99999
+ MAX = 2147483647
+ """
+ Placeholder for specifying unbounded edition support. This should only
+ ever be used by plugins that can expect to never require any changes to
+ support a new edition.
+ """
+
+
+class ExtensionRangeOptionsVerificationState(aristaproto.Enum):
+ """The verification state of the extension range."""
+
+ DECLARATION = 0
+ """All the extensions of the range must be declared."""
+
+ UNVERIFIED = 1
+
+
+class FieldDescriptorProtoType(aristaproto.Enum):
+ TYPE_DOUBLE = 1
+ """
+ 0 is reserved for errors.
+ Order is weird for historical reasons.
+ """
+
+ TYPE_FLOAT = 2
+ TYPE_INT64 = 3
+ """
+ Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
+ negative values are likely.
+ """
+
+ TYPE_UINT64 = 4
+ TYPE_INT32 = 5
+ """
+ Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
+ negative values are likely.
+ """
+
+ TYPE_FIXED64 = 6
+ TYPE_FIXED32 = 7
+ TYPE_BOOL = 8
+ TYPE_STRING = 9
+ TYPE_GROUP = 10
+ """
+ Tag-delimited aggregate.
+ Group type is deprecated and not supported after proto2. However, Proto3
+ implementations should still be able to parse the group wire format and
+ treat group fields as unknown fields. In Editions, the group wire format
+ can be enabled via the `message_encoding` feature.
+ """
+
+ TYPE_MESSAGE = 11
+ TYPE_BYTES = 12
+ """New in version 2."""
+
+ TYPE_UINT32 = 13
+ TYPE_ENUM = 14
+ TYPE_SFIXED32 = 15
+ TYPE_SFIXED64 = 16
+ TYPE_SINT32 = 17
+ TYPE_SINT64 = 18
+
+
+class FieldDescriptorProtoLabel(aristaproto.Enum):
+ LABEL_OPTIONAL = 1
+ """0 is reserved for errors"""
+
+ LABEL_REPEATED = 3
+ LABEL_REQUIRED = 2
+ """
+ The required label is only allowed in proto2. In proto3 and Editions
+ it's explicitly prohibited. In Editions, the `field_presence` feature
+ can be used to get this behavior.
+ """
+
+
+class FileOptionsOptimizeMode(aristaproto.Enum):
+ """Generated classes can be optimized for speed or code size."""
+
+ SPEED = 1
+ """Generate complete code for parsing, serialization, etc."""
+
+ CODE_SIZE = 2
+ """Use ReflectionOps to implement these methods."""
+
+ LITE_RUNTIME = 3
+ """Generate code using MessageLite and the lite runtime."""
+
+
+class FieldOptionsCType(aristaproto.Enum):
+ STRING = 0
+ """Default mode."""
+
+ CORD = 1
+ """
+ The option [ctype=CORD] may be applied to a non-repeated field of type
+ "bytes". It indicates that in C++, the data should be stored in a Cord
+ instead of a string. For very large strings, this may reduce memory
+ fragmentation. It may also allow better performance when parsing from a
+ Cord, or when parsing with aliasing enabled, as the parsed Cord may then
+ alias the original buffer.
+ """
+
+ STRING_PIECE = 2
+
+
+class FieldOptionsJsType(aristaproto.Enum):
+ JS_NORMAL = 0
+ """Use the default type."""
+
+ JS_STRING = 1
+ """Use JavaScript strings."""
+
+ JS_NUMBER = 2
+ """Use JavaScript numbers."""
+
+
+class FieldOptionsOptionRetention(aristaproto.Enum):
+ """
+ If set to RETENTION_SOURCE, the option will be omitted from the binary.
+ Note: as of January 2023, support for this is in progress and does not yet
+ have an effect (b/264593489).
+ """
+
+ RETENTION_UNKNOWN = 0
+ RETENTION_RUNTIME = 1
+ RETENTION_SOURCE = 2
+
+
+class FieldOptionsOptionTargetType(aristaproto.Enum):
+ """
+ This indicates the types of entities that the field may apply to when used
+ as an option. If it is unset, then the field may be freely used as an
+ option on any kind of entity. Note: as of January 2023, support for this is
+ in progress and does not yet have an effect (b/264593489).
+ """
+
+ TARGET_TYPE_UNKNOWN = 0
+ TARGET_TYPE_FILE = 1
+ TARGET_TYPE_EXTENSION_RANGE = 2
+ TARGET_TYPE_MESSAGE = 3
+ TARGET_TYPE_FIELD = 4
+ TARGET_TYPE_ONEOF = 5
+ TARGET_TYPE_ENUM = 6
+ TARGET_TYPE_ENUM_ENTRY = 7
+ TARGET_TYPE_SERVICE = 8
+ TARGET_TYPE_METHOD = 9
+
+
+class MethodOptionsIdempotencyLevel(aristaproto.Enum):
+ """
+ Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ or neither? HTTP based RPC implementation may choose GET verb for safe
+ methods, and PUT verb for idempotent methods instead of the default POST.
+ """
+
+ IDEMPOTENCY_UNKNOWN = 0
+ NO_SIDE_EFFECTS = 1
+ IDEMPOTENT = 2
+
+
+class FeatureSetFieldPresence(aristaproto.Enum):
+ FIELD_PRESENCE_UNKNOWN = 0
+ EXPLICIT = 1
+ IMPLICIT = 2
+ LEGACY_REQUIRED = 3
+
+
+class FeatureSetEnumType(aristaproto.Enum):
+ ENUM_TYPE_UNKNOWN = 0
+ OPEN = 1
+ CLOSED = 2
+
+
+class FeatureSetRepeatedFieldEncoding(aristaproto.Enum):
+ REPEATED_FIELD_ENCODING_UNKNOWN = 0
+ PACKED = 1
+ EXPANDED = 2
+
+
+class FeatureSetUtf8Validation(aristaproto.Enum):
+ UTF8_VALIDATION_UNKNOWN = 0
+ VERIFY = 2
+ NONE = 3
+
+
+class FeatureSetMessageEncoding(aristaproto.Enum):
+ MESSAGE_ENCODING_UNKNOWN = 0
+ LENGTH_PREFIXED = 1
+ DELIMITED = 2
+
+
+class FeatureSetJsonFormat(aristaproto.Enum):
+ JSON_FORMAT_UNKNOWN = 0
+ ALLOW = 1
+ LEGACY_BEST_EFFORT = 2
+
+
+class GeneratedCodeInfoAnnotationSemantic(aristaproto.Enum):
+ """
+ Represents the identified object's effect on the element in the original
+ .proto file.
+ """
+
+ NONE = 0
+ """There is no effect or the effect is indescribable."""
+
+ SET = 1
+ """The element is set or otherwise mutated."""
+
+ ALIAS = 2
+ """An alias to the element is returned."""
+
+
+class NullValue(aristaproto.Enum):
+ """
+ `NullValue` is a singleton enumeration to represent the null value for the
+ `Value` type union.
+
+ The JSON representation for `NullValue` is JSON `null`.
+ """
+
+ _ = 0
+ """Null value."""
+
+
+@dataclass(eq=False, repr=False)
+class Any(aristaproto.Message):
+ """
+ `Any` contains an arbitrary serialized protocol buffer message along with a
+ URL that describes the type of the serialized message.
+
+ Protobuf library provides support to pack/unpack Any values in the form
+ of utility functions or additional generated methods of the Any type.
+
+ Example 1: Pack and unpack a message in C++.
+
+ Foo foo = ...;
+ Any any;
+ any.PackFrom(foo);
+ ...
+ if (any.UnpackTo(&foo)) {
+ ...
+ }
+
+ Example 2: Pack and unpack a message in Java.
+
+ Foo foo = ...;
+ Any any = Any.pack(foo);
+ ...
+ if (any.is(Foo.class)) {
+ foo = any.unpack(Foo.class);
+ }
+ // or ...
+ if (any.isSameTypeAs(Foo.getDefaultInstance())) {
+ foo = any.unpack(Foo.getDefaultInstance());
+ }
+
+ Example 3: Pack and unpack a message in Python.
+
+ foo = Foo(...)
+ any = Any()
+ any.Pack(foo)
+ ...
+ if any.Is(Foo.DESCRIPTOR):
+ any.Unpack(foo)
+ ...
+
+ Example 4: Pack and unpack a message in Go
+
+ foo := &pb.Foo{...}
+ any, err := anypb.New(foo)
+ if err != nil {
+ ...
+ }
+ ...
+ foo := &pb.Foo{}
+ if err := any.UnmarshalTo(foo); err != nil {
+ ...
+ }
+
+ The pack methods provided by protobuf library will by default use
+ 'type.googleapis.com/full.type.name' as the type URL and the unpack
+ methods only use the fully qualified type name after the last '/'
+ in the type URL, for example "foo.bar.com/x/y.z" will yield type
+ name "y.z".
+
+ JSON
+ ====
+ The JSON representation of an `Any` value uses the regular
+ representation of the deserialized, embedded message, with an
+ additional field `@type` which contains the type URL. Example:
+
+ package google.profile;
+ message Person {
+ string first_name = 1;
+ string last_name = 2;
+ }
+
+ {
+ "@type": "type.googleapis.com/google.profile.Person",
+ "firstName": <string>,
+ "lastName": <string>
+ }
+
+ If the embedded message type is well-known and has a custom JSON
+ representation, that representation will be embedded adding a field
+ `value` which holds the custom JSON in addition to the `@type`
+ field. Example (for message [google.protobuf.Duration][]):
+
+ {
+ "@type": "type.googleapis.com/google.protobuf.Duration",
+ "value": "1.212s"
+ }
+ """
+
+ type_url: str = aristaproto.string_field(1)
+ """
+ A URL/resource name that uniquely identifies the type of the serialized
+ protocol buffer message. This string must contain at least
+ one "/" character. The last segment of the URL's path must represent
+ the fully qualified name of the type (as in
+ `path/google.protobuf.Duration`). The name should be in a canonical form
+ (e.g., leading "." is not accepted).
+
+ In practice, teams usually precompile into the binary all types that they
+ expect it to use in the context of Any. However, for URLs which use the
+ scheme `http`, `https`, or no scheme, one can optionally set up a type
+ server that maps type URLs to message definitions as follows:
+
+ * If no scheme is provided, `https` is assumed.
+ * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ value in binary format, or produce an error.
+ * Applications are allowed to cache lookup results based on the
+ URL, or have them precompiled into a binary to avoid any
+ lookup. Therefore, binary compatibility needs to be preserved
+ on changes to types. (Use versioned type names to manage
+ breaking changes.)
+
+ Note: this functionality is not currently available in the official
+ protobuf release, and it is not used for type URLs beginning with
+ type.googleapis.com. As of May 2023, there are no widely used type server
+ implementations and no plans to implement one.
+
+ Schemes other than `http`, `https` (or the empty scheme) might be
+ used with implementation specific semantics.
+ """
+
+ value: bytes = aristaproto.bytes_field(2)
+ """
+ Must be a valid serialized protocol buffer of the above specified type.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class SourceContext(aristaproto.Message):
+ """
+ `SourceContext` represents information about the source of a
+ protobuf element, like the file in which it is defined.
+ """
+
+ file_name: str = aristaproto.string_field(1)
+ """
+ The path-qualified name of the .proto file that contained the associated
+ protobuf element. For example: `"google/protobuf/source_context.proto"`.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class Type(aristaproto.Message):
+ """A protocol buffer message type."""
+
+ name: str = aristaproto.string_field(1)
+ """The fully qualified message name."""
+
+ fields: List["Field"] = aristaproto.message_field(2)
+ """The list of fields."""
+
+ oneofs: List[str] = aristaproto.string_field(3)
+ """The list of types appearing in `oneof` definitions in this type."""
+
+ options: List["Option"] = aristaproto.message_field(4)
+ """The protocol buffer options."""
+
+ source_context: "SourceContext" = aristaproto.message_field(5)
+ """The source context."""
+
+ syntax: "Syntax" = aristaproto.enum_field(6)
+ """The source syntax."""
+
+ edition: str = aristaproto.string_field(7)
+ """
+ The source edition string, only valid when syntax is SYNTAX_EDITIONS.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class Field(aristaproto.Message):
+ """A single field of a message type."""
+
+ kind: "FieldKind" = aristaproto.enum_field(1)
+ """The field type."""
+
+ cardinality: "FieldCardinality" = aristaproto.enum_field(2)
+ """The field cardinality."""
+
+ number: int = aristaproto.int32_field(3)
+ """The field number."""
+
+ name: str = aristaproto.string_field(4)
+ """The field name."""
+
+ type_url: str = aristaproto.string_field(6)
+ """
+ The field type URL, without the scheme, for message or enumeration
+ types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`.
+ """
+
+ oneof_index: int = aristaproto.int32_field(7)
+ """
+ The index of the field type in `Type.oneofs`, for message or enumeration
+ types. The first type has index 1; zero means the type is not in the list.
+ """
+
+ packed: bool = aristaproto.bool_field(8)
+ """Whether to use alternative packed wire representation."""
+
+ options: List["Option"] = aristaproto.message_field(9)
+ """The protocol buffer options."""
+
+ json_name: str = aristaproto.string_field(10)
+ """The field JSON name."""
+
+ default_value: str = aristaproto.string_field(11)
+ """
+ The string value of the default value of this field. Proto2 syntax only.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class Enum(aristaproto.Message):
+ """Enum type definition."""
+
+ name: str = aristaproto.string_field(1)
+ """Enum type name."""
+
+ enumvalue: List["EnumValue"] = aristaproto.message_field(
+ 2, wraps=aristaproto.TYPE_ENUM
+ )
+ """Enum value definitions."""
+
+ options: List["Option"] = aristaproto.message_field(3)
+ """Protocol buffer options."""
+
+ source_context: "SourceContext" = aristaproto.message_field(4)
+ """The source context."""
+
+ syntax: "Syntax" = aristaproto.enum_field(5)
+ """The source syntax."""
+
+ edition: str = aristaproto.string_field(6)
+ """
+ The source edition string, only valid when syntax is SYNTAX_EDITIONS.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class EnumValue(aristaproto.Message):
+ """Enum value definition."""
+
+ name: str = aristaproto.string_field(1)
+ """Enum value name."""
+
+ number: int = aristaproto.int32_field(2)
+ """Enum value number."""
+
+ options: List["Option"] = aristaproto.message_field(3)
+ """Protocol buffer options."""
+
+
+@dataclass(eq=False, repr=False)
+class Option(aristaproto.Message):
+ """
+ A protocol buffer option, which can be attached to a message, field,
+ enumeration, etc.
+ """
+
+ name: str = aristaproto.string_field(1)
+ """
+ The option's name. For protobuf built-in options (options defined in
+ descriptor.proto), this is the short name. For example, `"map_entry"`.
+ For custom options, it should be the fully-qualified name. For example,
+ `"google.api.http"`.
+ """
+
+ value: "Any" = aristaproto.message_field(2)
+ """
+ The option's value packed in an Any message. If the value is a primitive,
+ the corresponding wrapper type defined in google/protobuf/wrappers.proto
+ should be used. If the value is an enum, it should be stored as an int32
+ value using the google.protobuf.Int32Value type.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class Api(aristaproto.Message):
+ """
+ Api is a light-weight descriptor for an API Interface.
+
+ Interfaces are also described as "protocol buffer services" in some contexts,
+ such as by the "service" keyword in a .proto file, but they are different
+ from API Services, which represent a concrete implementation of an interface
+ as opposed to simply a description of methods and bindings. They are also
+ sometimes simply referred to as "APIs" in other contexts, such as the name of
+ this message itself. See https://cloud.google.com/apis/design/glossary for
+ detailed terminology.
+ """
+
+ name: str = aristaproto.string_field(1)
+ """
+ The fully qualified name of this interface, including package name
+ followed by the interface's simple name.
+ """
+
+ methods: List["Method"] = aristaproto.message_field(2)
+ """The methods of this interface, in unspecified order."""
+
+ options: List["Option"] = aristaproto.message_field(3)
+ """Any metadata attached to the interface."""
+
+ version: str = aristaproto.string_field(4)
+ """
+ A version string for this interface. If specified, must have the form
+ `major-version.minor-version`, as in `1.10`. If the minor version is
+ omitted, it defaults to zero. If the entire version field is empty, the
+ major version is derived from the package name, as outlined below. If the
+ field is not empty, the version in the package name will be verified to be
+ consistent with what is provided here.
+
+ The versioning schema uses [semantic
+ versioning](http://semver.org) where the major version number
+ indicates a breaking change and the minor version an additive,
+ non-breaking change. Both version numbers are signals to users
+ what to expect from different versions, and should be carefully
+ chosen based on the product plan.
+
+ The major version is also reflected in the package name of the
+ interface, which must end in `v<major-version>`, as in
+ `google.feature.v1`. For major versions 0 and 1, the suffix can
+ be omitted. Zero major versions must only be used for
+ experimental, non-GA interfaces.
+ """
+
+ source_context: "SourceContext" = aristaproto.message_field(5)
+ """
+ Source context for the protocol buffer service represented by this
+ message.
+ """
+
+ mixins: List["Mixin"] = aristaproto.message_field(6)
+ """Included interfaces. See [Mixin][]."""
+
+ syntax: "Syntax" = aristaproto.enum_field(7)
+ """The source syntax of the service."""
+
+
+@dataclass(eq=False, repr=False)
+class Method(aristaproto.Message):
+ """Method represents a method of an API interface."""
+
+ name: str = aristaproto.string_field(1)
+ """The simple name of this method."""
+
+ request_type_url: str = aristaproto.string_field(2)
+ """A URL of the input message type."""
+
+ request_streaming: bool = aristaproto.bool_field(3)
+ """If true, the request is streamed."""
+
+ response_type_url: str = aristaproto.string_field(4)
+ """The URL of the output message type."""
+
+ response_streaming: bool = aristaproto.bool_field(5)
+ """If true, the response is streamed."""
+
+ options: List["Option"] = aristaproto.message_field(6)
+ """Any metadata attached to the method."""
+
+ syntax: "Syntax" = aristaproto.enum_field(7)
+ """The source syntax of this method."""
+
+
+@dataclass(eq=False, repr=False)
+class Mixin(aristaproto.Message):
+ """
+ Declares an API Interface to be included in this interface. The including
+ interface must redeclare all the methods from the included interface, but
+ documentation and options are inherited as follows:
+
+ - If after comment and whitespace stripping, the documentation
+ string of the redeclared method is empty, it will be inherited
+ from the original method.
+
+ - Each annotation belonging to the service config (http,
+ visibility) which is not set in the redeclared method will be
+ inherited.
+
+ - If an http annotation is inherited, the path pattern will be
+ modified as follows. Any version prefix will be replaced by the
+ version of the including interface plus the [root][] path if
+ specified.
+
+ Example of a simple mixin:
+
+ package google.acl.v1;
+ service AccessControl {
+ // Get the underlying ACL object.
+ rpc GetAcl(GetAclRequest) returns (Acl) {
+ option (google.api.http).get = "/v1/{resource=**}:getAcl";
+ }
+ }
+
+ package google.storage.v2;
+ service Storage {
+ rpc GetAcl(GetAclRequest) returns (Acl);
+
+ // Get a data record.
+ rpc GetData(GetDataRequest) returns (Data) {
+ option (google.api.http).get = "/v2/{resource=**}";
+ }
+ }
+
+ Example of a mixin configuration:
+
+ apis:
+ - name: google.storage.v2.Storage
+ mixins:
+ - name: google.acl.v1.AccessControl
+
+ The mixin construct implies that all methods in `AccessControl` are
+ also declared with same name and request/response types in
+ `Storage`. A documentation generator or annotation processor will
+ see the effective `Storage.GetAcl` method after inheriting
+ documentation and annotations as follows:
+
+ service Storage {
+ // Get the underlying ACL object.
+ rpc GetAcl(GetAclRequest) returns (Acl) {
+ option (google.api.http).get = "/v2/{resource=**}:getAcl";
+ }
+ ...
+ }
+
+ Note how the version in the path pattern changed from `v1` to `v2`.
+
+ If the `root` field in the mixin is specified, it should be a
+ relative path under which inherited HTTP paths are placed. Example:
+
+ apis:
+ - name: google.storage.v2.Storage
+ mixins:
+ - name: google.acl.v1.AccessControl
+ root: acls
+
+ This implies the following inherited HTTP annotation:
+
+ service Storage {
+ // Get the underlying ACL object.
+ rpc GetAcl(GetAclRequest) returns (Acl) {
+ option (google.api.http).get = "/v2/acls/{resource=**}:getAcl";
+ }
+ ...
+ }
+ """
+
+ name: str = aristaproto.string_field(1)
+ """The fully qualified name of the interface which is included."""
+
+ root: str = aristaproto.string_field(2)
+ """
+ If non-empty specifies a path under which inherited HTTP paths
+ are rooted.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class FileDescriptorSet(aristaproto.Message):
+ """
+ The protocol compiler can output a FileDescriptorSet containing the .proto
+ files it parses.
+ """
+
+ file: List["FileDescriptorProto"] = aristaproto.message_field(1)
+
+
+@dataclass(eq=False, repr=False)
+class FileDescriptorProto(aristaproto.Message):
+ """Describes a complete .proto file."""
+
+ name: str = aristaproto.string_field(1)
+ package: str = aristaproto.string_field(2)
+ dependency: List[str] = aristaproto.string_field(3)
+ """Names of files imported by this file."""
+
+ public_dependency: List[int] = aristaproto.int32_field(10)
+ """Indexes of the public imported files in the dependency list above."""
+
+ weak_dependency: List[int] = aristaproto.int32_field(11)
+ """
+ Indexes of the weak imported files in the dependency list.
+ For Google-internal migration only. Do not use.
+ """
+
+ message_type: List["DescriptorProto"] = aristaproto.message_field(4)
+ """All top-level definitions in this file."""
+
+ enum_type: List["EnumDescriptorProto"] = aristaproto.message_field(5)
+ service: List["ServiceDescriptorProto"] = aristaproto.message_field(6)
+ extension: List["FieldDescriptorProto"] = aristaproto.message_field(7)
+ options: "FileOptions" = aristaproto.message_field(8)
+ source_code_info: "SourceCodeInfo" = aristaproto.message_field(9)
+ """
+ This field contains optional information about the original source code.
+ You may safely remove this entire field without harming runtime
+ functionality of the descriptors -- the information is needed only by
+ development tools.
+ """
+
+ syntax: str = aristaproto.string_field(12)
+ """
+ The syntax of the proto file.
+ The supported values are "proto2", "proto3", and "editions".
+
+ If `edition` is present, this value must be "editions".
+ """
+
+ edition: "Edition" = aristaproto.enum_field(14)
+ """The edition of the proto file."""
+
+
+@dataclass(eq=False, repr=False)
+class DescriptorProto(aristaproto.Message):
+ """Describes a message type."""
+
+ name: str = aristaproto.string_field(1)
+ field: List["FieldDescriptorProto"] = aristaproto.message_field(2)
+ extension: List["FieldDescriptorProto"] = aristaproto.message_field(6)
+ nested_type: List["DescriptorProto"] = aristaproto.message_field(3)
+ enum_type: List["EnumDescriptorProto"] = aristaproto.message_field(4)
+ extension_range: List["DescriptorProtoExtensionRange"] = aristaproto.message_field(
+ 5
+ )
+ oneof_decl: List["OneofDescriptorProto"] = aristaproto.message_field(8)
+ options: "MessageOptions" = aristaproto.message_field(7)
+ reserved_range: List["DescriptorProtoReservedRange"] = aristaproto.message_field(9)
+ reserved_name: List[str] = aristaproto.string_field(10)
+ """
+ Reserved field names, which may not be used by fields in the same message.
+ A given name may only be reserved once.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class DescriptorProtoExtensionRange(aristaproto.Message):
+ start: int = aristaproto.int32_field(1)
+ """Inclusive."""
+
+ end: int = aristaproto.int32_field(2)
+ """Exclusive."""
+
+ options: "ExtensionRangeOptions" = aristaproto.message_field(3)
+
+
+@dataclass(eq=False, repr=False)
+class DescriptorProtoReservedRange(aristaproto.Message):
+ """
+ Range of reserved tag numbers. Reserved tag numbers may not be used by
+ fields or extension ranges in the same message. Reserved ranges may
+ not overlap.
+ """
+
+ start: int = aristaproto.int32_field(1)
+ """Inclusive."""
+
+ end: int = aristaproto.int32_field(2)
+ """Exclusive."""
+
+
+@dataclass(eq=False, repr=False)
+class ExtensionRangeOptions(aristaproto.Message):
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+ declaration: List["ExtensionRangeOptionsDeclaration"] = aristaproto.message_field(2)
+ """
+ For external users: DO NOT USE. We are in the process of open sourcing
+ extension declaration and executing internal cleanups before it can be
+ used externally.
+ """
+
+ features: "FeatureSet" = aristaproto.message_field(50)
+ """Any features defined in the specific edition."""
+
+ verification: "ExtensionRangeOptionsVerificationState" = aristaproto.enum_field(3)
+ """
+ The verification state of the range.
+ TODO: flip the default to DECLARATION once all empty ranges
+ are marked as UNVERIFIED.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class ExtensionRangeOptionsDeclaration(aristaproto.Message):
+ number: int = aristaproto.int32_field(1)
+ """The extension number declared within the extension range."""
+
+ full_name: str = aristaproto.string_field(2)
+ """
+ The fully-qualified name of the extension field. There must be a leading
+ dot in front of the full name.
+ """
+
+ type: str = aristaproto.string_field(3)
+ """
+ The fully-qualified type name of the extension field. Unlike
+ Metadata.type, Declaration.type must have a leading dot for messages
+ and enums.
+ """
+
+ reserved: bool = aristaproto.bool_field(5)
+ """
+ If true, indicates that the number is reserved in the extension range,
+ and any extension field with the number will fail to compile. Set this
+ when a declared extension field is deleted.
+ """
+
+ repeated: bool = aristaproto.bool_field(6)
+ """
+ If true, indicates that the extension must be defined as repeated.
+ Otherwise the extension must be defined as optional.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class FieldDescriptorProto(aristaproto.Message):
+ """Describes a field within a message."""
+
+ name: str = aristaproto.string_field(1)
+ number: int = aristaproto.int32_field(3)
+ label: "FieldDescriptorProtoLabel" = aristaproto.enum_field(4)
+ type: "FieldDescriptorProtoType" = aristaproto.enum_field(5)
+ """
+ If type_name is set, this need not be set. If both this and type_name
+ are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
+ """
+
+ type_name: str = aristaproto.string_field(6)
+ """
+ For message and enum types, this is the name of the type. If the name
+ starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
+ rules are used to find the type (i.e. first the nested types within this
+ message are searched, then within the parent, on up to the root
+ namespace).
+ """
+
+ extendee: str = aristaproto.string_field(2)
+ """
+ For extensions, this is the name of the type being extended. It is
+ resolved in the same manner as type_name.
+ """
+
+ default_value: str = aristaproto.string_field(7)
+ """
+ For numeric types, contains the original text representation of the value.
+ For booleans, "true" or "false".
+ For strings, contains the default text contents (not escaped in any way).
+ For bytes, contains the C escaped value. All bytes >= 128 are escaped.
+ """
+
+ oneof_index: int = aristaproto.int32_field(9)
+ """
+ If set, gives the index of a oneof in the containing type's oneof_decl
+ list. This field is a member of that oneof.
+ """
+
+ json_name: str = aristaproto.string_field(10)
+ """
+ JSON name of this field. The value is set by protocol compiler. If the
+ user has set a "json_name" option on this field, that option's value
+ will be used. Otherwise, it's deduced from the field's name by converting
+ it to camelCase.
+ """
+
+ options: "FieldOptions" = aristaproto.message_field(8)
+ proto3_optional: bool = aristaproto.bool_field(17)
+ """
+ If true, this is a proto3 "optional". When a proto3 field is optional, it
+ tracks presence regardless of field type.
+
+ When proto3_optional is true, this field must belong to a oneof to signal
+ to old proto3 clients that presence is tracked for this field. This oneof
+ is known as a "synthetic" oneof, and this field must be its sole member
+ (each proto3 optional field gets its own synthetic oneof). Synthetic oneofs
+ exist in the descriptor only, and do not generate any API. Synthetic oneofs
+ must be ordered after all "real" oneofs.
+
+ For message fields, proto3_optional doesn't create any semantic change,
+ since non-repeated message fields always track presence. However it still
+ indicates the semantic detail of whether the user wrote "optional" or not.
+ This can be useful for round-tripping the .proto file. For consistency we
+ give message fields a synthetic oneof also, even though it is not required
+ to track presence. This is especially important because the parser can't
+ tell if a field is a message or an enum, so it must always create a
+ synthetic oneof.
+
+ Proto2 optional fields do not set this flag, because they already indicate
+ optional with `LABEL_OPTIONAL`.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class OneofDescriptorProto(aristaproto.Message):
+ """Describes a oneof."""
+
+ name: str = aristaproto.string_field(1)
+ options: "OneofOptions" = aristaproto.message_field(2)
+
+
+@dataclass(eq=False, repr=False)
+class EnumDescriptorProto(aristaproto.Message):
+ """Describes an enum type."""
+
+ name: str = aristaproto.string_field(1)
+ value: List["EnumValueDescriptorProto"] = aristaproto.message_field(2)
+ options: "EnumOptions" = aristaproto.message_field(3)
+ reserved_range: List[
+ "EnumDescriptorProtoEnumReservedRange"
+ ] = aristaproto.message_field(4)
+ """
+ Range of reserved numeric values. Reserved numeric values may not be used
+ by enum values in the same enum declaration. Reserved ranges may not
+ overlap.
+ """
+
+ reserved_name: List[str] = aristaproto.string_field(5)
+ """
+ Reserved enum value names, which may not be reused. A given name may only
+ be reserved once.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class EnumDescriptorProtoEnumReservedRange(aristaproto.Message):
+ """
+ Range of reserved numeric values. Reserved values may not be used by
+ entries in the same enum. Reserved ranges may not overlap.
+
+ Note that this is distinct from DescriptorProto.ReservedRange in that it
+ is inclusive such that it can appropriately represent the entire int32
+ domain.
+ """
+
+ start: int = aristaproto.int32_field(1)
+ end: int = aristaproto.int32_field(2)
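+
+
+# Illustrative sketch (not upstream-generated code): the inclusivity contrast
+# called out in the docstring above.
+def _example_reserved_ranges() -> None:
+    # Message reserved ranges have an exclusive end, so this reserves field
+    # numbers 5 through 9:
+    DescriptorProtoReservedRange(start=5, end=10)
+    # Enum reserved ranges are inclusive on both ends, so this reserves
+    # values 5 through 10:
+    EnumDescriptorProtoEnumReservedRange(start=5, end=10)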
+
+
+@dataclass(eq=False, repr=False)
+class EnumValueDescriptorProto(aristaproto.Message):
+ """Describes a value within an enum."""
+
+ name: str = aristaproto.string_field(1)
+ number: int = aristaproto.int32_field(2)
+ options: "EnumValueOptions" = aristaproto.message_field(3)
+
+
+@dataclass(eq=False, repr=False)
+class ServiceDescriptorProto(aristaproto.Message):
+ """Describes a service."""
+
+ name: str = aristaproto.string_field(1)
+ method: List["MethodDescriptorProto"] = aristaproto.message_field(2)
+ options: "ServiceOptions" = aristaproto.message_field(3)
+
+
+@dataclass(eq=False, repr=False)
+class MethodDescriptorProto(aristaproto.Message):
+ """Describes a method of a service."""
+
+ name: str = aristaproto.string_field(1)
+ input_type: str = aristaproto.string_field(2)
+ """
+ Input and output type names. These are resolved in the same way as
+ FieldDescriptorProto.type_name, but must refer to a message type.
+ """
+
+ output_type: str = aristaproto.string_field(3)
+ options: "MethodOptions" = aristaproto.message_field(4)
+ client_streaming: bool = aristaproto.bool_field(5)
+ """Identifies if client streams multiple client messages"""
+
+ server_streaming: bool = aristaproto.bool_field(6)
+ """Identifies if server streams multiple server messages"""
+
+
+@dataclass(eq=False, repr=False)
+class FileOptions(aristaproto.Message):
+ java_package: str = aristaproto.string_field(1)
+ """
+ Sets the Java package where classes generated from this .proto will be
+ placed. By default, the proto package is used, but this is often
+ inappropriate because proto packages do not normally start with backwards
+ domain names.
+ """
+
+ java_outer_classname: str = aristaproto.string_field(8)
+ """
+ Controls the name of the wrapper Java class generated for the .proto file.
+ That class will always contain the .proto file's getDescriptor() method as
+ well as any top-level extensions defined in the .proto file.
+ If java_multiple_files is disabled, then all the other classes from the
+ .proto file will be nested inside the single wrapper outer class.
+ """
+
+ java_multiple_files: bool = aristaproto.bool_field(10)
+ """
+ If enabled, then the Java code generator will generate a separate .java
+ file for each top-level message, enum, and service defined in the .proto
+ file. Thus, these types will *not* be nested inside the wrapper class
+ named by java_outer_classname. However, the wrapper class will still be
+ generated to contain the file's getDescriptor() method as well as any
+ top-level extensions defined in the file.
+ """
+
+ java_generate_equals_and_hash: bool = aristaproto.bool_field(20)
+ """This option does nothing."""
+
+ java_string_check_utf8: bool = aristaproto.bool_field(27)
+ """
+ A proto2 file can set this to true to opt in to UTF-8 checking for Java,
+ which will throw an exception if invalid UTF-8 is parsed from the wire or
+ assigned to a string field.
+
+ TODO: clarify exactly what kinds of field types this option
+ applies to, and update these docs accordingly.
+
+ Proto3 files already perform these checks. Setting the option explicitly to
+ false has no effect: it cannot be used to opt proto3 files out of UTF-8
+ checks.
+ """
+
+ optimize_for: "FileOptionsOptimizeMode" = aristaproto.enum_field(9)
+ go_package: str = aristaproto.string_field(11)
+ """
+ Sets the Go package where structs generated from this .proto will be
+ placed. If omitted, the Go package will be derived from the following:
+ - The basename of the package import path, if provided.
+ - Otherwise, the package statement in the .proto file, if present.
+ - Otherwise, the basename of the .proto file, without extension.
+ """
+
+ cc_generic_services: bool = aristaproto.bool_field(16)
+ """
+ Should generic services be generated in each language? "Generic" services
+ are not specific to any particular RPC system. They are generated by the
+ main code generators in each language (without additional plugins).
+ Generic services were the only kind of service generation supported by
+ early versions of google.protobuf.
+
+ Generic services are now considered deprecated in favor of using plugins
+ that generate code specific to your particular RPC system. Therefore,
+ these default to false. Old code which depends on generic services should
+ explicitly set them to true.
+ """
+
+ java_generic_services: bool = aristaproto.bool_field(17)
+ py_generic_services: bool = aristaproto.bool_field(18)
+ deprecated: bool = aristaproto.bool_field(23)
+ """
+ Is this file deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for everything in the file, or it will be completely ignored; at the very
+ least, this is a formalization for deprecating files.
+ """
+
+ cc_enable_arenas: bool = aristaproto.bool_field(31)
+ """
+ Enables the use of arenas for the proto messages in this file. This applies
+ only to generated classes for C++.
+ """
+
+ objc_class_prefix: str = aristaproto.string_field(36)
+ """
+ Sets the objective c class prefix which is prepended to all objective c
+ generated classes from this .proto. There is no default.
+ """
+
+ csharp_namespace: str = aristaproto.string_field(37)
+ """Namespace for generated classes; defaults to the package."""
+
+ swift_prefix: str = aristaproto.string_field(39)
+ """
+ By default, Swift generators will take the proto package, CamelCase it
+ (replacing '.' with underscore), and use that to prefix the types/symbols
+ defined. When this option is provided, they will use this value instead
+ to prefix the types/symbols defined.
+ """
+
+ php_class_prefix: str = aristaproto.string_field(40)
+ """
+ Sets the php class prefix which is prepended to all php generated classes
+ from this .proto. Default is empty.
+ """
+
+ php_namespace: str = aristaproto.string_field(41)
+ """
+ Use this option to change the namespace of php generated classes. Default
+ is empty. When this option is empty, the package name will be used for
+ determining the namespace.
+ """
+
+ php_metadata_namespace: str = aristaproto.string_field(44)
+ """
+ Use this option to change the namespace of php generated metadata classes.
+ Default is empty. When this option is empty, the proto file name will be
+ used for determining the namespace.
+ """
+
+ ruby_package: str = aristaproto.string_field(45)
+ """
+ Use this option to change the package of ruby generated classes. Default
+ is empty. When this option is not set, the package name will be used for
+ determining the ruby package.
+ """
+
+ features: "FeatureSet" = aristaproto.message_field(50)
+ """Any features defined in the specific edition."""
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """
+ The parser stores options it doesn't recognize here.
+ See the documentation for the "Options" section above.
+ """
+
+ def __post_init__(self) -> None:
+ super().__post_init__()
+ if self.is_set("java_generate_equals_and_hash"):
+ warnings.warn(
+ "FileOptions.java_generate_equals_and_hash is deprecated",
+ DeprecationWarning,
+ )
+
+
+@dataclass(eq=False, repr=False)
+class MessageOptions(aristaproto.Message):
+ message_set_wire_format: bool = aristaproto.bool_field(1)
+ """
+ Set true to use the old proto1 MessageSet wire format for extensions.
+ This is provided for backwards-compatibility with the MessageSet wire
+ format. You should not use this for any other reason: It's less
+ efficient, has fewer features, and is more complicated.
+
+ The message must be defined exactly as follows:
+ message Foo {
+ option message_set_wire_format = true;
+ extensions 4 to max;
+ }
+ Note that the message cannot have any defined fields; MessageSets only
+ have extensions.
+
+ All extensions of your type must be singular messages; e.g. they cannot
+ be int32s, enums, or repeated messages.
+
+ Because this is an option, the above two restrictions are not enforced by
+ the protocol compiler.
+ """
+
+ no_standard_descriptor_accessor: bool = aristaproto.bool_field(2)
+ """
+ Disables the generation of the standard "descriptor()" accessor, which can
+ conflict with a field of the same name. This is meant to make migration
+ from proto1 easier; new code should avoid fields named "descriptor".
+ """
+
+ deprecated: bool = aristaproto.bool_field(3)
+ """
+ Is this message deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for the message, or it will be completely ignored; at the very least,
+ this is a formalization for deprecating messages.
+ """
+
+ map_entry: bool = aristaproto.bool_field(7)
+ """
+ Whether the message is an automatically generated map entry type for the
+ maps field.
+
+ For maps fields:
+ map<KeyType, ValueType> map_field = 1;
+ The parsed descriptor looks like:
+ message MapFieldEntry {
+ option map_entry = true;
+ optional KeyType key = 1;
+ optional ValueType value = 2;
+ }
+ repeated MapFieldEntry map_field = 1;
+
+ Implementations may choose not to generate the map_entry=true message, but
+ use a native map in the target language to hold the keys and values.
+ The reflection APIs in such implementations still need to work as
+ if the field is a repeated message field.
+
+ NOTE: Do not set the option in .proto files. Always use the maps syntax
+ instead. The option should only be implicitly set by the proto compiler
+ parser.
+ """
+
+ deprecated_legacy_json_field_conflicts: bool = aristaproto.bool_field(11)
+ """
+ Enable the legacy handling of JSON field name conflicts. This lowercases
+ and strips underscores from the fields before comparison in proto3 only.
+ The new behavior takes `json_name` into account and applies to proto2 as
+ well.
+
+ This should only be used as a temporary measure against broken builds due
+ to the change in behavior for JSON field name conflicts.
+
+ TODO This is legacy behavior we plan to remove once downstream
+ teams have had time to migrate.
+ """
+
+ features: "FeatureSet" = aristaproto.message_field(12)
+ """Any features defined in the specific edition."""
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+ def __post_init__(self) -> None:
+ super().__post_init__()
+ if self.is_set("deprecated_legacy_json_field_conflicts"):
+ warnings.warn(
+ "MessageOptions.deprecated_legacy_json_field_conflicts is deprecated",
+ DeprecationWarning,
+ )
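+
+
+# Illustrative sketch (not upstream-generated code): the implicit map-entry
+# message described in MessageOptions.map_entry above, for a hypothetical
+#     map<string, int32> counts = 1;
+# Only the compiler should set map_entry; key/value field types are omitted
+# here for brevity.
+def _example_map_entry() -> "DescriptorProto":
+    return DescriptorProto(
+        name="CountsEntry",
+        field=[
+            FieldDescriptorProto(name="key", number=1),
+            FieldDescriptorProto(name="value", number=2),
+        ],
+        options=MessageOptions(map_entry=True),
+    )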
+
+
+@dataclass(eq=False, repr=False)
+class FieldOptions(aristaproto.Message):
+ ctype: "FieldOptionsCType" = aristaproto.enum_field(1)
+ """
+ The ctype option instructs the C++ code generator to use a different
+ representation of the field than it normally would. See the specific
+ options below. This option is only implemented to support use of
+ [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of
+ type "bytes" in the open source release -- sorry, we'll try to include
+ other types in a future version!
+ """
+
+ packed: bool = aristaproto.bool_field(2)
+ """
+ The packed option can be enabled for repeated primitive fields to enable
+ a more efficient representation on the wire. Rather than repeatedly
+ writing the tag and type for each element, the entire array is encoded as
+ a single length-delimited blob. In proto3, only explicitly setting it to
+ false will avoid using packed encoding. This option is prohibited in
+ Editions, but the `repeated_field_encoding` feature can be used to control
+ the behavior.
+ """
+
+ jstype: "FieldOptionsJsType" = aristaproto.enum_field(6)
+ """
+ The jstype option determines the JavaScript type used for values of the
+ field. The option is permitted only for 64 bit integral and fixed types
+ (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
+ is represented as JavaScript string, which avoids loss of precision that
+ can happen when a large value is converted to a floating-point JavaScript number.
+ Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
+ use the JavaScript "number" type. The behavior of the default option
+ JS_NORMAL is implementation dependent.
+
+ This option is an enum to permit additional types to be added, e.g.
+ goog.math.Integer.
+ """
+
+ lazy: bool = aristaproto.bool_field(5)
+ """
+ Should this field be parsed lazily? Lazy applies only to message-type
+ fields. It means that when the outer message is initially parsed, the
+ inner message's contents will not be parsed but instead stored in encoded
+ form. The inner message will actually be parsed when it is first accessed.
+
+ This is only a hint. Implementations are free to choose whether to use
+ eager or lazy parsing regardless of the value of this option. However,
+ setting this option true suggests that the protocol author believes that
+ using lazy parsing on this field is worth the additional bookkeeping
+ overhead typically needed to implement it.
+
+ This option does not affect the public interface of any generated code;
+ all method signatures remain the same. Furthermore, thread-safety of the
+ interface is not affected by this option; const methods remain safe to
+ call from multiple threads concurrently, while non-const methods continue
+ to require exclusive access.
+
+ Note that lazy message fields are still eagerly verified to check
+ ill-formed wireformat or missing required fields. Calling IsInitialized()
+ on the outer message would fail if the inner message has missing required
+ fields. Failed verification would result in parsing failure (except when
+ uninitialized messages are acceptable).
+ """
+
+ unverified_lazy: bool = aristaproto.bool_field(15)
+ """
+ unverified_lazy does no correctness checks on the byte stream. This should
+ only be used where lazy with verification is prohibitive for performance
+ reasons.
+ """
+
+ deprecated: bool = aristaproto.bool_field(3)
+ """
+ Is this field deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for accessors, or it will be completely ignored; at the very least, this
+ is a formalization for deprecating fields.
+ """
+
+ weak: bool = aristaproto.bool_field(10)
+ """For Google-internal migration only. Do not use."""
+
+ debug_redact: bool = aristaproto.bool_field(16)
+ """
+ Indicate that the field value should not be printed out when using debug
+ formats, e.g. when the field contains sensitive credentials.
+ """
+
+ retention: "FieldOptionsOptionRetention" = aristaproto.enum_field(17)
+ targets: List["FieldOptionsOptionTargetType"] = aristaproto.enum_field(19)
+ edition_defaults: List["FieldOptionsEditionDefault"] = aristaproto.message_field(20)
+ features: "FeatureSet" = aristaproto.message_field(21)
+ """Any features defined in the specific edition."""
+
+ feature_support: "FieldOptionsFeatureSupport" = aristaproto.message_field(22)
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+
+@dataclass(eq=False, repr=False)
+class FieldOptionsEditionDefault(aristaproto.Message):
+ edition: "Edition" = aristaproto.enum_field(3)
+ value: str = aristaproto.string_field(2)
+
+
+@dataclass(eq=False, repr=False)
+class FieldOptionsFeatureSupport(aristaproto.Message):
+ """Information about the support window of a feature."""
+
+ edition_introduced: "Edition" = aristaproto.enum_field(1)
+ """
+ The edition that this feature was first available in. In editions
+ earlier than this one, the default assigned to EDITION_LEGACY will be
+ used, and proto files will not be able to override it.
+ """
+
+ edition_deprecated: "Edition" = aristaproto.enum_field(2)
+ """
+ The edition this feature becomes deprecated in. Using this after this
+ edition may trigger warnings.
+ """
+
+ deprecation_warning: str = aristaproto.string_field(3)
+ """
+ The deprecation warning text if this feature is used after the edition it
+ was marked deprecated in.
+ """
+
+ edition_removed: "Edition" = aristaproto.enum_field(4)
+ """
+ The edition this feature is no longer available in. In editions after
+ this one, the last default assigned will be used, and proto files will
+ not be able to override it.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class OneofOptions(aristaproto.Message):
+ features: "FeatureSet" = aristaproto.message_field(1)
+ """Any features defined in the specific edition."""
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+
+@dataclass(eq=False, repr=False)
+class EnumOptions(aristaproto.Message):
+ allow_alias: bool = aristaproto.bool_field(2)
+ """
+ Set this option to true to allow mapping different tag names to the same
+ value.
+ """
+
+ deprecated: bool = aristaproto.bool_field(3)
+ """
+ Is this enum deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for the enum, or it will be completely ignored; at the very least, this
+ is a formalization for deprecating enums.
+ """
+
+ deprecated_legacy_json_field_conflicts: bool = aristaproto.bool_field(6)
+ """
+ Enable the legacy handling of JSON field name conflicts. This lowercases
+ and strips underscores from the fields before comparison in proto3 only.
+ The new behavior takes `json_name` into account and applies to proto2 as
+ well.
+ TODO Remove this legacy behavior once downstream teams have
+ had time to migrate.
+ """
+
+ features: "FeatureSet" = aristaproto.message_field(7)
+ """Any features defined in the specific edition."""
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+ def __post_init__(self) -> None:
+ super().__post_init__()
+ if self.is_set("deprecated_legacy_json_field_conflicts"):
+ warnings.warn(
+ "EnumOptions.deprecated_legacy_json_field_conflicts is deprecated",
+ DeprecationWarning,
+ )
+
+
+@dataclass(eq=False, repr=False)
+class EnumValueOptions(aristaproto.Message):
+ deprecated: bool = aristaproto.bool_field(1)
+ """
+ Is this enum value deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for the enum value, or it will be completely ignored; at the very least,
+ this is a formalization for deprecating enum values.
+ """
+
+ features: "FeatureSet" = aristaproto.message_field(2)
+ """Any features defined in the specific edition."""
+
+ debug_redact: bool = aristaproto.bool_field(3)
+ """
+ Indicate that fields annotated with this enum value should not be printed
+ out when using debug formats, e.g. when the field contains sensitive
+ credentials.
+ """
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+
+@dataclass(eq=False, repr=False)
+class ServiceOptions(aristaproto.Message):
+ features: "FeatureSet" = aristaproto.message_field(34)
+ """Any features defined in the specific edition."""
+
+ deprecated: bool = aristaproto.bool_field(33)
+ """
+ Is this service deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for the service, or it will be completely ignored; at the very least,
+ this is a formalization for deprecating services.
+ """
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+
+@dataclass(eq=False, repr=False)
+class MethodOptions(aristaproto.Message):
+ deprecated: bool = aristaproto.bool_field(33)
+ """
+ Is this method deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for the method, or it will be completely ignored; at the very least,
+ this is a formalization for deprecating methods.
+ """
+
+ idempotency_level: "MethodOptionsIdempotencyLevel" = aristaproto.enum_field(34)
+ features: "FeatureSet" = aristaproto.message_field(35)
+ """Any features defined in the specific edition."""
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+
+@dataclass(eq=False, repr=False)
+class UninterpretedOption(aristaproto.Message):
+ """
+ A message representing an option the parser does not recognize. This only
+ appears in options protos created by the compiler::Parser class.
+ DescriptorPool resolves these when building Descriptor objects. Therefore,
+ options protos in descriptor objects (e.g. returned by Descriptor::options(),
+ or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
+ in them.
+ """
+
+ name: List["UninterpretedOptionNamePart"] = aristaproto.message_field(2)
+ identifier_value: str = aristaproto.string_field(3)
+ """
+ The value of the uninterpreted option, in whatever type the tokenizer
+ identified it as during parsing. Exactly one of these should be set.
+ """
+
+ positive_int_value: int = aristaproto.uint64_field(4)
+ negative_int_value: int = aristaproto.int64_field(5)
+ double_value: float = aristaproto.double_field(6)
+ string_value: bytes = aristaproto.bytes_field(7)
+ aggregate_value: str = aristaproto.string_field(8)
+
+
+@dataclass(eq=False, repr=False)
+class UninterpretedOptionNamePart(aristaproto.Message):
+ """
+ The name of the uninterpreted option. Each string represents a segment in
+ a dot-separated name. is_extension is true iff a segment represents an
+ extension (denoted with parentheses in options specs in .proto files).
+ E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents
+ "foo.(bar.baz).moo".
+ """
+
+ name_part: str = aristaproto.string_field(1)
+ is_extension: bool = aristaproto.bool_field(2)
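+
+
+# Illustrative sketch (not upstream-generated code): the docstring's example
+# name "foo.(bar.baz).moo" encoded as name parts; the parenthesized segment is
+# an extension. The identifier value is a placeholder.
+def _example_uninterpreted_option() -> "UninterpretedOption":
+    return UninterpretedOption(
+        name=[
+            UninterpretedOptionNamePart(name_part="foo", is_extension=False),
+            UninterpretedOptionNamePart(name_part="bar.baz", is_extension=True),
+            UninterpretedOptionNamePart(name_part="moo", is_extension=False),
+        ],
+        identifier_value="example",
+    )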
+
+
+@dataclass(eq=False, repr=False)
+class FeatureSet(aristaproto.Message):
+ """
+ TODO Enums in C++ gencode (and potentially other languages) are
+ not well scoped. This means that each of the feature enums below can clash
+ with each other. The short names we've chosen maximize call-site
+ readability, but leave us very open to this scenario. A future feature will
+ be designed and implemented to handle this, hopefully before we ever hit a
+ conflict here.
+ """
+
+ field_presence: "FeatureSetFieldPresence" = aristaproto.enum_field(1)
+ enum_type: "FeatureSetEnumType" = aristaproto.enum_field(2)
+ repeated_field_encoding: "FeatureSetRepeatedFieldEncoding" = aristaproto.enum_field(
+ 3
+ )
+ utf8_validation: "FeatureSetUtf8Validation" = aristaproto.enum_field(4)
+ message_encoding: "FeatureSetMessageEncoding" = aristaproto.enum_field(5)
+ json_format: "FeatureSetJsonFormat" = aristaproto.enum_field(6)
+
+
+@dataclass(eq=False, repr=False)
+class FeatureSetDefaults(aristaproto.Message):
+ """
+ A compiled specification for the defaults of a set of features. These
+ messages are generated from FeatureSet extensions and can be used to seed
+ feature resolution. The resolution with this object becomes a simple search
+ for the closest matching edition, followed by proto merges.
+ """
+
+ defaults: List[
+ "FeatureSetDefaultsFeatureSetEditionDefault"
+ ] = aristaproto.message_field(1)
+ minimum_edition: "Edition" = aristaproto.enum_field(4)
+ """
+ The minimum supported edition (inclusive) when this was constructed.
+ Editions before this will not have defaults.
+ """
+
+ maximum_edition: "Edition" = aristaproto.enum_field(5)
+ """
+ The maximum known edition (inclusive) when this was constructed. Editions
+ after this will not have reliable defaults.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class FeatureSetDefaultsFeatureSetEditionDefault(aristaproto.Message):
+ """
+ A map from every known edition with a unique set of defaults to its
+ defaults. Not all editions may be contained here. For a given edition,
+ the defaults at the closest matching edition ordered at or before it should
+ be used. This field must be in strict ascending order by edition.
+ """
+
+ edition: "Edition" = aristaproto.enum_field(3)
+ overridable_features: "FeatureSet" = aristaproto.message_field(4)
+ """Defaults of features that can be overridden in this edition."""
+
+ fixed_features: "FeatureSet" = aristaproto.message_field(5)
+ """Defaults of features that can't be overridden in this edition."""
+
+ features: "FeatureSet" = aristaproto.message_field(2)
+ """
+ TODO Deprecate and remove this field, which is just the
+ above two merged.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class SourceCodeInfo(aristaproto.Message):
+ """
+ Encapsulates information about the original source file from which a
+ FileDescriptorProto was generated.
+ """
+
+ location: List["SourceCodeInfoLocation"] = aristaproto.message_field(1)
+ """
+ A Location identifies a piece of source code in a .proto file which
+ corresponds to a particular definition. This information is intended
+ to be useful to IDEs, code indexers, documentation generators, and similar
+ tools.
+
+ For example, say we have a file like:
+ message Foo {
+ optional string foo = 1;
+ }
+ Let's look at just the field definition:
+ optional string foo = 1;
+ ^ ^^ ^^ ^ ^^^
+ a bc de f ghi
+ We have the following locations:
+ span path represents
+ [a,i) [ 4, 0, 2, 0 ] The whole field definition.
+ [a,b) [ 4, 0, 2, 0, 4 ] The label (optional).
+ [c,d) [ 4, 0, 2, 0, 5 ] The type (string).
+ [e,f) [ 4, 0, 2, 0, 1 ] The name (foo).
+ [g,h) [ 4, 0, 2, 0, 3 ] The number (1).
+
+ Notes:
+ - A location may refer to a repeated field itself (i.e. not to any
+ particular index within it). This is used whenever a set of elements are
+ logically enclosed in a single code segment. For example, an entire
+ extend block (possibly containing multiple extension definitions) will
+ have an outer location whose path refers to the "extensions" repeated
+ field without an index.
+ - Multiple locations may have the same path. This happens when a single
+ logical declaration is spread out across multiple places. The most
+ obvious example is the "extend" block again -- there may be multiple
+ extend blocks in the same scope, each of which will have the same path.
+ - A location's span is not always a subset of its parent's span. For
+ example, the "extendee" of an extension declaration appears at the
+ beginning of the "extend" block and is shared by all extensions within
+ the block.
+ - Just because a location's span is a subset of some other location's span
+ does not mean that it is a descendant. For example, a "group" defines
+ both a type and a field in a single declaration. Thus, the locations
+ corresponding to the type and field and their components will overlap.
+ - Code which tries to interpret locations should probably be designed to
+ ignore those that it doesn't understand, as more types of locations could
+ be recorded in the future.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class SourceCodeInfoLocation(aristaproto.Message):
+ path: List[int] = aristaproto.int32_field(1)
+ """
+ Identifies which part of the FileDescriptorProto was defined at this
+ location.
+
+ Each element is a field number or an index. They form a path from
+ the root FileDescriptorProto to the place where the definition appears.
+ For example, this path:
+ [ 4, 3, 2, 7, 1 ]
+ refers to:
+ file.message_type(3) // 4, 3
+ .field(7) // 2, 7
+ .name() // 1
+ This is because FileDescriptorProto.message_type has field number 4:
+ repeated DescriptorProto message_type = 4;
+ and DescriptorProto.field has field number 2:
+ repeated FieldDescriptorProto field = 2;
+ and FieldDescriptorProto.name has field number 1:
+ optional string name = 1;
+
+ Thus, the above path gives the location of a field name. If we removed
+ the last element:
+ [ 4, 3, 2, 7 ]
+ this path refers to the whole field declaration (from the beginning
+ of the label to the terminating semicolon).
+ """
+
+ span: List[int] = aristaproto.int32_field(2)
+ """
+ Always has exactly three or four elements: start line, start column,
+ end line (optional, otherwise assumed same as start line), end column.
+ These are packed into a single field for efficiency. Note that line
+ and column numbers are zero-based -- typically you will want to add
+ 1 to each before displaying to a user.
+ """
+
+ leading_comments: str = aristaproto.string_field(3)
+ """
+ If this SourceCodeInfo represents a complete declaration, these are any
+ comments appearing before and after the declaration which appear to be
+ attached to the declaration.
+
+ A series of line comments appearing on consecutive lines, with no other
+ tokens appearing on those lines, will be treated as a single comment.
+
+ leading_detached_comments will keep paragraphs of comments that appear
+ before (but not connected to) the current element. Each paragraph,
+ separated by empty lines, will be one comment element in the repeated
+ field.
+
+ Only the comment content is provided; comment markers (e.g. //) are
+ stripped out. For block comments, leading whitespace and an asterisk
+ will be stripped from the beginning of each line other than the first.
+ Newlines are included in the output.
+
+ Examples:
+
+ optional int32 foo = 1; // Comment attached to foo.
+ // Comment attached to bar.
+ optional int32 bar = 2;
+
+ optional string baz = 3;
+ // Comment attached to baz.
+ // Another line attached to baz.
+
+ // Comment attached to moo.
+ //
+ // Another line attached to moo.
+ optional double moo = 4;
+
+ // Detached comment for corge. This is not leading or trailing comments
+ // to moo or corge because there are blank lines separating it from
+ // both.
+
+ // Detached comment for corge paragraph 2.
+
+ optional string corge = 5;
+ /* Block comment attached
+ * to corge. Leading asterisks
+ * will be removed. */
+ /* Block comment attached to
+ * grault. */
+ optional int32 grault = 6;
+
+ // ignored detached comments.
+ """
+
+ trailing_comments: str = aristaproto.string_field(4)
+ leading_detached_comments: List[str] = aristaproto.string_field(6)
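+
+
+# Illustrative sketch (not upstream-generated code): the docstring's worked
+# example as a Location. Path [4, 3, 2, 7, 1] addresses the name of
+# file.message_type(3).field(7); the span values are illustrative and
+# zero-based, covering columns 8-13 of line 21.
+def _example_location() -> "SourceCodeInfoLocation":
+    return SourceCodeInfoLocation(path=[4, 3, 2, 7, 1], span=[21, 8, 13])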
+
+
+@dataclass(eq=False, repr=False)
+class GeneratedCodeInfo(aristaproto.Message):
+ """
+ Describes the relationship between generated code and its original source
+ file. A GeneratedCodeInfo message is associated with only one generated
+ source file, but may contain references to different source .proto files.
+ """
+
+ annotation: List["GeneratedCodeInfoAnnotation"] = aristaproto.message_field(1)
+ """
+ An Annotation connects some span of text in generated code to an element
+ of its generating .proto file.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class GeneratedCodeInfoAnnotation(aristaproto.Message):
+ path: List[int] = aristaproto.int32_field(1)
+ """
+ Identifies the element in the original source .proto file. This field
+ is formatted the same as SourceCodeInfo.Location.path.
+ """
+
+ source_file: str = aristaproto.string_field(2)
+ """Identifies the filesystem path to the original source .proto."""
+
+ begin: int = aristaproto.int32_field(3)
+ """
+ Identifies the starting offset in bytes in the generated code
+ that relates to the identified object.
+ """
+
+ end: int = aristaproto.int32_field(4)
+ """
+ Identifies the ending offset in bytes in the generated code that
+ relates to the identified object. The end offset should be one past
+ the last relevant byte (so the length of the text = end - begin).
+ """
+
+ semantic: "GeneratedCodeInfoAnnotationSemantic" = aristaproto.enum_field(5)
+
+
+@dataclass(eq=False, repr=False)
+class Duration(aristaproto.Message):
+ """
+ A Duration represents a signed, fixed-length span of time represented
+ as a count of seconds and fractions of seconds at nanosecond
+ resolution. It is independent of any calendar and concepts like "day"
+ or "month". It is related to Timestamp in that the difference between
+ two Timestamp values is a Duration and it can be added or subtracted
+ from a Timestamp. Range is approximately +-10,000 years.
+
+ # Examples
+
+ Example 1: Compute Duration from two Timestamps in pseudo code.
+
+ Timestamp start = ...;
+ Timestamp end = ...;
+ Duration duration = ...;
+
+ duration.seconds = end.seconds - start.seconds;
+ duration.nanos = end.nanos - start.nanos;
+
+ if (duration.seconds < 0 && duration.nanos > 0) {
+ duration.seconds += 1;
+ duration.nanos -= 1000000000;
+ } else if (duration.seconds > 0 && duration.nanos < 0) {
+ duration.seconds -= 1;
+ duration.nanos += 1000000000;
+ }
+
+ Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+
+ Timestamp start = ...;
+ Duration duration = ...;
+ Timestamp end = ...;
+
+ end.seconds = start.seconds + duration.seconds;
+ end.nanos = start.nanos + duration.nanos;
+
+ if (end.nanos < 0) {
+ end.seconds -= 1;
+ end.nanos += 1000000000;
+ } else if (end.nanos >= 1000000000) {
+ end.seconds += 1;
+ end.nanos -= 1000000000;
+ }
+
+ Example 3: Compute Duration from datetime.timedelta in Python.
+
+ td = datetime.timedelta(days=3, minutes=10)
+ duration = Duration()
+ duration.FromTimedelta(td)
+
+ # JSON Mapping
+
+ In JSON format, the Duration type is encoded as a string rather than an
+ object, where the string ends in the suffix "s" (indicating seconds) and
+ is preceded by the number of seconds, with nanoseconds expressed as
+ fractional seconds. For example, 3 seconds with 0 nanoseconds should be
+ encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
+ be expressed in JSON format as "3.000000001s", and 3 seconds and 1
+ microsecond should be expressed in JSON format as "3.000001s".
+ """
+
+ seconds: int = aristaproto.int64_field(1)
+ """
+ Signed seconds of the span of time. Must be from -315,576,000,000
+ to +315,576,000,000 inclusive. Note: these bounds are computed from:
+ 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
+ """
+
+ nanos: int = aristaproto.int32_field(2)
+ """
+ Signed fractions of a second at nanosecond resolution of the span
+ of time. Durations less than one second are represented with a 0
+ `seconds` field and a positive or negative `nanos` field. For durations
+ of one second or more, a non-zero value for the `nanos` field must be
+ of the same sign as the `seconds` field. Must be from -999,999,999
+ to +999,999,999 inclusive.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class Empty(aristaproto.Message):
+ """
+ A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to use it as the request
+ or the response type of an API method. For instance:
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+ }
+ """
+
+ pass
+
+
+@dataclass(eq=False, repr=False)
+class FieldMask(aristaproto.Message):
+ """
+ `FieldMask` represents a set of symbolic field paths, for example:
+
+ paths: "f.a"
+ paths: "f.b.d"
+
+ Here `f` represents a field in some root message, `a` and `b`
+ fields in the message found in `f`, and `d` a field found in the
+ message in `f.b`.
+
+ Field masks are used to specify a subset of fields that should be
+ returned by a get operation or modified by an update operation.
+ Field masks also have a custom JSON encoding (see below).
+
+ # Field Masks in Projections
+
+ When used in the context of a projection, a response message or
+ sub-message is filtered by the API to only contain those fields as
+ specified in the mask. For example, if the mask in the previous
+ example is applied to a response message as follows:
+
+ f {
+ a : 22
+ b {
+ d : 1
+ x : 2
+ }
+ y : 13
+ }
+ z: 8
+
+ The result will not contain specific values for fields x, y and z
+ (their value will be set to the default, and omitted in proto text
+ output):
+
+
+ f {
+ a : 22
+ b {
+ d : 1
+ }
+ }
+
+ A repeated field is not allowed except at the last position of a
+ paths string.
+
+ If a FieldMask object is not present in a get operation, the
+ operation applies to all fields (as if a FieldMask of all fields
+ had been specified).
+
+ Note that a field mask does not necessarily apply to the
+ top-level response message. In case of a REST get operation, the
+ field mask applies directly to the response, but in case of a REST
+ list operation, the mask instead applies to each individual message
+ in the returned resource list. In case of a REST custom method,
+ other definitions may be used. Where the mask applies will be
+ clearly documented together with its declaration in the API. In
+ any case, the effect on the returned resource/resources is required
+ behavior for APIs.
+
+ # Field Masks in Update Operations
+
+ A field mask in update operations specifies which fields of the
+ targeted resource are going to be updated. The API is required
+ to only change the values of the fields as specified in the mask
+ and leave the others untouched. If a resource is passed in to
+ describe the updated values, the API ignores the values of all
+ fields not covered by the mask.
+
+ If a repeated field is specified for an update operation, new values will
+ be appended to the existing repeated field in the target resource. Note that
+ a repeated field is only allowed in the last position of a `paths` string.
+
+ If a sub-message is specified in the last position of the field mask for an
+ update operation, then the new value will be merged into the existing sub-message
+ in the target resource.
+
+ For example, given the target message:
+
+ f {
+ b {
+ d: 1
+ x: 2
+ }
+ c: [1]
+ }
+
+ And an update message:
+
+ f {
+ b {
+ d: 10
+ }
+ c: [2]
+ }
+
+ then if the field mask is:
+
+ paths: ["f.b", "f.c"]
+
+ then the result will be:
+
+ f {
+ b {
+ d: 10
+ x: 2
+ }
+ c: [1, 2]
+ }
+
+ An implementation may provide options to override this default behavior for
+ repeated and message fields.
+
+ In order to reset a field's value to the default, the field must
+ be in the mask and set to the default value in the provided resource.
+ Hence, in order to reset all fields of a resource, provide a default
+ instance of the resource and set all fields in the mask, or do
+ not provide a mask as described below.
+
+ If a field mask is not present on update, the operation applies to
+ all fields (as if a field mask of all fields has been specified).
+ Note that in the presence of schema evolution, this may mean that
+ fields the client does not know and has therefore not filled into
+ the request will be reset to their default. If this is unwanted
+ behavior, a specific service may require a client to always specify
+ a field mask, producing an error if not.
+
+ As with get operations, the location of the resource which
+ describes the updated values in the request message depends on the
+ operation kind. In any case, the effect of the field mask is
+ required to be honored by the API.
+
+ ## Considerations for HTTP REST
+
+ The HTTP kind of an update operation which uses a field mask must
+ be set to PATCH instead of PUT in order to satisfy HTTP semantics
+ (PUT must only be used for full updates).
+
+ # JSON Encoding of Field Masks
+
+ In JSON, a field mask is encoded as a single string where paths are
+ separated by a comma. Field names in each path are converted
+ to/from lower-camel naming conventions.
+
+ As an example, consider the following message declarations:
+
+ message Profile {
+ User user = 1;
+ Photo photo = 2;
+ }
+ message User {
+ string display_name = 1;
+ string address = 2;
+ }
+
+ In proto a field mask for `Profile` may look as such:
+
+ mask {
+ paths: "user.display_name"
+ paths: "photo"
+ }
+
+ In JSON, the same mask is represented as below:
+
+ {
+ mask: "user.displayName,photo"
+ }
+
+ # Field Masks and Oneof Fields
+
+ Field masks treat fields in oneofs just as regular fields. Consider the
+ following message:
+
+ message SampleMessage {
+ oneof test_oneof {
+ string name = 4;
+ SubMessage sub_message = 9;
+ }
+ }
+
+ The field mask can be:
+
+ mask {
+ paths: "name"
+ }
+
+ Or:
+
+ mask {
+ paths: "sub_message"
+ }
+
+ Note that oneof type names ("test_oneof" in this case) cannot be used in
+ paths.
+
+ ## Field Mask Verification
+
+ The implementation of any API method which has a FieldMask type field in the
+ request should verify the included field paths, and return an
+ `INVALID_ARGUMENT` error if any path is unmappable.
+ """
+
+ paths: List[str] = aristaproto.string_field(1)
+ """The set of field mask paths."""
+
+
+@dataclass(eq=False, repr=False)
+class Struct(aristaproto.Message):
+ """
+ `Struct` represents a structured data value, consisting of fields
+ which map to dynamically typed values. In some languages, `Struct`
+ might be supported by a native representation. For example, in
+ scripting languages like JS a struct is represented as an
+ object. The details of that representation are described together
+ with the proto support for the language.
+
+ The JSON representation for `Struct` is a JSON object.
+ """
+
+ fields: Dict[str, "Value"] = aristaproto.map_field(
+ 1, aristaproto.TYPE_STRING, aristaproto.TYPE_MESSAGE
+ )
+ """Unordered map of dynamically typed values."""
+
+ @hybridmethod
+ def from_dict(cls: "type[Self]", value: Mapping[str, Any]) -> Self: # type: ignore
+ self = cls()
+ return self.from_dict(value)
+
+ @from_dict.instancemethod
+ def from_dict(self, value: Mapping[str, Any]) -> Self:
+ fields = {**value}
+ for k in fields:
+ if hasattr(fields[k], "from_dict"):
+ fields[k] = fields[k].from_dict()
+
+ self.fields = fields
+ return self
+
+ def to_dict(
+ self,
+ casing: aristaproto.Casing = aristaproto.Casing.CAMEL,
+ include_default_values: bool = False,
+ ) -> Dict[str, Any]:
+ output = {**self.fields}
+ for k in self.fields:
+ if hasattr(self.fields[k], "to_dict"):
+ output[k] = self.fields[k].to_dict(casing, include_default_values)
+ return output
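+
+
+# Illustrative sketch (not upstream-generated code): round-tripping a mapping
+# of plain scalars through the hybrid from_dict/to_dict defined above. Scalar
+# values are stored and returned unchanged.
+def _example_struct_round_trip() -> Dict[str, Any]:
+    struct = Struct.from_dict({"greeting": "hello", "count": 3})
+    return struct.to_dict()  # {"greeting": "hello", "count": 3}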
+
+
+@dataclass(eq=False, repr=False)
+class Value(aristaproto.Message):
+ """
+ `Value` represents a dynamically typed value which can be either
+ null, a number, a string, a boolean, a recursive struct value, or a
+ list of values. A producer of value is expected to set one of these
+ variants. Absence of any variant indicates an error.
+
+ The JSON representation for `Value` is a JSON value.
+ """
+
+ null_value: Optional["NullValue"] = aristaproto.enum_field(
+ 1, optional=True, group="kind"
+ )
+ """Represents a null value."""
+
+ number_value: Optional[float] = aristaproto.double_field(
+ 2, optional=True, group="kind"
+ )
+ """Represents a double value."""
+
+ string_value: Optional[str] = aristaproto.string_field(
+ 3, optional=True, group="kind"
+ )
+ """Represents a string value."""
+
+ bool_value: Optional[bool] = aristaproto.bool_field(4, optional=True, group="kind")
+ """Represents a boolean value."""
+
+ struct_value: Optional["Struct"] = aristaproto.message_field(
+ 5, optional=True, group="kind"
+ )
+ """Represents a structured value."""
+
+ list_value: Optional["ListValue"] = aristaproto.message_field(
+ 6, optional=True, group="kind"
+ )
+ """Represents a repeated `Value`."""
+
+ @root_validator()
+ def check_oneof(cls, values):
+ return cls._validate_field_groups(values)
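+
+
+# Illustrative sketch (not upstream-generated code): each Value sets exactly
+# one "kind" variant; the validator above enforces the oneof constraint.
+def _example_values() -> List["Value"]:
+    return [
+        Value(string_value="hello"),
+        Value(number_value=1.5),
+        Value(bool_value=True),
+    ]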
+
+
+@dataclass(eq=False, repr=False)
+class ListValue(aristaproto.Message):
+ """
+ `ListValue` is a wrapper around a repeated field of values.
+
+ The JSON representation for `ListValue` is a JSON array.
+ """
+
+ values: List["Value"] = aristaproto.message_field(1)
+ """Repeated field of dynamically typed values."""
+
+
+@dataclass(eq=False, repr=False)
+class Timestamp(aristaproto.Message):
+ """
+ A Timestamp represents a point in time independent of any time zone or local
+ calendar, encoded as a count of seconds and fractions of seconds at
+ nanosecond resolution. The count is relative to an epoch at UTC midnight on
+ January 1, 1970, in the proleptic Gregorian calendar which extends the
+ Gregorian calendar backwards to year one.
+
+ All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
+ second table is needed for interpretation, using a [24-hour linear
+ smear](https://developers.google.com/time/smear).
+
+ The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
+ restricting to that range, we ensure that we can convert to and from [RFC
+ 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
+
+ # Examples
+
+ Example 1: Compute Timestamp from POSIX `time()`.
+
+ Timestamp timestamp;
+ timestamp.set_seconds(time(NULL));
+ timestamp.set_nanos(0);
+
+ Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+
+ struct timeval tv;
+ gettimeofday(&tv, NULL);
+
+ Timestamp timestamp;
+ timestamp.set_seconds(tv.tv_sec);
+ timestamp.set_nanos(tv.tv_usec * 1000);
+
+ Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+
+ FILETIME ft;
+ GetSystemTimeAsFileTime(&ft);
+ UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+
+ // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+ // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+ Timestamp timestamp;
+ timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+ timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+
+ Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+
+ long millis = System.currentTimeMillis();
+
+ Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+ .setNanos((int) ((millis % 1000) * 1000000)).build();
+
+ Example 5: Compute Timestamp from Java `Instant.now()`.
+
+ Instant now = Instant.now();
+
+ Timestamp timestamp =
+ Timestamp.newBuilder().setSeconds(now.getEpochSecond())
+ .setNanos(now.getNano()).build();
+
+ Example 6: Compute Timestamp from current time in Python.
+
+ timestamp = Timestamp()
+ timestamp.GetCurrentTime()
+
+ # JSON Mapping
+
+ In JSON format, the Timestamp type is encoded as a string in the
+ [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+ format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+ where {year} is always expressed using four digits while {month}, {day},
+ {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+ seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+ are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+ is required. A proto3 JSON serializer should always use UTC (as indicated by
+ "Z") when printing the Timestamp type and a proto3 JSON parser should be
+ able to accept both UTC and other timezones (as indicated by an offset).
+
+ For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+ 01:30 UTC on January 15, 2017.
+
+ In JavaScript, one can convert a Date object to this format using the
+ standard
+ [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
+ method. In Python, a standard `datetime.datetime` object can be converted
+ to this format using
+ [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
+ the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
+ the Joda Time's [`ISODateTimeFormat.dateTime()`](
+ http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()
+ ) to obtain a formatter capable of generating timestamps in this format.
+ """
+
+ seconds: int = aristaproto.int64_field(1)
+ """
+ Represents seconds of UTC time since Unix epoch
+ 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
+ 9999-12-31T23:59:59Z inclusive.
+ """
+
+ nanos: int = aristaproto.int32_field(2)
+ """
+ Non-negative fractions of a second at nanosecond resolution. Negative
+ second values with fractions must still have non-negative nanos values
+ that count forward in time. Must be from 0 to 999,999,999
+ inclusive.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class DoubleValue(aristaproto.Message):
+ """
+ Wrapper message for `double`.
+
+ The JSON representation for `DoubleValue` is a JSON number.
+ """
+
+ value: float = aristaproto.double_field(1)
+ """The double value."""
+
+
+@dataclass(eq=False, repr=False)
+class FloatValue(aristaproto.Message):
+ """
+ Wrapper message for `float`.
+
+ The JSON representation for `FloatValue` is a JSON number.
+ """
+
+ value: float = aristaproto.float_field(1)
+ """The float value."""
+
+
+@dataclass(eq=False, repr=False)
+class Int64Value(aristaproto.Message):
+ """
+ Wrapper message for `int64`.
+
+ The JSON representation for `Int64Value` is a JSON string.
+ """
+
+ value: int = aristaproto.int64_field(1)
+ """The int64 value."""
+
+
+@dataclass(eq=False, repr=False)
+class UInt64Value(aristaproto.Message):
+ """
+ Wrapper message for `uint64`.
+
+ The JSON representation for `UInt64Value` is JSON string.
+ """
+
+ value: int = aristaproto.uint64_field(1)
+ """The uint64 value."""
+
+
+@dataclass(eq=False, repr=False)
+class Int32Value(aristaproto.Message):
+ """
+ Wrapper message for `int32`.
+
+ The JSON representation for `Int32Value` is JSON number.
+ """
+
+ value: int = aristaproto.int32_field(1)
+ """The int32 value."""
+
+
+@dataclass(eq=False, repr=False)
+class UInt32Value(aristaproto.Message):
+ """
+ Wrapper message for `uint32`.
+
+ The JSON representation for `UInt32Value` is JSON number.
+ """
+
+ value: int = aristaproto.uint32_field(1)
+ """The uint32 value."""
+
+
+@dataclass(eq=False, repr=False)
+class BoolValue(aristaproto.Message):
+ """
+ Wrapper message for `bool`.
+
+ The JSON representation for `BoolValue` is JSON `true` and `false`.
+ """
+
+ value: bool = aristaproto.bool_field(1)
+ """The bool value."""
+
+
+@dataclass(eq=False, repr=False)
+class StringValue(aristaproto.Message):
+ """
+ Wrapper message for `string`.
+
+ The JSON representation for `StringValue` is JSON string.
+ """
+
+ value: str = aristaproto.string_field(1)
+ """The string value."""
+
+
+@dataclass(eq=False, repr=False)
+class BytesValue(aristaproto.Message):
+ """
+ Wrapper message for `bytes`.
+
+ The JSON representation for `BytesValue` is JSON string.
+ """
+
+ value: bytes = aristaproto.bytes_field(1)
+ """The bytes value."""
+
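+# Illustrative sketch, not part of the generated output: the wrapper messages
+# above let proto3 APIs distinguish "explicitly set to the default" from
+# "unset" -- a nested BoolValue is either present on the wire (even when
+# value=False) or absent entirely, unlike a plain bool field.
+def _wrapper_presence_example() -> bytes:
+    # BoolValue(value=False) serializes to an empty payload, but embedded as a
+    # message field its presence is still recorded by the wire format.
+    return bytes(BoolValue(value=False))
+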
+
+Type.__pydantic_model__.update_forward_refs() # type: ignore
+Field.__pydantic_model__.update_forward_refs() # type: ignore
+Enum.__pydantic_model__.update_forward_refs() # type: ignore
+EnumValue.__pydantic_model__.update_forward_refs() # type: ignore
+Option.__pydantic_model__.update_forward_refs() # type: ignore
+Api.__pydantic_model__.update_forward_refs() # type: ignore
+Method.__pydantic_model__.update_forward_refs() # type: ignore
+FileDescriptorSet.__pydantic_model__.update_forward_refs() # type: ignore
+FileDescriptorProto.__pydantic_model__.update_forward_refs() # type: ignore
+DescriptorProto.__pydantic_model__.update_forward_refs() # type: ignore
+DescriptorProtoExtensionRange.__pydantic_model__.update_forward_refs() # type: ignore
+ExtensionRangeOptions.__pydantic_model__.update_forward_refs() # type: ignore
+FieldDescriptorProto.__pydantic_model__.update_forward_refs() # type: ignore
+OneofDescriptorProto.__pydantic_model__.update_forward_refs() # type: ignore
+EnumDescriptorProto.__pydantic_model__.update_forward_refs() # type: ignore
+EnumValueDescriptorProto.__pydantic_model__.update_forward_refs() # type: ignore
+ServiceDescriptorProto.__pydantic_model__.update_forward_refs() # type: ignore
+MethodDescriptorProto.__pydantic_model__.update_forward_refs() # type: ignore
+FileOptions.__pydantic_model__.update_forward_refs() # type: ignore
+MessageOptions.__pydantic_model__.update_forward_refs() # type: ignore
+FieldOptions.__pydantic_model__.update_forward_refs() # type: ignore
+FieldOptionsEditionDefault.__pydantic_model__.update_forward_refs() # type: ignore
+FieldOptionsFeatureSupport.__pydantic_model__.update_forward_refs() # type: ignore
+OneofOptions.__pydantic_model__.update_forward_refs() # type: ignore
+EnumOptions.__pydantic_model__.update_forward_refs() # type: ignore
+EnumValueOptions.__pydantic_model__.update_forward_refs() # type: ignore
+ServiceOptions.__pydantic_model__.update_forward_refs() # type: ignore
+MethodOptions.__pydantic_model__.update_forward_refs() # type: ignore
+UninterpretedOption.__pydantic_model__.update_forward_refs() # type: ignore
+FeatureSet.__pydantic_model__.update_forward_refs() # type: ignore
+FeatureSetDefaults.__pydantic_model__.update_forward_refs() # type: ignore
+FeatureSetDefaultsFeatureSetEditionDefault.__pydantic_model__.update_forward_refs() # type: ignore
+SourceCodeInfo.__pydantic_model__.update_forward_refs() # type: ignore
+GeneratedCodeInfo.__pydantic_model__.update_forward_refs() # type: ignore
+GeneratedCodeInfoAnnotation.__pydantic_model__.update_forward_refs() # type: ignore
+Struct.__pydantic_model__.update_forward_refs() # type: ignore
+Value.__pydantic_model__.update_forward_refs() # type: ignore
+ListValue.__pydantic_model__.update_forward_refs() # type: ignore
diff --git a/src/aristaproto/lib/pydantic/google/protobuf/compiler/__init__.py b/src/aristaproto/lib/pydantic/google/protobuf/compiler/__init__.py
new file mode 100644
index 0000000..495c555
--- /dev/null
+++ b/src/aristaproto/lib/pydantic/google/protobuf/compiler/__init__.py
@@ -0,0 +1,210 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# sources: google/protobuf/compiler/plugin.proto
+# plugin: python-aristaproto
+# This file has been @generated
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+ from dataclasses import dataclass
+else:
+ from pydantic.dataclasses import dataclass
+
+from typing import List
+
+import aristaproto
+import aristaproto.lib.pydantic.google.protobuf as aristaproto_lib_pydantic_google_protobuf
+
+
+class CodeGeneratorResponseFeature(aristaproto.Enum):
+ """Sync with code_generator.h."""
+
+ FEATURE_NONE = 0
+ FEATURE_PROTO3_OPTIONAL = 1
+ FEATURE_SUPPORTS_EDITIONS = 2
+
+
+@dataclass(eq=False, repr=False)
+class Version(aristaproto.Message):
+ """The version number of protocol compiler."""
+
+ major: int = aristaproto.int32_field(1)
+ minor: int = aristaproto.int32_field(2)
+ patch: int = aristaproto.int32_field(3)
+ suffix: str = aristaproto.string_field(4)
+ """
+ A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
+ be empty for mainline stable releases.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class CodeGeneratorRequest(aristaproto.Message):
+ """An encoded CodeGeneratorRequest is written to the plugin's stdin."""
+
+ file_to_generate: List[str] = aristaproto.string_field(1)
+ """
+ The .proto files that were explicitly listed on the command-line. The
+ code generator should generate code only for these files. Each file's
+ descriptor will be included in proto_file, below.
+ """
+
+ parameter: str = aristaproto.string_field(2)
+ """The generator parameter passed on the command-line."""
+
+ proto_file: List[
+ "aristaproto_lib_pydantic_google_protobuf.FileDescriptorProto"
+ ] = aristaproto.message_field(15)
+ """
+ FileDescriptorProtos for all files in files_to_generate and everything
+ they import. The files will appear in topological order, so each file
+ appears before any file that imports it.
+
+ Note: the files listed in files_to_generate will include runtime-retention
+ options only, but all other files will include source-retention options.
+ The source_file_descriptors field below is available in case you need
+ source-retention options for files_to_generate.
+
+ protoc guarantees that all proto_files will be written after
+ the fields above, even though this is not technically guaranteed by the
+ protobuf wire format. This theoretically could allow a plugin to stream
+ in the FileDescriptorProtos and handle them one by one rather than read
+ the entire set into memory at once. However, as of this writing, this
+ is not similarly optimized on protoc's end -- it will store all fields in
+ memory at once before sending them to the plugin.
+
+ Type names of fields and extensions in the FileDescriptorProto are always
+ fully qualified.
+ """
+
+ source_file_descriptors: List[
+ "aristaproto_lib_pydantic_google_protobuf.FileDescriptorProto"
+ ] = aristaproto.message_field(17)
+ """
+ File descriptors with all options, including source-retention options.
+ These descriptors are only provided for the files listed in
+ files_to_generate.
+ """
+
+ compiler_version: "Version" = aristaproto.message_field(3)
+ """The version number of protocol compiler."""
+
+
+@dataclass(eq=False, repr=False)
+class CodeGeneratorResponse(aristaproto.Message):
+ """The plugin writes an encoded CodeGeneratorResponse to stdout."""
+
+ error: str = aristaproto.string_field(1)
+ """
+ Error message. If non-empty, code generation failed. The plugin process
+ should exit with status code zero even if it reports an error in this way.
+
+ This should be used to indicate errors in .proto files which prevent the
+ code generator from generating correct code. Errors which indicate a
+ problem in protoc itself -- such as the input CodeGeneratorRequest being
+ unparseable -- should be reported by writing a message to stderr and
+ exiting with a non-zero status code.
+ """
+
+ supported_features: int = aristaproto.uint64_field(2)
+ """
+    A bitmask of the features that the code generator supports.
+ This is a bitwise "or" of values from the Feature enum.
+ """
+
+ minimum_edition: int = aristaproto.int32_field(3)
+ """
+ The minimum edition this plugin supports. This will be treated as an
+ Edition enum, but we want to allow unknown values. It should be specified
+    according to the edition enum value, *not* the edition number. Only takes
+ effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
+ """
+
+ maximum_edition: int = aristaproto.int32_field(4)
+ """
+ The maximum edition this plugin supports. This will be treated as an
+ Edition enum, but we want to allow unknown values. It should be specified
+    according to the edition enum value, *not* the edition number. Only takes
+ effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
+ """
+
+ file: List["CodeGeneratorResponseFile"] = aristaproto.message_field(15)
+
+
+@dataclass(eq=False, repr=False)
+class CodeGeneratorResponseFile(aristaproto.Message):
+ """Represents a single generated file."""
+
+ name: str = aristaproto.string_field(1)
+ """
+ The file name, relative to the output directory. The name must not
+    contain "." or ".." components and must be relative, not absolute (so,
+    the file cannot lie outside the output directory). "/" must be used as
+    the path separator, not "\\".
+
+ If the name is omitted, the content will be appended to the previous
+ file. This allows the generator to break large files into small chunks,
+ and allows the generated text to be streamed back to protoc so that large
+ files need not reside completely in memory at one time. Note that as of
+ this writing protoc does not optimize for this -- it will read the entire
+ CodeGeneratorResponse before writing files to disk.
+ """
+
+ insertion_point: str = aristaproto.string_field(2)
+ """
+ If non-empty, indicates that the named file should already exist, and the
+ content here is to be inserted into that file at a defined insertion
+ point. This feature allows a code generator to extend the output
+ produced by another code generator. The original generator may provide
+ insertion points by placing special annotations in the file that look
+ like:
+ @@protoc_insertion_point(NAME)
+ The annotation can have arbitrary text before and after it on the line,
+ which allows it to be placed in a comment. NAME should be replaced with
+ an identifier naming the point -- this is what other generators will use
+ as the insertion_point. Code inserted at this point will be placed
+ immediately above the line containing the insertion point (thus multiple
+ insertions to the same point will come out in the order they were added).
+ The double-@ is intended to make it unlikely that the generated code
+ could contain things that look like insertion points by accident.
+
+ For example, the C++ code generator places the following line in the
+ .pb.h files that it generates:
+ // @@protoc_insertion_point(namespace_scope)
+ This line appears within the scope of the file's package namespace, but
+ outside of any particular class. Another plugin can then specify the
+ insertion_point "namespace_scope" to generate additional classes or
+ other declarations that should be placed in this scope.
+
+ Note that if the line containing the insertion point begins with
+ whitespace, the same whitespace will be added to every line of the
+ inserted text. This is useful for languages like Python, where
+ indentation matters. In these languages, the insertion point comment
+ should be indented the same amount as any inserted code will need to be
+ in order to work correctly in that context.
+
+ The code generator that generates the initial file and the one which
+ inserts into it must both run as part of a single invocation of protoc.
+ Code generators are executed in the order in which they appear on the
+ command line.
+
+ If |insertion_point| is present, |name| must also be present.
+ """
+
+ content: str = aristaproto.string_field(15)
+ """The file contents."""
+
+ generated_code_info: (
+ "aristaproto_lib_pydantic_google_protobuf.GeneratedCodeInfo"
+ ) = aristaproto.message_field(16)
+ """
+ Information describing the file content being inserted. If an insertion
+ point is used, this information will be appropriately offset and inserted
+ into the code generation metadata for the generated files.
+ """
+
+
+CodeGeneratorRequest.__pydantic_model__.update_forward_refs() # type: ignore
+CodeGeneratorResponse.__pydantic_model__.update_forward_refs() # type: ignore
+CodeGeneratorResponseFile.__pydantic_model__.update_forward_refs() # type: ignore
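+
+
+# Illustrative sketch, not part of the generated output: the plugin contract
+# described above -- protoc writes an encoded CodeGeneratorRequest to stdin and
+# expects an encoded CodeGeneratorResponse on stdout. Assumes aristaproto's
+# betterproto-style `parse()` / `bytes()` API; `_plugin_example` is a
+# hypothetical helper, not a published entry point.
+def _plugin_example() -> None:
+    import sys
+
+    request = CodeGeneratorRequest().parse(sys.stdin.buffer.read())
+    response = CodeGeneratorResponse(
+        supported_features=CodeGeneratorResponseFeature.FEATURE_PROTO3_OPTIONAL,
+        file=[
+            CodeGeneratorResponseFile(
+                name=f"{name}.echo.txt",
+                content=f"generated from {name}",
+            )
+            for name in request.file_to_generate
+        ],
+    )
+    sys.stdout.buffer.write(bytes(response))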
diff --git a/src/aristaproto/lib/std/__init__.py b/src/aristaproto/lib/std/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/aristaproto/lib/std/__init__.py
diff --git a/src/aristaproto/lib/std/google/__init__.py b/src/aristaproto/lib/std/google/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/aristaproto/lib/std/google/__init__.py
diff --git a/src/aristaproto/lib/std/google/protobuf/__init__.py b/src/aristaproto/lib/std/google/protobuf/__init__.py
new file mode 100644
index 0000000..783676a
--- /dev/null
+++ b/src/aristaproto/lib/std/google/protobuf/__init__.py
@@ -0,0 +1,2526 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# sources: google/protobuf/any.proto, google/protobuf/api.proto, google/protobuf/descriptor.proto, google/protobuf/duration.proto, google/protobuf/empty.proto, google/protobuf/field_mask.proto, google/protobuf/source_context.proto, google/protobuf/struct.proto, google/protobuf/timestamp.proto, google/protobuf/type.proto, google/protobuf/wrappers.proto
+# plugin: python-aristaproto
+
+import warnings
+from dataclasses import dataclass
+from typing import (
+ Dict,
+ List,
+ Mapping,
+)
+
+from typing_extensions import Self
+
+import aristaproto
+from aristaproto.utils import hybridmethod
+
+
+class Syntax(aristaproto.Enum):
+ """The syntax in which a protocol buffer element is defined."""
+
+ PROTO2 = 0
+ """Syntax `proto2`."""
+
+ PROTO3 = 1
+ """Syntax `proto3`."""
+
+ EDITIONS = 2
+ """Syntax `editions`."""
+
+
+class FieldKind(aristaproto.Enum):
+ """Basic field types."""
+
+ TYPE_UNKNOWN = 0
+ """Field type unknown."""
+
+ TYPE_DOUBLE = 1
+ """Field type double."""
+
+ TYPE_FLOAT = 2
+ """Field type float."""
+
+ TYPE_INT64 = 3
+ """Field type int64."""
+
+ TYPE_UINT64 = 4
+ """Field type uint64."""
+
+ TYPE_INT32 = 5
+ """Field type int32."""
+
+ TYPE_FIXED64 = 6
+ """Field type fixed64."""
+
+ TYPE_FIXED32 = 7
+ """Field type fixed32."""
+
+ TYPE_BOOL = 8
+ """Field type bool."""
+
+ TYPE_STRING = 9
+ """Field type string."""
+
+ TYPE_GROUP = 10
+ """Field type group. Proto2 syntax only, and deprecated."""
+
+ TYPE_MESSAGE = 11
+ """Field type message."""
+
+ TYPE_BYTES = 12
+ """Field type bytes."""
+
+ TYPE_UINT32 = 13
+ """Field type uint32."""
+
+ TYPE_ENUM = 14
+ """Field type enum."""
+
+ TYPE_SFIXED32 = 15
+ """Field type sfixed32."""
+
+ TYPE_SFIXED64 = 16
+ """Field type sfixed64."""
+
+ TYPE_SINT32 = 17
+ """Field type sint32."""
+
+ TYPE_SINT64 = 18
+ """Field type sint64."""
+
+
+class FieldCardinality(aristaproto.Enum):
+ """Whether a field is optional, required, or repeated."""
+
+ CARDINALITY_UNKNOWN = 0
+ """For fields with unknown cardinality."""
+
+ CARDINALITY_OPTIONAL = 1
+ """For optional fields."""
+
+ CARDINALITY_REQUIRED = 2
+ """For required fields. Proto2 syntax only."""
+
+ CARDINALITY_REPEATED = 3
+ """For repeated fields."""
+
+
+class Edition(aristaproto.Enum):
+ """The full set of known editions."""
+
+ UNKNOWN = 0
+ """A placeholder for an unknown edition value."""
+
+ PROTO2 = 998
+ """
+ Legacy syntax "editions". These pre-date editions, but behave much like
+ distinct editions. These can't be used to specify the edition of proto
+ files, but feature definitions must supply proto2/proto3 defaults for
+ backwards compatibility.
+ """
+
+ PROTO3 = 999
+ _2023 = 1000
+ """
+ Editions that have been released. The specific values are arbitrary and
+ should not be depended on, but they will always be time-ordered for easy
+ comparison.
+ """
+
+ _2024 = 1001
+ _1_TEST_ONLY = 1
+ """
+ Placeholder editions for testing feature resolution. These should not be
+    used or relied on outside of tests.
+ """
+
+ _2_TEST_ONLY = 2
+ _99997_TEST_ONLY = 99997
+ _99998_TEST_ONLY = 99998
+ _99999_TEST_ONLY = 99999
+ MAX = 2147483647
+ """
+ Placeholder for specifying unbounded edition support. This should only
+ ever be used by plugins that can expect to never require any changes to
+ support a new edition.
+ """
+
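+# Illustrative note, not part of the generated output: because released
+# edition values are time-ordered, support checks can be plain integer
+# comparisons, e.g. Edition.PROTO2 < Edition._2023 < Edition._2024.
+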
+
+class ExtensionRangeOptionsVerificationState(aristaproto.Enum):
+ """The verification state of the extension range."""
+
+ DECLARATION = 0
+ """All the extensions of the range must be declared."""
+
+ UNVERIFIED = 1
+
+
+class FieldDescriptorProtoType(aristaproto.Enum):
+ TYPE_DOUBLE = 1
+ """
+ 0 is reserved for errors.
+ Order is weird for historical reasons.
+ """
+
+ TYPE_FLOAT = 2
+ TYPE_INT64 = 3
+ """
+ Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
+ negative values are likely.
+ """
+
+ TYPE_UINT64 = 4
+ TYPE_INT32 = 5
+ """
+ Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
+ negative values are likely.
+ """
+
+ TYPE_FIXED64 = 6
+ TYPE_FIXED32 = 7
+ TYPE_BOOL = 8
+ TYPE_STRING = 9
+ TYPE_GROUP = 10
+ """
+ Tag-delimited aggregate.
+    Group type is deprecated and not supported after proto2. However, Proto3
+ implementations should still be able to parse the group wire format and
+ treat group fields as unknown fields. In Editions, the group wire format
+ can be enabled via the `message_encoding` feature.
+ """
+
+ TYPE_MESSAGE = 11
+ TYPE_BYTES = 12
+ """New in version 2."""
+
+ TYPE_UINT32 = 13
+ TYPE_ENUM = 14
+ TYPE_SFIXED32 = 15
+ TYPE_SFIXED64 = 16
+ TYPE_SINT32 = 17
+ TYPE_SINT64 = 18
+
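+# Illustrative sketch, not part of the generated output: the ZigZag mapping
+# referenced by the TYPE_SINT32/TYPE_SINT64 comments above turns small
+# negative numbers into small unsigned varints instead of 10-byte ones.
+def _zigzag64_example(n: int) -> int:
+    # -1 -> 1, 1 -> 2, -2 -> 3, ... (64-bit two's complement)
+    return ((n << 1) ^ (n >> 63)) & 0xFFFFFFFFFFFFFFFF
+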
+
+class FieldDescriptorProtoLabel(aristaproto.Enum):
+ LABEL_OPTIONAL = 1
+ """0 is reserved for errors"""
+
+ LABEL_REPEATED = 3
+ LABEL_REQUIRED = 2
+ """
+    The required label is only allowed in proto2. In proto3 and Editions
+ it's explicitly prohibited. In Editions, the `field_presence` feature
+ can be used to get this behavior.
+ """
+
+
+class FileOptionsOptimizeMode(aristaproto.Enum):
+ """Generated classes can be optimized for speed or code size."""
+
+ SPEED = 1
+ CODE_SIZE = 2
+ """etc."""
+
+ LITE_RUNTIME = 3
+
+
+class FieldOptionsCType(aristaproto.Enum):
+ STRING = 0
+ """Default mode."""
+
+ CORD = 1
+ """
+ The option [ctype=CORD] may be applied to a non-repeated field of type
+ "bytes". It indicates that in C++, the data should be stored in a Cord
+ instead of a string. For very large strings, this may reduce memory
+ fragmentation. It may also allow better performance when parsing from a
+ Cord, or when parsing with aliasing enabled, as the parsed Cord may then
+ alias the original buffer.
+ """
+
+ STRING_PIECE = 2
+
+
+class FieldOptionsJsType(aristaproto.Enum):
+ JS_NORMAL = 0
+ """Use the default type."""
+
+ JS_STRING = 1
+ """Use JavaScript strings."""
+
+ JS_NUMBER = 2
+ """Use JavaScript numbers."""
+
+
+class FieldOptionsOptionRetention(aristaproto.Enum):
+ """
+ If set to RETENTION_SOURCE, the option will be omitted from the binary.
+ Note: as of January 2023, support for this is in progress and does not yet
+ have an effect (b/264593489).
+ """
+
+ RETENTION_UNKNOWN = 0
+ RETENTION_RUNTIME = 1
+ RETENTION_SOURCE = 2
+
+
+class FieldOptionsOptionTargetType(aristaproto.Enum):
+ """
+ This indicates the types of entities that the field may apply to when used
+ as an option. If it is unset, then the field may be freely used as an
+ option on any kind of entity. Note: as of January 2023, support for this is
+ in progress and does not yet have an effect (b/264593489).
+ """
+
+ TARGET_TYPE_UNKNOWN = 0
+ TARGET_TYPE_FILE = 1
+ TARGET_TYPE_EXTENSION_RANGE = 2
+ TARGET_TYPE_MESSAGE = 3
+ TARGET_TYPE_FIELD = 4
+ TARGET_TYPE_ONEOF = 5
+ TARGET_TYPE_ENUM = 6
+ TARGET_TYPE_ENUM_ENTRY = 7
+ TARGET_TYPE_SERVICE = 8
+ TARGET_TYPE_METHOD = 9
+
+
+class MethodOptionsIdempotencyLevel(aristaproto.Enum):
+ """
+ Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ or neither? HTTP based RPC implementation may choose GET verb for safe
+ methods, and PUT verb for idempotent methods instead of the default POST.
+ """
+
+ IDEMPOTENCY_UNKNOWN = 0
+ NO_SIDE_EFFECTS = 1
+ IDEMPOTENT = 2
+
+
+class FeatureSetFieldPresence(aristaproto.Enum):
+ FIELD_PRESENCE_UNKNOWN = 0
+ EXPLICIT = 1
+ IMPLICIT = 2
+ LEGACY_REQUIRED = 3
+
+
+class FeatureSetEnumType(aristaproto.Enum):
+ ENUM_TYPE_UNKNOWN = 0
+ OPEN = 1
+ CLOSED = 2
+
+
+class FeatureSetRepeatedFieldEncoding(aristaproto.Enum):
+ REPEATED_FIELD_ENCODING_UNKNOWN = 0
+ PACKED = 1
+ EXPANDED = 2
+
+
+class FeatureSetUtf8Validation(aristaproto.Enum):
+ UTF8_VALIDATION_UNKNOWN = 0
+ VERIFY = 2
+ NONE = 3
+
+
+class FeatureSetMessageEncoding(aristaproto.Enum):
+ MESSAGE_ENCODING_UNKNOWN = 0
+ LENGTH_PREFIXED = 1
+ DELIMITED = 2
+
+
+class FeatureSetJsonFormat(aristaproto.Enum):
+ JSON_FORMAT_UNKNOWN = 0
+ ALLOW = 1
+ LEGACY_BEST_EFFORT = 2
+
+
+class GeneratedCodeInfoAnnotationSemantic(aristaproto.Enum):
+ """
+ Represents the identified object's effect on the element in the original
+ .proto file.
+ """
+
+ NONE = 0
+ """There is no effect or the effect is indescribable."""
+
+ SET = 1
+ """The element is set or otherwise mutated."""
+
+ ALIAS = 2
+ """An alias to the element is returned."""
+
+
+class NullValue(aristaproto.Enum):
+ """
+ `NullValue` is a singleton enumeration to represent the null value for the
+ `Value` type union.
+
+ The JSON representation for `NullValue` is JSON `null`.
+ """
+
+ _ = 0
+ """Null value."""
+
+
+@dataclass(eq=False, repr=False)
+class Any(aristaproto.Message):
+ """
+ `Any` contains an arbitrary serialized protocol buffer message along with a
+ URL that describes the type of the serialized message.
+
+ Protobuf library provides support to pack/unpack Any values in the form
+ of utility functions or additional generated methods of the Any type.
+
+ Example 1: Pack and unpack a message in C++.
+
+ Foo foo = ...;
+ Any any;
+ any.PackFrom(foo);
+ ...
+ if (any.UnpackTo(&foo)) {
+ ...
+ }
+
+ Example 2: Pack and unpack a message in Java.
+
+ Foo foo = ...;
+ Any any = Any.pack(foo);
+ ...
+ if (any.is(Foo.class)) {
+ foo = any.unpack(Foo.class);
+ }
+ // or ...
+ if (any.isSameTypeAs(Foo.getDefaultInstance())) {
+ foo = any.unpack(Foo.getDefaultInstance());
+ }
+
+ Example 3: Pack and unpack a message in Python.
+
+ foo = Foo(...)
+ any = Any()
+ any.Pack(foo)
+ ...
+ if any.Is(Foo.DESCRIPTOR):
+ any.Unpack(foo)
+ ...
+
+ Example 4: Pack and unpack a message in Go
+
+ foo := &pb.Foo{...}
+ any, err := anypb.New(foo)
+ if err != nil {
+ ...
+ }
+ ...
+ foo := &pb.Foo{}
+ if err := any.UnmarshalTo(foo); err != nil {
+ ...
+ }
+
+ The pack methods provided by protobuf library will by default use
+ 'type.googleapis.com/full.type.name' as the type URL and the unpack
+ methods only use the fully qualified type name after the last '/'
+ in the type URL, for example "foo.bar.com/x/y.z" will yield type
+ name "y.z".
+
+ JSON
+ ====
+ The JSON representation of an `Any` value uses the regular
+ representation of the deserialized, embedded message, with an
+ additional field `@type` which contains the type URL. Example:
+
+ package google.profile;
+ message Person {
+ string first_name = 1;
+ string last_name = 2;
+ }
+
+ {
+ "@type": "type.googleapis.com/google.profile.Person",
+ "firstName": <string>,
+ "lastName": <string>
+ }
+
+ If the embedded message type is well-known and has a custom JSON
+ representation, that representation will be embedded adding a field
+ `value` which holds the custom JSON in addition to the `@type`
+ field. Example (for message [google.protobuf.Duration][]):
+
+ {
+ "@type": "type.googleapis.com/google.protobuf.Duration",
+ "value": "1.212s"
+ }
+ """
+
+ type_url: str = aristaproto.string_field(1)
+ """
+ A URL/resource name that uniquely identifies the type of the serialized
+ protocol buffer message. This string must contain at least
+ one "/" character. The last segment of the URL's path must represent
+ the fully qualified name of the type (as in
+ `path/google.protobuf.Duration`). The name should be in a canonical form
+ (e.g., leading "." is not accepted).
+
+ In practice, teams usually precompile into the binary all types that they
+ expect it to use in the context of Any. However, for URLs which use the
+ scheme `http`, `https`, or no scheme, one can optionally set up a type
+ server that maps type URLs to message definitions as follows:
+
+ * If no scheme is provided, `https` is assumed.
+ * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ value in binary format, or produce an error.
+ * Applications are allowed to cache lookup results based on the
+ URL, or have them precompiled into a binary to avoid any
+ lookup. Therefore, binary compatibility needs to be preserved
+ on changes to types. (Use versioned type names to manage
+ breaking changes.)
+
+ Note: this functionality is not currently available in the official
+ protobuf release, and it is not used for type URLs beginning with
+ type.googleapis.com. As of May 2023, there are no widely used type server
+ implementations and no plans to implement one.
+
+ Schemes other than `http`, `https` (or the empty scheme) might be
+ used with implementation specific semantics.
+ """
+
+ value: bytes = aristaproto.bytes_field(2)
+ """
+ Must be a valid serialized protocol buffer of the above specified type.
+ """
+
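+# Illustrative sketch, not part of the generated output: the pack/unpack
+# helpers quoted above are not generated into this module, but an Any can be
+# populated by hand using the canonical type URL convention.
+def _pack_example(timestamp: "Timestamp") -> "Any":
+    return Any(
+        type_url="type.googleapis.com/google.protobuf.Timestamp",
+        value=bytes(timestamp),
+    )
+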
+
+@dataclass(eq=False, repr=False)
+class SourceContext(aristaproto.Message):
+ """
+ `SourceContext` represents information about the source of a
+ protobuf element, like the file in which it is defined.
+ """
+
+ file_name: str = aristaproto.string_field(1)
+ """
+ The path-qualified name of the .proto file that contained the associated
+ protobuf element. For example: `"google/protobuf/source_context.proto"`.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class Type(aristaproto.Message):
+ """A protocol buffer message type."""
+
+ name: str = aristaproto.string_field(1)
+ """The fully qualified message name."""
+
+ fields: List["Field"] = aristaproto.message_field(2)
+ """The list of fields."""
+
+ oneofs: List[str] = aristaproto.string_field(3)
+ """The list of types appearing in `oneof` definitions in this type."""
+
+ options: List["Option"] = aristaproto.message_field(4)
+ """The protocol buffer options."""
+
+ source_context: "SourceContext" = aristaproto.message_field(5)
+ """The source context."""
+
+ syntax: "Syntax" = aristaproto.enum_field(6)
+ """The source syntax."""
+
+ edition: str = aristaproto.string_field(7)
+ """
+ The source edition string, only valid when syntax is SYNTAX_EDITIONS.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class Field(aristaproto.Message):
+ """A single field of a message type."""
+
+ kind: "FieldKind" = aristaproto.enum_field(1)
+ """The field type."""
+
+ cardinality: "FieldCardinality" = aristaproto.enum_field(2)
+ """The field cardinality."""
+
+ number: int = aristaproto.int32_field(3)
+ """The field number."""
+
+ name: str = aristaproto.string_field(4)
+ """The field name."""
+
+ type_url: str = aristaproto.string_field(6)
+ """
+ The field type URL, without the scheme, for message or enumeration
+ types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`.
+ """
+
+ oneof_index: int = aristaproto.int32_field(7)
+ """
+ The index of the field type in `Type.oneofs`, for message or enumeration
+ types. The first type has index 1; zero means the type is not in the list.
+ """
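+    # Illustrative note: oneof_index == 1 refers to Type.oneofs[0];
+    # oneof_index == 0 means the field belongs to no oneof.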
+
+ packed: bool = aristaproto.bool_field(8)
+ """Whether to use alternative packed wire representation."""
+
+ options: List["Option"] = aristaproto.message_field(9)
+ """The protocol buffer options."""
+
+ json_name: str = aristaproto.string_field(10)
+ """The field JSON name."""
+
+ default_value: str = aristaproto.string_field(11)
+ """
+ The string value of the default value of this field. Proto2 syntax only.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class Enum(aristaproto.Message):
+ """Enum type definition."""
+
+ name: str = aristaproto.string_field(1)
+ """Enum type name."""
+
+ enumvalue: List["EnumValue"] = aristaproto.message_field(
+ 2, wraps=aristaproto.TYPE_ENUM
+ )
+ """Enum value definitions."""
+
+ options: List["Option"] = aristaproto.message_field(3)
+ """Protocol buffer options."""
+
+ source_context: "SourceContext" = aristaproto.message_field(4)
+ """The source context."""
+
+ syntax: "Syntax" = aristaproto.enum_field(5)
+ """The source syntax."""
+
+ edition: str = aristaproto.string_field(6)
+ """
+ The source edition string, only valid when syntax is SYNTAX_EDITIONS.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class EnumValue(aristaproto.Message):
+ """Enum value definition."""
+
+ name: str = aristaproto.string_field(1)
+ """Enum value name."""
+
+ number: int = aristaproto.int32_field(2)
+ """Enum value number."""
+
+ options: List["Option"] = aristaproto.message_field(3)
+ """Protocol buffer options."""
+
+
+@dataclass(eq=False, repr=False)
+class Option(aristaproto.Message):
+ """
+ A protocol buffer option, which can be attached to a message, field,
+ enumeration, etc.
+ """
+
+ name: str = aristaproto.string_field(1)
+ """
+ The option's name. For protobuf built-in options (options defined in
+ descriptor.proto), this is the short name. For example, `"map_entry"`.
+ For custom options, it should be the fully-qualified name. For example,
+ `"google.api.http"`.
+ """
+
+ value: "Any" = aristaproto.message_field(2)
+ """
+ The option's value packed in an Any message. If the value is a primitive,
+ the corresponding wrapper type defined in google/protobuf/wrappers.proto
+ should be used. If the value is an enum, it should be stored as an int32
+ value using the google.protobuf.Int32Value type.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class Api(aristaproto.Message):
+ """
+ Api is a light-weight descriptor for an API Interface.
+
+ Interfaces are also described as "protocol buffer services" in some contexts,
+ such as by the "service" keyword in a .proto file, but they are different
+ from API Services, which represent a concrete implementation of an interface
+ as opposed to simply a description of methods and bindings. They are also
+ sometimes simply referred to as "APIs" in other contexts, such as the name of
+ this message itself. See https://cloud.google.com/apis/design/glossary for
+ detailed terminology.
+ """
+
+ name: str = aristaproto.string_field(1)
+ """
+ The fully qualified name of this interface, including package name
+ followed by the interface's simple name.
+ """
+
+ methods: List["Method"] = aristaproto.message_field(2)
+ """The methods of this interface, in unspecified order."""
+
+ options: List["Option"] = aristaproto.message_field(3)
+ """Any metadata attached to the interface."""
+
+ version: str = aristaproto.string_field(4)
+ """
+ A version string for this interface. If specified, must have the form
+ `major-version.minor-version`, as in `1.10`. If the minor version is
+ omitted, it defaults to zero. If the entire version field is empty, the
+ major version is derived from the package name, as outlined below. If the
+ field is not empty, the version in the package name will be verified to be
+ consistent with what is provided here.
+
+ The versioning schema uses [semantic
+ versioning](http://semver.org) where the major version number
+ indicates a breaking change and the minor version an additive,
+ non-breaking change. Both version numbers are signals to users
+ what to expect from different versions, and should be carefully
+ chosen based on the product plan.
+
+ The major version is also reflected in the package name of the
+ interface, which must end in `v<major-version>`, as in
+ `google.feature.v1`. For major versions 0 and 1, the suffix can
+ be omitted. Zero major versions must only be used for
+ experimental, non-GA interfaces.
+ """
+
+ source_context: "SourceContext" = aristaproto.message_field(5)
+ """
+ Source context for the protocol buffer service represented by this
+ message.
+ """
+
+ mixins: List["Mixin"] = aristaproto.message_field(6)
+ """Included interfaces. See [Mixin][]."""
+
+ syntax: "Syntax" = aristaproto.enum_field(7)
+ """The source syntax of the service."""
+
+
+@dataclass(eq=False, repr=False)
+class Method(aristaproto.Message):
+ """Method represents a method of an API interface."""
+
+ name: str = aristaproto.string_field(1)
+ """The simple name of this method."""
+
+ request_type_url: str = aristaproto.string_field(2)
+ """A URL of the input message type."""
+
+ request_streaming: bool = aristaproto.bool_field(3)
+ """If true, the request is streamed."""
+
+ response_type_url: str = aristaproto.string_field(4)
+ """The URL of the output message type."""
+
+ response_streaming: bool = aristaproto.bool_field(5)
+ """If true, the response is streamed."""
+
+ options: List["Option"] = aristaproto.message_field(6)
+ """Any metadata attached to the method."""
+
+ syntax: "Syntax" = aristaproto.enum_field(7)
+ """The source syntax of this method."""
+
+
+@dataclass(eq=False, repr=False)
+class Mixin(aristaproto.Message):
+ """
+ Declares an API Interface to be included in this interface. The including
+ interface must redeclare all the methods from the included interface, but
+ documentation and options are inherited as follows:
+
+ - If after comment and whitespace stripping, the documentation
+ string of the redeclared method is empty, it will be inherited
+ from the original method.
+
+ - Each annotation belonging to the service config (http,
+ visibility) which is not set in the redeclared method will be
+ inherited.
+
+ - If an http annotation is inherited, the path pattern will be
+ modified as follows. Any version prefix will be replaced by the
+ version of the including interface plus the [root][] path if
+ specified.
+
+ Example of a simple mixin:
+
+ package google.acl.v1;
+ service AccessControl {
+ // Get the underlying ACL object.
+ rpc GetAcl(GetAclRequest) returns (Acl) {
+ option (google.api.http).get = "/v1/{resource=**}:getAcl";
+ }
+ }
+
+ package google.storage.v2;
+ service Storage {
+ rpc GetAcl(GetAclRequest) returns (Acl);
+
+ // Get a data record.
+ rpc GetData(GetDataRequest) returns (Data) {
+ option (google.api.http).get = "/v2/{resource=**}";
+ }
+ }
+
+ Example of a mixin configuration:
+
+ apis:
+ - name: google.storage.v2.Storage
+ mixins:
+ - name: google.acl.v1.AccessControl
+
+ The mixin construct implies that all methods in `AccessControl` are
+ also declared with same name and request/response types in
+ `Storage`. A documentation generator or annotation processor will
+    see the effective `Storage.GetAcl` method after inheriting
+ documentation and annotations as follows:
+
+ service Storage {
+ // Get the underlying ACL object.
+ rpc GetAcl(GetAclRequest) returns (Acl) {
+ option (google.api.http).get = "/v2/{resource=**}:getAcl";
+ }
+ ...
+ }
+
+ Note how the version in the path pattern changed from `v1` to `v2`.
+
+ If the `root` field in the mixin is specified, it should be a
+ relative path under which inherited HTTP paths are placed. Example:
+
+ apis:
+ - name: google.storage.v2.Storage
+ mixins:
+ - name: google.acl.v1.AccessControl
+ root: acls
+
+ This implies the following inherited HTTP annotation:
+
+ service Storage {
+ // Get the underlying ACL object.
+ rpc GetAcl(GetAclRequest) returns (Acl) {
+ option (google.api.http).get = "/v2/acls/{resource=**}:getAcl";
+ }
+ ...
+ }
+ """
+
+ name: str = aristaproto.string_field(1)
+ """The fully qualified name of the interface which is included."""
+
+ root: str = aristaproto.string_field(2)
+ """
+ If non-empty specifies a path under which inherited HTTP paths
+ are rooted.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class FileDescriptorSet(aristaproto.Message):
+ """
+ The protocol compiler can output a FileDescriptorSet containing the .proto
+ files it parses.
+ """
+
+ file: List["FileDescriptorProto"] = aristaproto.message_field(1)
+
+
+@dataclass(eq=False, repr=False)
+class FileDescriptorProto(aristaproto.Message):
+ """Describes a complete .proto file."""
+
+ name: str = aristaproto.string_field(1)
+ package: str = aristaproto.string_field(2)
+ dependency: List[str] = aristaproto.string_field(3)
+ """Names of files imported by this file."""
+
+ public_dependency: List[int] = aristaproto.int32_field(10)
+ """Indexes of the public imported files in the dependency list above."""
+
+ weak_dependency: List[int] = aristaproto.int32_field(11)
+ """
+ Indexes of the weak imported files in the dependency list.
+ For Google-internal migration only. Do not use.
+ """
+
+ message_type: List["DescriptorProto"] = aristaproto.message_field(4)
+ """All top-level definitions in this file."""
+
+ enum_type: List["EnumDescriptorProto"] = aristaproto.message_field(5)
+ service: List["ServiceDescriptorProto"] = aristaproto.message_field(6)
+ extension: List["FieldDescriptorProto"] = aristaproto.message_field(7)
+ options: "FileOptions" = aristaproto.message_field(8)
+ source_code_info: "SourceCodeInfo" = aristaproto.message_field(9)
+ """
+ This field contains optional information about the original source code.
+ You may safely remove this entire field without harming runtime
+ functionality of the descriptors -- the information is needed only by
+ development tools.
+ """
+
+ syntax: str = aristaproto.string_field(12)
+ """
+ The syntax of the proto file.
+ The supported values are "proto2", "proto3", and "editions".
+
+ If `edition` is present, this value must be "editions".
+ """
+
+ edition: "Edition" = aristaproto.enum_field(14)
+ """The edition of the proto file."""
+
+
+@dataclass(eq=False, repr=False)
+class DescriptorProto(aristaproto.Message):
+ """Describes a message type."""
+
+ name: str = aristaproto.string_field(1)
+ field: List["FieldDescriptorProto"] = aristaproto.message_field(2)
+ extension: List["FieldDescriptorProto"] = aristaproto.message_field(6)
+ nested_type: List["DescriptorProto"] = aristaproto.message_field(3)
+ enum_type: List["EnumDescriptorProto"] = aristaproto.message_field(4)
+ extension_range: List["DescriptorProtoExtensionRange"] = aristaproto.message_field(
+ 5
+ )
+ oneof_decl: List["OneofDescriptorProto"] = aristaproto.message_field(8)
+ options: "MessageOptions" = aristaproto.message_field(7)
+ reserved_range: List["DescriptorProtoReservedRange"] = aristaproto.message_field(9)
+ reserved_name: List[str] = aristaproto.string_field(10)
+ """
+ Reserved field names, which may not be used by fields in the same message.
+ A given name may only be reserved once.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class DescriptorProtoExtensionRange(aristaproto.Message):
+ start: int = aristaproto.int32_field(1)
+ end: int = aristaproto.int32_field(2)
+ options: "ExtensionRangeOptions" = aristaproto.message_field(3)
+
+
+@dataclass(eq=False, repr=False)
+class DescriptorProtoReservedRange(aristaproto.Message):
+ """
+ Range of reserved tag numbers. Reserved tag numbers may not be used by
+ fields or extension ranges in the same message. Reserved ranges may
+ not overlap.
+ """
+
+ start: int = aristaproto.int32_field(1)
+ end: int = aristaproto.int32_field(2)
+
+
+@dataclass(eq=False, repr=False)
+class ExtensionRangeOptions(aristaproto.Message):
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+ declaration: List["ExtensionRangeOptionsDeclaration"] = aristaproto.message_field(2)
+ """
+ For external users: DO NOT USE. We are in the process of open sourcing
+ extension declaration and executing internal cleanups before it can be
+ used externally.
+ """
+
+ features: "FeatureSet" = aristaproto.message_field(50)
+ """Any features defined in the specific edition."""
+
+ verification: "ExtensionRangeOptionsVerificationState" = aristaproto.enum_field(3)
+ """
+ The verification state of the range.
+ TODO: flip the default to DECLARATION once all empty ranges
+ are marked as UNVERIFIED.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class ExtensionRangeOptionsDeclaration(aristaproto.Message):
+ number: int = aristaproto.int32_field(1)
+ """The extension number declared within the extension range."""
+
+ full_name: str = aristaproto.string_field(2)
+ """
+ The fully-qualified name of the extension field. There must be a leading
+ dot in front of the full name.
+ """
+
+ type: str = aristaproto.string_field(3)
+ """
+ The fully-qualified type name of the extension field. Unlike
+ Metadata.type, Declaration.type must have a leading dot for messages
+ and enums.
+ """
+
+ reserved: bool = aristaproto.bool_field(5)
+ """
+ If true, indicates that the number is reserved in the extension range,
+ and any extension field with the number will fail to compile. Set this
+ when a declared extension field is deleted.
+ """
+
+ repeated: bool = aristaproto.bool_field(6)
+ """
+ If true, indicates that the extension must be defined as repeated.
+ Otherwise the extension must be defined as optional.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class FieldDescriptorProto(aristaproto.Message):
+ """Describes a field within a message."""
+
+ name: str = aristaproto.string_field(1)
+ number: int = aristaproto.int32_field(3)
+ label: "FieldDescriptorProtoLabel" = aristaproto.enum_field(4)
+ type: "FieldDescriptorProtoType" = aristaproto.enum_field(5)
+ """
+ If type_name is set, this need not be set. If both this and type_name
+ are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
+ """
+
+ type_name: str = aristaproto.string_field(6)
+ """
+ For message and enum types, this is the name of the type. If the name
+ starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
+ rules are used to find the type (i.e. first the nested types within this
+ message are searched, then within the parent, on up to the root
+ namespace).
+ """
+
+ extendee: str = aristaproto.string_field(2)
+ """
+ For extensions, this is the name of the type being extended. It is
+ resolved in the same manner as type_name.
+ """
+
+ default_value: str = aristaproto.string_field(7)
+ """
+ For numeric types, contains the original text representation of the value.
+ For booleans, "true" or "false".
+ For strings, contains the default text contents (not escaped in any way).
+ For bytes, contains the C escaped value. All bytes >= 128 are escaped.
+ """
+
+ oneof_index: int = aristaproto.int32_field(9)
+ """
+ If set, gives the index of a oneof in the containing type's oneof_decl
+ list. This field is a member of that oneof.
+ """
+
+ json_name: str = aristaproto.string_field(10)
+ """
+ JSON name of this field. The value is set by protocol compiler. If the
+ user has set a "json_name" option on this field, that option's value
+ will be used. Otherwise, it's deduced from the field's name by converting
+ it to camelCase.
+ """
+
+ options: "FieldOptions" = aristaproto.message_field(8)
+ proto3_optional: bool = aristaproto.bool_field(17)
+ """
+ If true, this is a proto3 "optional". When a proto3 field is optional, it
+ tracks presence regardless of field type.
+
+ When proto3_optional is true, this field must belong to a oneof to signal
+ to old proto3 clients that presence is tracked for this field. This oneof
+ is known as a "synthetic" oneof, and this field must be its sole member
+ (each proto3 optional field gets its own synthetic oneof). Synthetic oneofs
+ exist in the descriptor only, and do not generate any API. Synthetic oneofs
+ must be ordered after all "real" oneofs.
+
+ For message fields, proto3_optional doesn't create any semantic change,
+ since non-repeated message fields always track presence. However it still
+ indicates the semantic detail of whether the user wrote "optional" or not.
+ This can be useful for round-tripping the .proto file. For consistency we
+ give message fields a synthetic oneof also, even though it is not required
+ to track presence. This is especially important because the parser can't
+ tell if a field is a message or an enum, so it must always create a
+ synthetic oneof.
+
+ Proto2 optional fields do not set this flag, because they already indicate
+ optional with `LABEL_OPTIONAL`.
+ """
+
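+# Illustrative note, not part of the generated output: for a proto3 field
+# declared as
+#     optional int32 foo = 1;
+# the compiler records it as if a synthetic oneof had been written,
+#     oneof _foo { int32 foo = 1; }
+# with proto3_optional = true set on the field descriptor.
+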
+
+@dataclass(eq=False, repr=False)
+class OneofDescriptorProto(aristaproto.Message):
+ """Describes a oneof."""
+
+ name: str = aristaproto.string_field(1)
+ options: "OneofOptions" = aristaproto.message_field(2)
+
+
+@dataclass(eq=False, repr=False)
+class EnumDescriptorProto(aristaproto.Message):
+ """Describes an enum type."""
+
+ name: str = aristaproto.string_field(1)
+ value: List["EnumValueDescriptorProto"] = aristaproto.message_field(2)
+ options: "EnumOptions" = aristaproto.message_field(3)
+ reserved_range: List[
+ "EnumDescriptorProtoEnumReservedRange"
+ ] = aristaproto.message_field(4)
+ """
+ Range of reserved numeric values. Reserved numeric values may not be used
+ by enum values in the same enum declaration. Reserved ranges may not
+ overlap.
+ """
+
+ reserved_name: List[str] = aristaproto.string_field(5)
+ """
+ Reserved enum value names, which may not be reused. A given name may only
+ be reserved once.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class EnumDescriptorProtoEnumReservedRange(aristaproto.Message):
+ """
+ Range of reserved numeric values. Reserved values may not be used by
+ entries in the same enum. Reserved ranges may not overlap.
+
+ Note that this is distinct from DescriptorProto.ReservedRange in that it
+ is inclusive such that it can appropriately represent the entire int32
+ domain.
+ """
+
+ start: int = aristaproto.int32_field(1)
+ end: int = aristaproto.int32_field(2)
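+    # Illustrative note: reserving enum values 5..9 here is start=5, end=9
+    # (inclusive), whereas DescriptorProto.ReservedRange encodes the same
+    # span as start=5, end=10 (exclusive end).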
+
+
+@dataclass(eq=False, repr=False)
+class EnumValueDescriptorProto(aristaproto.Message):
+ """Describes a value within an enum."""
+
+ name: str = aristaproto.string_field(1)
+ number: int = aristaproto.int32_field(2)
+ options: "EnumValueOptions" = aristaproto.message_field(3)
+
+
+@dataclass(eq=False, repr=False)
+class ServiceDescriptorProto(aristaproto.Message):
+ """Describes a service."""
+
+ name: str = aristaproto.string_field(1)
+ method: List["MethodDescriptorProto"] = aristaproto.message_field(2)
+ options: "ServiceOptions" = aristaproto.message_field(3)
+
+
+@dataclass(eq=False, repr=False)
+class MethodDescriptorProto(aristaproto.Message):
+ """Describes a method of a service."""
+
+ name: str = aristaproto.string_field(1)
+ input_type: str = aristaproto.string_field(2)
+ """
+ Input and output type names. These are resolved in the same way as
+ FieldDescriptorProto.type_name, but must refer to a message type.
+ """
+
+ output_type: str = aristaproto.string_field(3)
+ options: "MethodOptions" = aristaproto.message_field(4)
+ client_streaming: bool = aristaproto.bool_field(5)
+ """Identifies if client streams multiple client messages"""
+
+ server_streaming: bool = aristaproto.bool_field(6)
+ """Identifies if server streams multiple server messages"""
+
+
+@dataclass(eq=False, repr=False)
+class FileOptions(aristaproto.Message):
+ java_package: str = aristaproto.string_field(1)
+ """
+ Sets the Java package where classes generated from this .proto will be
+ placed. By default, the proto package is used, but this is often
+ inappropriate because proto packages do not normally start with backwards
+ domain names.
+ """
+
+ java_outer_classname: str = aristaproto.string_field(8)
+ """
+ Controls the name of the wrapper Java class generated for the .proto file.
+ That class will always contain the .proto file's getDescriptor() method as
+ well as any top-level extensions defined in the .proto file.
+ If java_multiple_files is disabled, then all the other classes from the
+ .proto file will be nested inside the single wrapper outer class.
+ """
+
+ java_multiple_files: bool = aristaproto.bool_field(10)
+ """
+ If enabled, then the Java code generator will generate a separate .java
+ file for each top-level message, enum, and service defined in the .proto
+ file. Thus, these types will *not* be nested inside the wrapper class
+ named by java_outer_classname. However, the wrapper class will still be
+ generated to contain the file's getDescriptor() method as well as any
+ top-level extensions defined in the file.
+ """
+
+ java_generate_equals_and_hash: bool = aristaproto.bool_field(20)
+ """This option does nothing."""
+
+ java_string_check_utf8: bool = aristaproto.bool_field(27)
+ """
+ A proto2 file can set this to true to opt in to UTF-8 checking for Java,
+ which will throw an exception if invalid UTF-8 is parsed from the wire or
+ assigned to a string field.
+
+ TODO: clarify exactly what kinds of field types this option
+ applies to, and update these docs accordingly.
+
+ Proto3 files already perform these checks. Setting the option explicitly to
+ false has no effect: it cannot be used to opt proto3 files out of UTF-8
+ checks.
+ """
+
+ optimize_for: "FileOptionsOptimizeMode" = aristaproto.enum_field(9)
+ go_package: str = aristaproto.string_field(11)
+ """
+ Sets the Go package where structs generated from this .proto will be
+ placed. If omitted, the Go package will be derived from the following:
+ - The basename of the package import path, if provided.
+ - Otherwise, the package statement in the .proto file, if present.
+ - Otherwise, the basename of the .proto file, without extension.
+ """
+
+ cc_generic_services: bool = aristaproto.bool_field(16)
+ """
+ Should generic services be generated in each language? "Generic" services
+ are not specific to any particular RPC system. They are generated by the
+ main code generators in each language (without additional plugins).
+ Generic services were the only kind of service generation supported by
+ early versions of google.protobuf.
+
+ Generic services are now considered deprecated in favor of using plugins
+ that generate code specific to your particular RPC system. Therefore,
+ these default to false. Old code which depends on generic services should
+ explicitly set them to true.
+ """
+
+ java_generic_services: bool = aristaproto.bool_field(17)
+ py_generic_services: bool = aristaproto.bool_field(18)
+ deprecated: bool = aristaproto.bool_field(23)
+ """
+ Is this file deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+    for everything in the file, or it will be completely ignored; at the very
+ least, this is a formalization for deprecating files.
+ """
+
+ cc_enable_arenas: bool = aristaproto.bool_field(31)
+ """
+ Enables the use of arenas for the proto messages in this file. This applies
+ only to generated classes for C++.
+ """
+
+ objc_class_prefix: str = aristaproto.string_field(36)
+ """
+ Sets the objective c class prefix which is prepended to all objective c
+ generated classes from this .proto. There is no default.
+ """
+
+ csharp_namespace: str = aristaproto.string_field(37)
+ """Namespace for generated classes; defaults to the package."""
+
+ swift_prefix: str = aristaproto.string_field(39)
+ """
+    By default Swift generators will take the proto package and CamelCase it,
+    replacing '.' with underscore, and use that to prefix the types/symbols
+    defined. When this option is provided, they will use this value instead
+    to prefix the types/symbols defined.
+ """
+
+ php_class_prefix: str = aristaproto.string_field(40)
+ """
+ Sets the php class prefix which is prepended to all php generated classes
+ from this .proto. Default is empty.
+ """
+
+ php_namespace: str = aristaproto.string_field(41)
+ """
+ Use this option to change the namespace of php generated classes. Default
+ is empty. When this option is empty, the package name will be used for
+ determining the namespace.
+ """
+
+ php_metadata_namespace: str = aristaproto.string_field(44)
+ """
+ Use this option to change the namespace of php generated metadata classes.
+ Default is empty. When this option is empty, the proto file name will be
+ used for determining the namespace.
+ """
+
+ ruby_package: str = aristaproto.string_field(45)
+ """
+ Use this option to change the package of ruby generated classes. Default
+ is empty. When this option is not set, the package name will be used for
+ determining the ruby package.
+ """
+
+ features: "FeatureSet" = aristaproto.message_field(50)
+ """Any features defined in the specific edition."""
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """
+ The parser stores options it doesn't recognize here.
+ See the documentation for the "Options" section above.
+ """
+
+ def __post_init__(self) -> None:
+ super().__post_init__()
+ if self.is_set("java_generate_equals_and_hash"):
+ warnings.warn(
+ "FileOptions.java_generate_equals_and_hash is deprecated",
+ DeprecationWarning,
+ )
+
+
+@dataclass(eq=False, repr=False)
+class MessageOptions(aristaproto.Message):
+ message_set_wire_format: bool = aristaproto.bool_field(1)
+ """
+ Set true to use the old proto1 MessageSet wire format for extensions.
+ This is provided for backwards-compatibility with the MessageSet wire
+ format. You should not use this for any other reason: It's less
+ efficient, has fewer features, and is more complicated.
+
+ The message must be defined exactly as follows:
+ message Foo {
+ option message_set_wire_format = true;
+ extensions 4 to max;
+ }
+ Note that the message cannot have any defined fields; MessageSets only
+ have extensions.
+
+ All extensions of your type must be singular messages; e.g. they cannot
+ be int32s, enums, or repeated messages.
+
+ Because this is an option, the above two restrictions are not enforced by
+ the protocol compiler.
+ """
+
+ no_standard_descriptor_accessor: bool = aristaproto.bool_field(2)
+ """
+ Disables the generation of the standard "descriptor()" accessor, which can
+ conflict with a field of the same name. This is meant to make migration
+ from proto1 easier; new code should avoid fields named "descriptor".
+ """
+
+ deprecated: bool = aristaproto.bool_field(3)
+ """
+ Is this message deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+    for the message, or it will be completely ignored; at the very least,
+ this is a formalization for deprecating messages.
+ """
+
+ map_entry: bool = aristaproto.bool_field(7)
+ """
+ Whether the message is an automatically generated map entry type for the
+ maps field.
+
+ For maps fields:
+ map<KeyType, ValueType> map_field = 1;
+ The parsed descriptor looks like:
+ message MapFieldEntry {
+ option map_entry = true;
+ optional KeyType key = 1;
+ optional ValueType value = 2;
+ }
+ repeated MapFieldEntry map_field = 1;
+
+ Implementations may choose not to generate the map_entry=true message, but
+ use a native map in the target language to hold the keys and values.
+ The reflection APIs in such implementations still need to work as
+ if the field is a repeated message field.
+
+ NOTE: Do not set the option in .proto files. Always use the maps syntax
+ instead. The option should only be implicitly set by the proto compiler
+ parser.
+ """
+
+ deprecated_legacy_json_field_conflicts: bool = aristaproto.bool_field(11)
+ """
+ Enable the legacy handling of JSON field name conflicts. This lowercases
+    and strips underscores from the fields before comparison in proto3 only.
+ The new behavior takes `json_name` into account and applies to proto2 as
+ well.
+
+ This should only be used as a temporary measure against broken builds due
+ to the change in behavior for JSON field name conflicts.
+
+ TODO This is legacy behavior we plan to remove once downstream
+ teams have had time to migrate.
+ """
+
+ features: "FeatureSet" = aristaproto.message_field(12)
+ """Any features defined in the specific edition."""
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+ def __post_init__(self) -> None:
+ super().__post_init__()
+ if self.is_set("deprecated_legacy_json_field_conflicts"):
+ warnings.warn(
+ "MessageOptions.deprecated_legacy_json_field_conflicts is deprecated",
+ DeprecationWarning,
+ )
+
+
+@dataclass(eq=False, repr=False)
+class FieldOptions(aristaproto.Message):
+ ctype: "FieldOptionsCType" = aristaproto.enum_field(1)
+ """
+ The ctype option instructs the C++ code generator to use a different
+ representation of the field than it normally would. See the specific
+ options below. This option is only implemented to support use of
+ [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of
+ type "bytes" in the open source release -- sorry, we'll try to include
+ other types in a future version!
+ """
+
+ packed: bool = aristaproto.bool_field(2)
+ """
+ The packed option can be enabled for repeated primitive fields to enable
+ a more efficient representation on the wire. Rather than repeatedly
+ writing the tag and type for each element, the entire array is encoded as
+ a single length-delimited blob. In proto3, only explicitly setting it to
+ false will avoid using packed encoding. This option is prohibited in
+ Editions, but the `repeated_field_encoding` feature can be used to control
+ the behavior.
+ """
+
+ jstype: "FieldOptionsJsType" = aristaproto.enum_field(6)
+ """
+ The jstype option determines the JavaScript type used for values of the
+ field. The option is permitted only for 64 bit integral and fixed types
+ (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
+ is represented as a JavaScript string, which avoids loss of precision that
+ can happen when a large value is converted to a floating point JavaScript number.
+ Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
+ use the JavaScript "number" type. The behavior of the default option
+ JS_NORMAL is implementation dependent.
+
+ This option is an enum to permit additional types to be added, e.g.
+ goog.math.Integer.
+ """
+
+ lazy: bool = aristaproto.bool_field(5)
+ """
+ Should this field be parsed lazily? Lazy applies only to message-type
+ fields. It means that when the outer message is initially parsed, the
+ inner message's contents will not be parsed but instead stored in encoded
+ form. The inner message will actually be parsed when it is first accessed.
+
+ This is only a hint. Implementations are free to choose whether to use
+ eager or lazy parsing regardless of the value of this option. However,
+ setting this option true suggests that the protocol author believes that
+ using lazy parsing on this field is worth the additional bookkeeping
+ overhead typically needed to implement it.
+
+ This option does not affect the public interface of any generated code;
+ all method signatures remain the same. Furthermore, thread-safety of the
+ interface is not affected by this option; const methods remain safe to
+ call from multiple threads concurrently, while non-const methods continue
+ to require exclusive access.
+
+ Note that lazy message fields are still eagerly verified to check for
+ ill-formed wire format or missing required fields. Calling IsInitialized()
+ on the outer message would fail if the inner message has missing required
+ fields. Failed verification would result in parsing failure (except when
+ uninitialized messages are acceptable).
+ """
+
+ unverified_lazy: bool = aristaproto.bool_field(15)
+ """
+ unverified_lazy does no correctness checks on the byte stream. This should
+ only be used where lazy with verification is prohibitive for performance
+ reasons.
+ """
+
+ deprecated: bool = aristaproto.bool_field(3)
+ """
+ Is this field deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for accessors, or it will be completely ignored; in the very least, this
+ is a formalization for deprecating fields.
+ """
+
+ weak: bool = aristaproto.bool_field(10)
+ """For Google-internal migration only. Do not use."""
+
+ debug_redact: bool = aristaproto.bool_field(16)
+ """
+ Indicate that the field value should not be printed out when using debug
+ formats, e.g. when the field contains sensitive credentials.
+ """
+
+ retention: "FieldOptionsOptionRetention" = aristaproto.enum_field(17)
+ targets: List["FieldOptionsOptionTargetType"] = aristaproto.enum_field(19)
+ edition_defaults: List["FieldOptionsEditionDefault"] = aristaproto.message_field(20)
+ features: "FeatureSet" = aristaproto.message_field(21)
+ """Any features defined in the specific edition."""
+
+ feature_support: "FieldOptionsFeatureSupport" = aristaproto.message_field(22)
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+
+@dataclass(eq=False, repr=False)
+class FieldOptionsEditionDefault(aristaproto.Message):
+ edition: "Edition" = aristaproto.enum_field(3)
+ value: str = aristaproto.string_field(2)
+
+
+@dataclass(eq=False, repr=False)
+class FieldOptionsFeatureSupport(aristaproto.Message):
+ """Information about the support window of a feature."""
+
+ edition_introduced: "Edition" = aristaproto.enum_field(1)
+ """
+ The edition that this feature was first available in. In editions
+ earlier than this one, the default assigned to EDITION_LEGACY will be
+ used, and proto files will not be able to override it.
+ """
+
+ edition_deprecated: "Edition" = aristaproto.enum_field(2)
+ """
+ The edition this feature becomes deprecated in. Using this after this
+ edition may trigger warnings.
+ """
+
+ deprecation_warning: str = aristaproto.string_field(3)
+ """
+ The deprecation warning text if this feature is used after the edition it
+ was marked deprecated in.
+ """
+
+ edition_removed: "Edition" = aristaproto.enum_field(4)
+ """
+ The edition this feature is no longer available in. In editions after
+ this one, the last default assigned will be used, and proto files will
+ not be able to override it.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class OneofOptions(aristaproto.Message):
+ features: "FeatureSet" = aristaproto.message_field(1)
+ """Any features defined in the specific edition."""
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+
+@dataclass(eq=False, repr=False)
+class EnumOptions(aristaproto.Message):
+ allow_alias: bool = aristaproto.bool_field(2)
+ """
+ Set this option to true to allow mapping different tag names to the same
+ value.
+ """
+
+ deprecated: bool = aristaproto.bool_field(3)
+ """
+ Is this enum deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for the enum, or it will be completely ignored; in the very least, this
+ is a formalization for deprecating enums.
+ """
+
+ deprecated_legacy_json_field_conflicts: bool = aristaproto.bool_field(6)
+ """
+ Enable the legacy handling of JSON field name conflicts. This lowercases
+ and strips underscores from the fields before comparison in proto3 only.
+ The new behavior takes `json_name` into account and applies to proto2 as
+ well.
+ TODO Remove this legacy behavior once downstream teams have
+ had time to migrate.
+ """
+
+ features: "FeatureSet" = aristaproto.message_field(7)
+ """Any features defined in the specific edition."""
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+ def __post_init__(self) -> None:
+ super().__post_init__()
+ if self.is_set("deprecated_legacy_json_field_conflicts"):
+ warnings.warn(
+ "EnumOptions.deprecated_legacy_json_field_conflicts is deprecated",
+ DeprecationWarning,
+ )
+
+
+@dataclass(eq=False, repr=False)
+class EnumValueOptions(aristaproto.Message):
+ deprecated: bool = aristaproto.bool_field(1)
+ """
+ Is this enum value deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for the enum value, or it will be completely ignored; in the very least,
+ this is a formalization for deprecating enum values.
+ """
+
+ features: "FeatureSet" = aristaproto.message_field(2)
+ """Any features defined in the specific edition."""
+
+ debug_redact: bool = aristaproto.bool_field(3)
+ """
+ Indicate that fields annotated with this enum value should not be printed
+ out when using debug formats, e.g. when the field contains sensitive
+ credentials.
+ """
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+
+@dataclass(eq=False, repr=False)
+class ServiceOptions(aristaproto.Message):
+ features: "FeatureSet" = aristaproto.message_field(34)
+ """Any features defined in the specific edition."""
+
+ deprecated: bool = aristaproto.bool_field(33)
+ """
+ Is this service deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for the service, or it will be completely ignored; in the very least,
+ this is a formalization for deprecating services.
+ """
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+
+@dataclass(eq=False, repr=False)
+class MethodOptions(aristaproto.Message):
+ deprecated: bool = aristaproto.bool_field(33)
+ """
+ Is this method deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for the method, or it will be completely ignored; in the very least,
+ this is a formalization for deprecating methods.
+ """
+
+ idempotency_level: "MethodOptionsIdempotencyLevel" = aristaproto.enum_field(34)
+ features: "FeatureSet" = aristaproto.message_field(35)
+ """Any features defined in the specific edition."""
+
+ uninterpreted_option: List["UninterpretedOption"] = aristaproto.message_field(999)
+ """The parser stores options it doesn't recognize here. See above."""
+
+
+@dataclass(eq=False, repr=False)
+class UninterpretedOption(aristaproto.Message):
+ """
+ A message representing an option the parser does not recognize. This only
+ appears in options protos created by the compiler::Parser class.
+ DescriptorPool resolves these when building Descriptor objects. Therefore,
+ options protos in descriptor objects (e.g. returned by Descriptor::options(),
+ or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
+ in them.
+ """
+
+ name: List["UninterpretedOptionNamePart"] = aristaproto.message_field(2)
+ identifier_value: str = aristaproto.string_field(3)
+ """
+ The value of the uninterpreted option, in whatever type the tokenizer
+ identified it as during parsing. Exactly one of these should be set.
+ """
+
+ positive_int_value: int = aristaproto.uint64_field(4)
+ negative_int_value: int = aristaproto.int64_field(5)
+ double_value: float = aristaproto.double_field(6)
+ string_value: bytes = aristaproto.bytes_field(7)
+ aggregate_value: str = aristaproto.string_field(8)
+
+
+@dataclass(eq=False, repr=False)
+class UninterpretedOptionNamePart(aristaproto.Message):
+ """
+ The name of the uninterpreted option. Each string represents a segment in
+ a dot-separated name. is_extension is true iff a segment represents an
+ extension (denoted with parentheses in options specs in .proto files).
+ E.g., { ["foo", false], ["bar.baz", true], ["moo", false] } represents
+ "foo.(bar.baz).moo".
+ """
+
+ name_part: str = aristaproto.string_field(1)
+ is_extension: bool = aristaproto.bool_field(2)
+
+
+@dataclass(eq=False, repr=False)
+class FeatureSet(aristaproto.Message):
+ """
+ TODO Enums in C++ gencode (and potentially other languages) are
+ not well scoped. This means that each of the feature enums below can clash
+ with each other. The short names we've chosen maximize call-site
+ readability, but leave us very open to this scenario. A future feature will
+ be designed and implemented to handle this, hopefully before we ever hit a
+ conflict here.
+ """
+
+ field_presence: "FeatureSetFieldPresence" = aristaproto.enum_field(1)
+ enum_type: "FeatureSetEnumType" = aristaproto.enum_field(2)
+ repeated_field_encoding: "FeatureSetRepeatedFieldEncoding" = aristaproto.enum_field(
+ 3
+ )
+ utf8_validation: "FeatureSetUtf8Validation" = aristaproto.enum_field(4)
+ message_encoding: "FeatureSetMessageEncoding" = aristaproto.enum_field(5)
+ json_format: "FeatureSetJsonFormat" = aristaproto.enum_field(6)
+
+
+@dataclass(eq=False, repr=False)
+class FeatureSetDefaults(aristaproto.Message):
+ """
+ A compiled specification for the defaults of a set of features. These
+ messages are generated from FeatureSet extensions and can be used to seed
+ feature resolution. The resolution with this object becomes a simple search
+ for the closest matching edition, followed by proto merges.
+ """
+
+ defaults: List[
+ "FeatureSetDefaultsFeatureSetEditionDefault"
+ ] = aristaproto.message_field(1)
+ minimum_edition: "Edition" = aristaproto.enum_field(4)
+ """
+ The minimum supported edition (inclusive) when this was constructed.
+ Editions before this will not have defaults.
+ """
+
+ maximum_edition: "Edition" = aristaproto.enum_field(5)
+ """
+ The maximum known edition (inclusive) when this was constructed. Editions
+ after this will not have reliable defaults.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class FeatureSetDefaultsFeatureSetEditionDefault(aristaproto.Message):
+ """
+ A map from every known edition with a unique set of defaults to its
+ defaults. Not all editions may be contained here. For a given edition,
+ the defaults at the closest matching edition ordered at or before it should
+ be used. This field must be in strict ascending order by edition.
+ """
+
+ edition: "Edition" = aristaproto.enum_field(3)
+ overridable_features: "FeatureSet" = aristaproto.message_field(4)
+ """Defaults of features that can be overridden in this edition."""
+
+ fixed_features: "FeatureSet" = aristaproto.message_field(5)
+ """Defaults of features that can't be overridden in this edition."""
+
+ features: "FeatureSet" = aristaproto.message_field(2)
+ """
+ TODO Deprecate and remove this field, which is just the
+ above two merged.
+ """
+
+
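+# Illustrative sketch, not generated output: the resolution described above
+# is a search for the closest entry at or before the requested edition.
+# Assumes Edition values compare as ints (aristaproto enums are int-based);
+# proto-merging overridable_features and fixed_features is left out.
+def _example_closest_defaults(
+ defaults: "FeatureSetDefaults", edition: "Edition"
+) -> "FeatureSetDefaultsFeatureSetEditionDefault":
+ best = None
+ for entry in defaults.defaults: # entries are in strict ascending order
+ if entry.edition <= edition:
+ best = entry
+ if best is None:
+ raise ValueError("edition predates minimum_edition")
+ return best
+
+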
+@dataclass(eq=False, repr=False)
+class SourceCodeInfo(aristaproto.Message):
+ """
+ Encapsulates information about the original source file from which a
+ FileDescriptorProto was generated.
+ """
+
+ location: List["SourceCodeInfoLocation"] = aristaproto.message_field(1)
+ """
+ A Location identifies a piece of source code in a .proto file which
+ corresponds to a particular definition. This information is intended
+ to be useful to IDEs, code indexers, documentation generators, and similar
+ tools.
+
+ For example, say we have a file like:
+ message Foo {
+ optional string foo = 1;
+ }
+ Let's look at just the field definition:
+ optional string foo = 1;
+ ^ ^^ ^^ ^ ^^^
+ a bc de f ghi
+ We have the following locations:
+ span path represents
+ [a,i) [ 4, 0, 2, 0 ] The whole field definition.
+ [a,b) [ 4, 0, 2, 0, 4 ] The label (optional).
+ [c,d) [ 4, 0, 2, 0, 5 ] The type (string).
+ [e,f) [ 4, 0, 2, 0, 1 ] The name (foo).
+ [g,h) [ 4, 0, 2, 0, 3 ] The number (1).
+
+ Notes:
+ - A location may refer to a repeated field itself (i.e. not to any
+ particular index within it). This is used whenever a set of elements are
+ logically enclosed in a single code segment. For example, an entire
+ extend block (possibly containing multiple extension definitions) will
+ have an outer location whose path refers to the "extensions" repeated
+ field without an index.
+ - Multiple locations may have the same path. This happens when a single
+ logical declaration is spread out across multiple places. The most
+ obvious example is the "extend" block again -- there may be multiple
+ extend blocks in the same scope, each of which will have the same path.
+ - A location's span is not always a subset of its parent's span. For
+ example, the "extendee" of an extension declaration appears at the
+ beginning of the "extend" block and is shared by all extensions within
+ the block.
+ - Just because a location's span is a subset of some other location's span
+ does not mean that it is a descendant. For example, a "group" defines
+ both a type and a field in a single declaration. Thus, the locations
+ corresponding to the type and field and their components will overlap.
+ - Code which tries to interpret locations should probably be designed to
+ ignore those that it doesn't understand, as more types of locations could
+ be recorded in the future.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class SourceCodeInfoLocation(aristaproto.Message):
+ path: List[int] = aristaproto.int32_field(1)
+ """
+ Identifies which part of the FileDescriptorProto was defined at this
+ location.
+
+ Each element is a field number or an index. They form a path from
+ the root FileDescriptorProto to the place where the definition appears.
+ For example, this path:
+ [ 4, 3, 2, 7, 1 ]
+ refers to:
+ file.message_type(3) // 4, 3
+ .field(7) // 2, 7
+ .name() // 1
+ This is because FileDescriptorProto.message_type has field number 4:
+ repeated DescriptorProto message_type = 4;
+ and DescriptorProto.field has field number 2:
+ repeated FieldDescriptorProto field = 2;
+ and FieldDescriptorProto.name has field number 1:
+ optional string name = 1;
+
+ Thus, the above path gives the location of a field name. If we removed
+ the last element:
+ [ 4, 3, 2, 7 ]
+ this path refers to the whole field declaration (from the beginning
+ of the label to the terminating semicolon).
+ """
+
+ span: List[int] = aristaproto.int32_field(2)
+ """
+ Always has exactly three or four elements: start line, start column,
+ end line (optional, otherwise assumed same as start line), end column.
+ These are packed into a single field for efficiency. Note that line
+ and column numbers are zero-based -- typically you will want to add
+ 1 to each before displaying to a user.
+ """
+
+ leading_comments: str = aristaproto.string_field(3)
+ """
+ If this SourceCodeInfo represents a complete declaration, these are any
+ comments appearing before and after the declaration which appear to be
+ attached to the declaration.
+
+ A series of line comments appearing on consecutive lines, with no other
+ tokens appearing on those lines, will be treated as a single comment.
+
+ leading_detached_comments will keep paragraphs of comments that appear
+ before (but not connected to) the current element. Each paragraph,
+ separated by empty lines, will be one comment element in the repeated
+ field.
+
+ Only the comment content is provided; comment markers (e.g. //) are
+ stripped out. For block comments, leading whitespace and an asterisk
+ will be stripped from the beginning of each line other than the first.
+ Newlines are included in the output.
+
+ Examples:
+
+ optional int32 foo = 1; // Comment attached to foo.
+ // Comment attached to bar.
+ optional int32 bar = 2;
+
+ optional string baz = 3;
+ // Comment attached to baz.
+ // Another line attached to baz.
+
+ // Comment attached to moo.
+ //
+ // Another line attached to moo.
+ optional double moo = 4;
+
+ // Detached comment for corge. This is not leading or trailing comments
+ // to moo or corge because there are blank lines separating it from
+ // both.
+
+ // Detached comment for corge paragraph 2.
+
+ optional string corge = 5;
+ /* Block comment attached
+ * to corge. Leading asterisks
+ * will be removed. */
+ /* Block comment attached to
+ * grault. */
+ optional int32 grault = 6;
+
+ // ignored detached comments.
+ """
+
+ trailing_comments: str = aristaproto.string_field(4)
+ leading_detached_comments: List[str] = aristaproto.string_field(6)
+
+
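+# Illustrative sketch, not generated output: resolving the example path
+# [4, 3, 2, 7, 1] from the docstring above against a FileDescriptorProto.
+def _example_resolve_path(fd: "FileDescriptorProto") -> str:
+ message = fd.message_type[3] # 4 = FileDescriptorProto.message_type, index 3
+ fld = message.field[7] # 2 = DescriptorProto.field, index 7
+ return fld.name # 1 = FieldDescriptorProto.name
+
+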
+@dataclass(eq=False, repr=False)
+class GeneratedCodeInfo(aristaproto.Message):
+ """
+ Describes the relationship between generated code and its original source
+ file. A GeneratedCodeInfo message is associated with only one generated
+ source file, but may contain references to different source .proto files.
+ """
+
+ annotation: List["GeneratedCodeInfoAnnotation"] = aristaproto.message_field(1)
+ """
+ An Annotation connects some span of text in generated code to an element
+ of its generating .proto file.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class GeneratedCodeInfoAnnotation(aristaproto.Message):
+ path: List[int] = aristaproto.int32_field(1)
+ """
+ Identifies the element in the original source .proto file. This field
+ is formatted the same as SourceCodeInfo.Location.path.
+ """
+
+ source_file: str = aristaproto.string_field(2)
+ """Identifies the filesystem path to the original source .proto."""
+
+ begin: int = aristaproto.int32_field(3)
+ """
+ Identifies the starting offset in bytes in the generated code
+ that relates to the identified object.
+ """
+
+ end: int = aristaproto.int32_field(4)
+ """
+ Identifies the ending offset in bytes in the generated code that
+ relates to the identified object. The end offset should be one past
+ the last relevant byte (so the length of the text = end - begin).
+ """
+
+ semantic: "GeneratedCodeInfoAnnotationSemantic" = aristaproto.enum_field(5)
+
+
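+# Illustrative sketch, not generated output: begin/end are byte offsets into
+# the generated file, so the annotated span is a simple slice.
+def _example_annotated_span(
+ generated_source: bytes, annotation: "GeneratedCodeInfoAnnotation"
+) -> bytes:
+ return generated_source[annotation.begin : annotation.end]
+
+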
+@dataclass(eq=False, repr=False)
+class Duration(aristaproto.Message):
+ """
+ A Duration represents a signed, fixed-length span of time represented
+ as a count of seconds and fractions of seconds at nanosecond
+ resolution. It is independent of any calendar and concepts like "day"
+ or "month". It is related to Timestamp in that the difference between
+ two Timestamp values is a Duration and it can be added or subtracted
+ from a Timestamp. Range is approximately +-10,000 years.
+
+ # Examples
+
+ Example 1: Compute Duration from two Timestamps in pseudo code.
+
+ Timestamp start = ...;
+ Timestamp end = ...;
+ Duration duration = ...;
+
+ duration.seconds = end.seconds - start.seconds;
+ duration.nanos = end.nanos - start.nanos;
+
+ if (duration.seconds < 0 && duration.nanos > 0) {
+ duration.seconds += 1;
+ duration.nanos -= 1000000000;
+ } else if (duration.seconds > 0 && duration.nanos < 0) {
+ duration.seconds -= 1;
+ duration.nanos += 1000000000;
+ }
+
+ Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+
+ Timestamp start = ...;
+ Duration duration = ...;
+ Timestamp end = ...;
+
+ end.seconds = start.seconds + duration.seconds;
+ end.nanos = start.nanos + duration.nanos;
+
+ if (end.nanos < 0) {
+ end.seconds -= 1;
+ end.nanos += 1000000000;
+ } else if (end.nanos >= 1000000000) {
+ end.seconds += 1;
+ end.nanos -= 1000000000;
+ }
+
+ Example 3: Compute Duration from datetime.timedelta in Python.
+
+ td = datetime.timedelta(days=3, minutes=10)
+ duration = Duration()
+ duration.FromTimedelta(td)
+
+ # JSON Mapping
+
+ In JSON format, the Duration type is encoded as a string rather than an
+ object, where the string ends in the suffix "s" (indicating seconds) and
+ is preceded by the number of seconds, with nanoseconds expressed as
+ fractional seconds. For example, 3 seconds with 0 nanoseconds should be
+ encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
+ be expressed in JSON format as "3.000000001s", and 3 seconds and 1
+ microsecond should be expressed in JSON format as "3.000001s".
+ """
+
+ seconds: int = aristaproto.int64_field(1)
+ """
+ Signed seconds of the span of time. Must be from -315,576,000,000
+ to +315,576,000,000 inclusive. Note: these bounds are computed from:
+ 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
+ """
+
+ nanos: int = aristaproto.int32_field(2)
+ """
+ Signed fractions of a second at nanosecond resolution of the span
+ of time. Durations less than one second are represented with a 0
+ `seconds` field and a positive or negative `nanos` field. For durations
+ of one second or more, a non-zero value for the `nanos` field must be
+ of the same sign as the `seconds` field. Must be from -999,999,999
+ to +999,999,999 inclusive.
+ """
+
+
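+# Illustrative sketch, not generated output: Example 3 above uses the
+# upstream protobuf API; with this dataclass the same conversion can be
+# written directly. Assumes a non-negative timedelta (negative Python
+# timedeltas keep non-negative seconds/microseconds, which would violate
+# the sign rule described for `nanos`).
+def _example_duration_from_timedelta(td) -> "Duration":
+ # td is a datetime.timedelta, e.g. timedelta(days=3, minutes=10)
+ return Duration(
+ seconds=td.days * 86_400 + td.seconds,
+ nanos=td.microseconds * 1_000,
+ )
+
+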
+@dataclass(eq=False, repr=False)
+class Empty(aristaproto.Message):
+ """
+ A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to use it as the request
+ or the response type of an API method. For instance:
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+ }
+ """
+
+ pass
+
+
+@dataclass(eq=False, repr=False)
+class FieldMask(aristaproto.Message):
+ """
+ `FieldMask` represents a set of symbolic field paths, for example:
+
+ paths: "f.a"
+ paths: "f.b.d"
+
+ Here `f` represents a field in some root message, `a` and `b`
+ fields in the message found in `f`, and `d` a field found in the
+ message in `f.b`.
+
+ Field masks are used to specify a subset of fields that should be
+ returned by a get operation or modified by an update operation.
+ Field masks also have a custom JSON encoding (see below).
+
+ # Field Masks in Projections
+
+ When used in the context of a projection, a response message or
+ sub-message is filtered by the API to only contain those fields as
+ specified in the mask. For example, if the mask in the previous
+ example is applied to a response message as follows:
+
+ f {
+ a : 22
+ b {
+ d : 1
+ x : 2
+ }
+ y : 13
+ }
+ z: 8
+
+ The result will not contain specific values for fields x, y and z
+ (their value will be set to the default, and omitted in proto text
+ output):
+
+
+ f {
+ a : 22
+ b {
+ d : 1
+ }
+ }
+
+ A repeated field is not allowed except at the last position of a
+ paths string.
+
+ If a FieldMask object is not present in a get operation, the
+ operation applies to all fields (as if a FieldMask of all fields
+ had been specified).
+
+ Note that a field mask does not necessarily apply to the
+ top-level response message. In case of a REST get operation, the
+ field mask applies directly to the response, but in case of a REST
+ list operation, the mask instead applies to each individual message
+ in the returned resource list. In case of a REST custom method,
+ other definitions may be used. Where the mask applies will be
+ clearly documented together with its declaration in the API. In
+ any case, the effect on the returned resource/resources is required
+ behavior for APIs.
+
+ # Field Masks in Update Operations
+
+ A field mask in update operations specifies which fields of the
+ targeted resource are going to be updated. The API is required
+ to only change the values of the fields as specified in the mask
+ and leave the others untouched. If a resource is passed in to
+ describe the updated values, the API ignores the values of all
+ fields not covered by the mask.
+
+ If a repeated field is specified for an update operation, new values will
+ be appended to the existing repeated field in the target resource. Note that
+ a repeated field is only allowed in the last position of a `paths` string.
+
+ If a sub-message is specified in the last position of the field mask for an
+ update operation, then the new value will be merged into the existing sub-message
+ in the target resource.
+
+ For example, given the target message:
+
+ f {
+ b {
+ d: 1
+ x: 2
+ }
+ c: [1]
+ }
+
+ And an update message:
+
+ f {
+ b {
+ d: 10
+ }
+ c: [2]
+ }
+
+ then if the field mask is:
+
+ paths: ["f.b", "f.c"]
+
+ then the result will be:
+
+ f {
+ b {
+ d: 10
+ x: 2
+ }
+ c: [1, 2]
+ }
+
+ An implementation may provide options to override this default behavior for
+ repeated and message fields.
+
+ In order to reset a field's value to the default, the field must
+ be in the mask and set to the default value in the provided resource.
+ Hence, in order to reset all fields of a resource, provide a default
+ instance of the resource and set all fields in the mask, or do
+ not provide a mask as described below.
+
+ If a field mask is not present on update, the operation applies to
+ all fields (as if a field mask of all fields has been specified).
+ Note that in the presence of schema evolution, this may mean that
+ fields the client does not know and has therefore not filled into
+ the request will be reset to their default. If this is unwanted
+ behavior, a specific service may require a client to always specify
+ a field mask, producing an error if not.
+
+ As with get operations, the location of the resource which
+ describes the updated values in the request message depends on the
+ operation kind. In any case, the effect of the field mask is
+ required to be honored by the API.
+
+ ## Considerations for HTTP REST
+
+ The HTTP kind of an update operation which uses a field mask must
+ be set to PATCH instead of PUT in order to satisfy HTTP semantics
+ (PUT must only be used for full updates).
+
+ # JSON Encoding of Field Masks
+
+ In JSON, a field mask is encoded as a single string where paths are
+ separated by a comma. Field names in each path are converted
+ to/from lower-camel naming conventions.
+
+ As an example, consider the following message declarations:
+
+ message Profile {
+ User user = 1;
+ Photo photo = 2;
+ }
+ message User {
+ string display_name = 1;
+ string address = 2;
+ }
+
+ In proto, a field mask for `Profile` may look like this:
+
+ mask {
+ paths: "user.display_name"
+ paths: "photo"
+ }
+
+ In JSON, the same mask is represented as below:
+
+ {
+ mask: "user.displayName,photo"
+ }
+
+ # Field Masks and Oneof Fields
+
+ Field masks treat fields in oneofs just as regular fields. Consider the
+ following message:
+
+ message SampleMessage {
+ oneof test_oneof {
+ string name = 4;
+ SubMessage sub_message = 9;
+ }
+ }
+
+ The field mask can be:
+
+ mask {
+ paths: "name"
+ }
+
+ Or:
+
+ mask {
+ paths: "sub_message"
+ }
+
+ Note that oneof type names ("test_oneof" in this case) cannot be used in
+ paths.
+
+ ## Field Mask Verification
+
+ The implementation of any API method which has a FieldMask type field in the
+ request should verify the included field paths, and return an
+ `INVALID_ARGUMENT` error if any path is unmappable.
+ """
+
+ paths: List[str] = aristaproto.string_field(1)
+ """The set of field mask paths."""
+
+
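+# Illustrative sketch, not generated output: the mask from the JSON-encoding
+# example above, built directly. Its JSON form would be the single string
+# "user.displayName,photo" per the mapping described in the docstring.
+def _example_profile_mask() -> "FieldMask":
+ return FieldMask(paths=["user.display_name", "photo"])
+
+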
+@dataclass(eq=False, repr=False)
+class Struct(aristaproto.Message):
+ """
+ `Struct` represents a structured data value, consisting of fields
+ which map to dynamically typed values. In some languages, `Struct`
+ might be supported by a native representation. For example, in
+ scripting languages like JS a struct is represented as an
+ object. The details of that representation are described together
+ with the proto support for the language.
+
+ The JSON representation for `Struct` is a JSON object.
+ """
+
+ fields: Dict[str, "Value"] = aristaproto.map_field(
+ 1, aristaproto.TYPE_STRING, aristaproto.TYPE_MESSAGE
+ )
+ """Unordered map of dynamically typed values."""
+
+ @hybridmethod
+ def from_dict(cls: "type[Self]", value: Mapping[str, Any]) -> Self: # type: ignore
+ self = cls()
+ return self.from_dict(value)
+
+ @from_dict.instancemethod
+ def from_dict(self, value: Mapping[str, Any]) -> Self:
+ fields = {**value}
+ for k in fields:
+ if hasattr(fields[k], "from_dict"):
+ fields[k] = fields[k].from_dict()
+
+ self.fields = fields
+ return self
+
+ def to_dict(
+ self,
+ casing: aristaproto.Casing = aristaproto.Casing.CAMEL,
+ include_default_values: bool = False,
+ ) -> Dict[str, Any]:
+ output = {**self.fields}
+ for k in self.fields:
+ if hasattr(self.fields[k], "to_dict"):
+ output[k] = self.fields[k].to_dict(casing, include_default_values)
+ return output
+
+
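+# Illustrative sketch, not generated output: `fields` holds plain Python
+# values, so the from_dict/to_dict defined above act as a shallow copy.
+def _example_struct_roundtrip() -> "Struct":
+ struct = Struct().from_dict({"name": "aristaproto", "count": 3})
+ assert struct.to_dict() == {"name": "aristaproto", "count": 3}
+ return struct
+
+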
+@dataclass(eq=False, repr=False)
+class Value(aristaproto.Message):
+ """
+ `Value` represents a dynamically typed value which can be either
+ null, a number, a string, a boolean, a recursive struct value, or a
+ list of values. A producer of value is expected to set one of these
+ variants. Absence of any variant indicates an error.
+
+ The JSON representation for `Value` is a JSON value.
+ """
+
+ null_value: "NullValue" = aristaproto.enum_field(1, group="kind")
+ """Represents a null value."""
+
+ number_value: float = aristaproto.double_field(2, group="kind")
+ """Represents a double value."""
+
+ string_value: str = aristaproto.string_field(3, group="kind")
+ """Represents a string value."""
+
+ bool_value: bool = aristaproto.bool_field(4, group="kind")
+ """Represents a boolean value."""
+
+ struct_value: "Struct" = aristaproto.message_field(5, group="kind")
+ """Represents a structured value."""
+
+ list_value: "ListValue" = aristaproto.message_field(6, group="kind")
+ """Represents a repeated `Value`."""
+
+
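+# Illustrative sketch, not generated output: the variants above form the
+# "kind" oneof group, so aristaproto.which_one_of reports which one is set.
+def _example_value_kind(value: "Value") -> str:
+ name, _ = aristaproto.which_one_of(value, "kind")
+ return name # e.g. "number_value" for Value(number_value=1.5)
+
+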
+@dataclass(eq=False, repr=False)
+class ListValue(aristaproto.Message):
+ """
+ `ListValue` is a wrapper around a repeated field of values.
+
+ The JSON representation for `ListValue` is a JSON array.
+ """
+
+ values: List["Value"] = aristaproto.message_field(1)
+ """Repeated field of dynamically typed values."""
+
+
+@dataclass(eq=False, repr=False)
+class Timestamp(aristaproto.Message):
+ """
+ A Timestamp represents a point in time independent of any time zone or local
+ calendar, encoded as a count of seconds and fractions of seconds at
+ nanosecond resolution. The count is relative to an epoch at UTC midnight on
+ January 1, 1970, in the proleptic Gregorian calendar which extends the
+ Gregorian calendar backwards to year one.
+
+ All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
+ second table is needed for interpretation, using a [24-hour linear
+ smear](https://developers.google.com/time/smear).
+
+ The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
+ restricting to that range, we ensure that we can convert to and from [RFC
+ 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
+
+ # Examples
+
+ Example 1: Compute Timestamp from POSIX `time()`.
+
+ Timestamp timestamp;
+ timestamp.set_seconds(time(NULL));
+ timestamp.set_nanos(0);
+
+ Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+
+ struct timeval tv;
+ gettimeofday(&tv, NULL);
+
+ Timestamp timestamp;
+ timestamp.set_seconds(tv.tv_sec);
+ timestamp.set_nanos(tv.tv_usec * 1000);
+
+ Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+
+ FILETIME ft;
+ GetSystemTimeAsFileTime(&ft);
+ UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+
+ // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+ // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+ Timestamp timestamp;
+ timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+ timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+
+ Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+
+ long millis = System.currentTimeMillis();
+
+ Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+ .setNanos((int) ((millis % 1000) * 1000000)).build();
+
+ Example 5: Compute Timestamp from Java `Instant.now()`.
+
+ Instant now = Instant.now();
+
+ Timestamp timestamp =
+ Timestamp.newBuilder().setSeconds(now.getEpochSecond())
+ .setNanos(now.getNano()).build();
+
+ Example 6: Compute Timestamp from current time in Python.
+
+ timestamp = Timestamp()
+ timestamp.GetCurrentTime()
+
+ # JSON Mapping
+
+ In JSON format, the Timestamp type is encoded as a string in the
+ [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+ format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+ where {year} is always expressed using four digits while {month}, {day},
+ {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+ seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+ are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+ is required. A proto3 JSON serializer should always use UTC (as indicated by
+ "Z") when printing the Timestamp type and a proto3 JSON parser should be
+ able to accept both UTC and other timezones (as indicated by an offset).
+
+ For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+ 01:30 UTC on January 15, 2017.
+
+ In JavaScript, one can convert a Date object to this format using the
+ standard
+ [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
+ method. In Python, a standard `datetime.datetime` object can be converted
+ to this format using
+ [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
+ the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
+ the Joda Time's [`ISODateTimeFormat.dateTime()`](
+ http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()
+ ) to obtain a formatter capable of generating timestamps in this format.
+ """
+
+ seconds: int = aristaproto.int64_field(1)
+ """
+ Represents seconds of UTC time since Unix epoch
+ 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
+ 9999-12-31T23:59:59Z inclusive.
+ """
+
+ nanos: int = aristaproto.int32_field(2)
+ """
+ Non-negative fractions of a second at nanosecond resolution. Negative
+ second values with fractions must still have non-negative nanos values
+ that count forward in time. Must be from 0 to 999,999,999
+ inclusive.
+ """
+
+
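+# Illustrative sketch, not generated output: building a Timestamp for the
+# current UTC time using only the two fields above.
+def _example_timestamp_now() -> "Timestamp":
+ from datetime import datetime, timezone # local import; kept self-contained
+
+ now = datetime.now(timezone.utc)
+ return Timestamp(seconds=int(now.timestamp()), nanos=now.microsecond * 1_000)
+
+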
+@dataclass(eq=False, repr=False)
+class DoubleValue(aristaproto.Message):
+ """
+ Wrapper message for `double`.
+
+ The JSON representation for `DoubleValue` is a JSON number.
+ """
+
+ value: float = aristaproto.double_field(1)
+ """The double value."""
+
+
+@dataclass(eq=False, repr=False)
+class FloatValue(aristaproto.Message):
+ """
+ Wrapper message for `float`.
+
+ The JSON representation for `FloatValue` is a JSON number.
+ """
+
+ value: float = aristaproto.float_field(1)
+ """The float value."""
+
+
+@dataclass(eq=False, repr=False)
+class Int64Value(aristaproto.Message):
+ """
+ Wrapper message for `int64`.
+
+ The JSON representation for `Int64Value` is a JSON string.
+ """
+
+ value: int = aristaproto.int64_field(1)
+ """The int64 value."""
+
+
+@dataclass(eq=False, repr=False)
+class UInt64Value(aristaproto.Message):
+ """
+ Wrapper message for `uint64`.
+
+ The JSON representation for `UInt64Value` is a JSON string.
+ """
+
+ value: int = aristaproto.uint64_field(1)
+ """The uint64 value."""
+
+
+@dataclass(eq=False, repr=False)
+class Int32Value(aristaproto.Message):
+ """
+ Wrapper message for `int32`.
+
+ The JSON representation for `Int32Value` is a JSON number.
+ """
+
+ value: int = aristaproto.int32_field(1)
+ """The int32 value."""
+
+
+@dataclass(eq=False, repr=False)
+class UInt32Value(aristaproto.Message):
+ """
+ Wrapper message for `uint32`.
+
+ The JSON representation for `UInt32Value` is a JSON number.
+ """
+
+ value: int = aristaproto.uint32_field(1)
+ """The uint32 value."""
+
+
+@dataclass(eq=False, repr=False)
+class BoolValue(aristaproto.Message):
+ """
+ Wrapper message for `bool`.
+
+ The JSON representation for `BoolValue` is JSON `true` and `false`.
+ """
+
+ value: bool = aristaproto.bool_field(1)
+ """The bool value."""
+
+
+@dataclass(eq=False, repr=False)
+class StringValue(aristaproto.Message):
+ """
+ Wrapper message for `string`.
+
+ The JSON representation for `StringValue` is a JSON string.
+ """
+
+ value: str = aristaproto.string_field(1)
+ """The string value."""
+
+
+@dataclass(eq=False, repr=False)
+class BytesValue(aristaproto.Message):
+ """
+ Wrapper message for `bytes`.
+
+ The JSON representation for `BytesValue` is a JSON string.
+ """
+
+ value: bytes = aristaproto.bytes_field(1)
+ """The bytes value."""
diff --git a/src/aristaproto/lib/std/google/protobuf/compiler/__init__.py b/src/aristaproto/lib/std/google/protobuf/compiler/__init__.py
new file mode 100644
index 0000000..a26dc86
--- /dev/null
+++ b/src/aristaproto/lib/std/google/protobuf/compiler/__init__.py
@@ -0,0 +1,198 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# sources: google/protobuf/compiler/plugin.proto
+# plugin: python-aristaproto
+# This file has been @generated
+
+from dataclasses import dataclass
+from typing import List
+
+import aristaproto
+import aristaproto.lib.google.protobuf as aristaproto_lib_google_protobuf
+
+
+class CodeGeneratorResponseFeature(aristaproto.Enum):
+ """Sync with code_generator.h."""
+
+ FEATURE_NONE = 0
+ FEATURE_PROTO3_OPTIONAL = 1
+ FEATURE_SUPPORTS_EDITIONS = 2
+
+
+@dataclass(eq=False, repr=False)
+class Version(aristaproto.Message):
+ """The version number of protocol compiler."""
+
+ major: int = aristaproto.int32_field(1)
+ minor: int = aristaproto.int32_field(2)
+ patch: int = aristaproto.int32_field(3)
+ suffix: str = aristaproto.string_field(4)
+ """
+ A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
+ be empty for mainline stable releases.
+ """
+
+
+@dataclass(eq=False, repr=False)
+class CodeGeneratorRequest(aristaproto.Message):
+ """An encoded CodeGeneratorRequest is written to the plugin's stdin."""
+
+ file_to_generate: List[str] = aristaproto.string_field(1)
+ """
+ The .proto files that were explicitly listed on the command-line. The
+ code generator should generate code only for these files. Each file's
+ descriptor will be included in proto_file, below.
+ """
+
+ parameter: str = aristaproto.string_field(2)
+ """The generator parameter passed on the command-line."""
+
+ proto_file: List[
+ "aristaproto_lib_google_protobuf.FileDescriptorProto"
+ ] = aristaproto.message_field(15)
+ """
+ FileDescriptorProtos for all files in files_to_generate and everything
+ they import. The files will appear in topological order, so each file
+ appears before any file that imports it.
+
+ Note: the files listed in files_to_generate will include runtime-retention
+ options only, but all other files will include source-retention options.
+ The source_file_descriptors field below is available in case you need
+ source-retention options for files_to_generate.
+
+ protoc guarantees that all proto_files will be written after
+ the fields above, even though this is not technically guaranteed by the
+ protobuf wire format. This theoretically could allow a plugin to stream
+ in the FileDescriptorProtos and handle them one by one rather than read
+ the entire set into memory at once. However, as of this writing, this
+ is not similarly optimized on protoc's end -- it will store all fields in
+ memory at once before sending them to the plugin.
+
+ Type names of fields and extensions in the FileDescriptorProto are always
+ fully qualified.
+ """
+
+ source_file_descriptors: List[
+ "aristaproto_lib_google_protobuf.FileDescriptorProto"
+ ] = aristaproto.message_field(17)
+ """
+ File descriptors with all options, including source-retention options.
+ These descriptors are only provided for the files listed in
+ files_to_generate.
+ """
+
+ compiler_version: "Version" = aristaproto.message_field(3)
+ """The version number of protocol compiler."""
+
+
+@dataclass(eq=False, repr=False)
+class CodeGeneratorResponse(aristaproto.Message):
+ """The plugin writes an encoded CodeGeneratorResponse to stdout."""
+
+ error: str = aristaproto.string_field(1)
+ """
+ Error message. If non-empty, code generation failed. The plugin process
+ should exit with status code zero even if it reports an error in this way.
+
+ This should be used to indicate errors in .proto files which prevent the
+ code generator from generating correct code. Errors which indicate a
+ problem in protoc itself -- such as the input CodeGeneratorRequest being
+ unparseable -- should be reported by writing a message to stderr and
+ exiting with a non-zero status code.
+ """
+
+ supported_features: int = aristaproto.uint64_field(2)
+ """
+ A bitmask of the features that the code generator supports.
+ This is a bitwise "or" of values from the Feature enum.
+ """
+
+ minimum_edition: int = aristaproto.int32_field(3)
+ """
+ The minimum edition this plugin supports. This will be treated as an
+ Edition enum, but we want to allow unknown values. It should be specified
+ according to the edition enum value, *not* the edition number. Only takes
+ effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
+ """
+
+ maximum_edition: int = aristaproto.int32_field(4)
+ """
+ The maximum edition this plugin supports. This will be treated as an
+ Edition enum, but we want to allow unknown values. It should be specified
+ according to the edition enum value, *not* the edition number. Only takes
+ effect for plugins that have FEATURE_SUPPORTS_EDITIONS set.
+ """
+
+ file: List["CodeGeneratorResponseFile"] = aristaproto.message_field(15)
+
+
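+# Illustrative sketch, not generated output: supported_features is a bitwise
+# OR of CodeGeneratorResponseFeature values (int-based enums assumed).
+def _example_supported_features() -> int:
+ return int(
+ CodeGeneratorResponseFeature.FEATURE_PROTO3_OPTIONAL
+ | CodeGeneratorResponseFeature.FEATURE_SUPPORTS_EDITIONS
+ )
+
+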
+@dataclass(eq=False, repr=False)
+class CodeGeneratorResponseFile(aristaproto.Message):
+ """Represents a single generated file."""
+
+ name: str = aristaproto.string_field(1)
+ """
+ The file name, relative to the output directory. The name must not
+ contain "." or ".." components and must be relative, not be absolute (so,
+ the file cannot lie outside the output directory). "/" must be used as
+ the path separator, not "\".
+
+ If the name is omitted, the content will be appended to the previous
+ file. This allows the generator to break large files into small chunks,
+ and allows the generated text to be streamed back to protoc so that large
+ files need not reside completely in memory at one time. Note that as of
+ this writing protoc does not optimize for this -- it will read the entire
+ CodeGeneratorResponse before writing files to disk.
+ """
+
+ insertion_point: str = aristaproto.string_field(2)
+ """
+ If non-empty, indicates that the named file should already exist, and the
+ content here is to be inserted into that file at a defined insertion
+ point. This feature allows a code generator to extend the output
+ produced by another code generator. The original generator may provide
+ insertion points by placing special annotations in the file that look
+ like:
+ @@protoc_insertion_point(NAME)
+ The annotation can have arbitrary text before and after it on the line,
+ which allows it to be placed in a comment. NAME should be replaced with
+ an identifier naming the point -- this is what other generators will use
+ as the insertion_point. Code inserted at this point will be placed
+ immediately above the line containing the insertion point (thus multiple
+ insertions to the same point will come out in the order they were added).
+ The double-@ is intended to make it unlikely that the generated code
+ could contain things that look like insertion points by accident.
+
+ For example, the C++ code generator places the following line in the
+ .pb.h files that it generates:
+ // @@protoc_insertion_point(namespace_scope)
+ This line appears within the scope of the file's package namespace, but
+ outside of any particular class. Another plugin can then specify the
+ insertion_point "namespace_scope" to generate additional classes or
+ other declarations that should be placed in this scope.
+
+ Note that if the line containing the insertion point begins with
+ whitespace, the same whitespace will be added to every line of the
+ inserted text. This is useful for languages like Python, where
+ indentation matters. In these languages, the insertion point comment
+ should be indented the same amount as any inserted code will need to be
+ in order to work correctly in that context.
+
+ The code generator that generates the initial file and the one which
+ inserts into it must both run as part of a single invocation of protoc.
+ Code generators are executed in the order in which they appear on the
+ command line.
+
+ If |insertion_point| is present, |name| must also be present.
+ """
+
+ content: str = aristaproto.string_field(15)
+ """The file contents."""
+
+ generated_code_info: "aristaproto_lib_google_protobuf.GeneratedCodeInfo" = (
+ aristaproto.message_field(16)
+ )
+ """
+ Information describing the file content being inserted. If an insertion
+ point is used, this information will be appropriately offset and inserted
+ into the code generation metadata for the generated files.
+ """
diff --git a/src/aristaproto/plugin/__init__.py b/src/aristaproto/plugin/__init__.py
new file mode 100644
index 0000000..c28a133
--- /dev/null
+++ b/src/aristaproto/plugin/__init__.py
@@ -0,0 +1 @@
+from .main import main
diff --git a/src/aristaproto/plugin/__main__.py b/src/aristaproto/plugin/__main__.py
new file mode 100644
index 0000000..bd95dae
--- /dev/null
+++ b/src/aristaproto/plugin/__main__.py
@@ -0,0 +1,4 @@
+from .main import main
+
+
+main()
diff --git a/src/aristaproto/plugin/compiler.py b/src/aristaproto/plugin/compiler.py
new file mode 100644
index 0000000..4bbcc48
--- /dev/null
+++ b/src/aristaproto/plugin/compiler.py
@@ -0,0 +1,50 @@
+import os.path
+
+
+try:
+ # aristaproto[compiler] specific dependencies
+ import black
+ import isort.api
+ import jinja2
+except ImportError as err:
+ print(
+ "\033[31m"
+ f"Unable to import `{err.name}` from aristaproto plugin! "
+ "Please ensure that you've installed aristaproto as "
+ '`pip install "aristaproto[compiler]"` so that compiler dependencies '
+ "are included."
+ "\033[0m"
+ )
+ raise SystemExit(1)
+
+from .models import OutputTemplate
+
+
+def outputfile_compiler(output_file: OutputTemplate) -> str:
+ templates_folder = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), "..", "templates")
+ )
+
+ env = jinja2.Environment(
+ trim_blocks=True,
+ lstrip_blocks=True,
+ loader=jinja2.FileSystemLoader(templates_folder),
+ )
+ template = env.get_template("template.py.j2")
+
+ code = template.render(output_file=output_file)
+ code = isort.api.sort_code_string(
+ code=code,
+ show_diff=False,
+ py_version=37,
+ profile="black",
+ combine_as_imports=True,
+ lines_after_imports=2,
+ quiet=True,
+ force_grid_wrap=2,
+ known_third_party=["grpclib", "aristaproto"],
+ )
+ return black.format_str(
+ src_contents=code,
+ mode=black.Mode(),
+ )
diff --git a/src/aristaproto/plugin/main.py b/src/aristaproto/plugin/main.py
new file mode 100755
index 0000000..aff3614
--- /dev/null
+++ b/src/aristaproto/plugin/main.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+
+import os
+import sys
+
+from aristaproto.lib.google.protobuf.compiler import (
+ CodeGeneratorRequest,
+ CodeGeneratorResponse,
+)
+from aristaproto.plugin.models import monkey_patch_oneof_index
+from aristaproto.plugin.parser import generate_code
+
+
+def main() -> None:
+ """The plugin's main entry point."""
+ # Read request message from stdin
+ data = sys.stdin.buffer.read()
+
+ # Apply workaround for proto2/3 differences in protoc messages
+ monkey_patch_oneof_index()
+
+ # Parse request
+ request = CodeGeneratorRequest()
+ request.parse(data)
+
+ dump_file = os.getenv("ARISTAPROTO_DUMP")
+ if dump_file:
+ dump_request(dump_file, request)
+
+ # Generate code
+ response = generate_code(request)
+
+ # Serialise response message
+ output = response.SerializeToString()
+
+ # Write to stdout
+ sys.stdout.buffer.write(output)
+
+
+def dump_request(dump_file: str, request: CodeGeneratorRequest) -> None:
+ """
+ For developers: supports running plugin.py standalone so it's possible to debug it.
+ Run protoc (or generate.py) with ARISTAPROTO_DUMP="yourfile.bin" to write the request to a file.
+ Then run plugin.py from your IDE in debugging mode, and redirect stdin to the file.
+ """
+ with open(str(dump_file), "wb") as fh:
+ sys.stderr.write(f"\033[31mWriting input from protoc to: {dump_file}\033[0m\n")
+ fh.write(request.SerializeToString())
+
+
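+def _example_replay_dump(dump_file: str) -> CodeGeneratorRequest:
+ """Illustrative sketch, not part of the plugin protocol.
+
+ Parses a request previously written by dump_request, mirroring how main()
+ reads it from stdin.
+ """
+ with open(dump_file, "rb") as fh:
+ data = fh.read()
+ request = CodeGeneratorRequest()
+ request.parse(data)
+ return request
+
+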
+if __name__ == "__main__":
+ main()
diff --git a/src/aristaproto/plugin/models.py b/src/aristaproto/plugin/models.py
new file mode 100644
index 0000000..484b40d
--- /dev/null
+++ b/src/aristaproto/plugin/models.py
@@ -0,0 +1,851 @@
+"""Plugin model dataclasses.
+
+These classes are meant to be an intermediate representation
+of protobuf objects. They are used to organize the data collected during parsing.
+
+The general intention is to create a doubly-linked tree-like structure
+with the following types of references:
+- Downwards references: from message -> fields, from output package -> messages
+or from service -> service methods
+- Upwards references: from field -> message, message -> package.
+- Input/output message references: from a service method to its corresponding
+input/output messages, which may even be in another package.
+
+There are convenience methods to allow climbing up and down this tree, for
+example to retrieve the list of all messages that are in the same package as
+the current message.
+
+Most of these classes take as inputs:
+- proto_obj: A reference to its corresponding protobuf object as
+presented by the protoc plugin.
+- parent: a reference to the parent object in the tree.
+
+With this information, the class is able to expose attributes,
+such as a pythonized name, that will be calculated from proto_obj.
+
+The instantiation should also attach a reference to the new object
+into the corresponding place within its parent object. For example,
+instantiating field `A` with parent message `B` should add a
+reference to `A` to `B`'s `fields` attribute.
+"""
+
+
+import builtins
+import re
+import textwrap
+from dataclasses import (
+ dataclass,
+ field,
+)
+from typing import (
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ Type,
+ Union,
+)
+
+import aristaproto
+from aristaproto import which_one_of
+from aristaproto.casing import sanitize_name
+from aristaproto.compile.importing import (
+ get_type_reference,
+ parse_source_type_name,
+)
+from aristaproto.compile.naming import (
+ pythonize_class_name,
+ pythonize_enum_member_name,
+ pythonize_field_name,
+ pythonize_method_name,
+)
+from aristaproto.lib.google.protobuf import (
+ DescriptorProto,
+ EnumDescriptorProto,
+ Field,
+ FieldDescriptorProto,
+ FieldDescriptorProtoLabel,
+ FieldDescriptorProtoType,
+ FileDescriptorProto,
+ MethodDescriptorProto,
+)
+from aristaproto.lib.google.protobuf.compiler import CodeGeneratorRequest
+
+
+# Create a unique placeholder to deal with
+# https://stackoverflow.com/questions/51575931/class-inheritance-in-python-3-7-dataclasses
+PLACEHOLDER = object()
+
+# Organize proto types into categories
+PROTO_FLOAT_TYPES = (
+ FieldDescriptorProtoType.TYPE_DOUBLE, # 1
+ FieldDescriptorProtoType.TYPE_FLOAT, # 2
+)
+PROTO_INT_TYPES = (
+ FieldDescriptorProtoType.TYPE_INT64, # 3
+ FieldDescriptorProtoType.TYPE_UINT64, # 4
+ FieldDescriptorProtoType.TYPE_INT32, # 5
+ FieldDescriptorProtoType.TYPE_FIXED64, # 6
+ FieldDescriptorProtoType.TYPE_FIXED32, # 7
+ FieldDescriptorProtoType.TYPE_UINT32, # 13
+ FieldDescriptorProtoType.TYPE_SFIXED32, # 15
+ FieldDescriptorProtoType.TYPE_SFIXED64, # 16
+ FieldDescriptorProtoType.TYPE_SINT32, # 17
+ FieldDescriptorProtoType.TYPE_SINT64, # 18
+)
+PROTO_BOOL_TYPES = (FieldDescriptorProtoType.TYPE_BOOL,) # 8
+PROTO_STR_TYPES = (FieldDescriptorProtoType.TYPE_STRING,) # 9
+PROTO_BYTES_TYPES = (FieldDescriptorProtoType.TYPE_BYTES,) # 12
+PROTO_MESSAGE_TYPES = (
+ FieldDescriptorProtoType.TYPE_MESSAGE, # 11
+ FieldDescriptorProtoType.TYPE_ENUM, # 14
+)
+PROTO_MAP_TYPES = (FieldDescriptorProtoType.TYPE_MESSAGE,) # 11
+PROTO_PACKED_TYPES = (
+ FieldDescriptorProtoType.TYPE_DOUBLE, # 1
+ FieldDescriptorProtoType.TYPE_FLOAT, # 2
+ FieldDescriptorProtoType.TYPE_INT64, # 3
+ FieldDescriptorProtoType.TYPE_UINT64, # 4
+ FieldDescriptorProtoType.TYPE_INT32, # 5
+ FieldDescriptorProtoType.TYPE_FIXED64, # 6
+ FieldDescriptorProtoType.TYPE_FIXED32, # 7
+ FieldDescriptorProtoType.TYPE_BOOL, # 8
+ FieldDescriptorProtoType.TYPE_UINT32, # 13
+ FieldDescriptorProtoType.TYPE_SFIXED32, # 15
+ FieldDescriptorProtoType.TYPE_SFIXED64, # 16
+ FieldDescriptorProtoType.TYPE_SINT32, # 17
+ FieldDescriptorProtoType.TYPE_SINT64, # 18
+)
+
+
+def monkey_patch_oneof_index():
+ """
+ The compiler message types are written for proto2, but we read them as proto3.
+ For this to work in the case of the oneof_index fields, which depend on being able
+ to tell whether they were set, we have to treat them as oneof fields. This method
+ monkey patches the generated classes after the fact to force this behaviour.
+ """
+ object.__setattr__(
+ FieldDescriptorProto.__dataclass_fields__["oneof_index"].metadata[
+ "aristaproto"
+ ],
+ "group",
+ "oneof_index",
+ )
+ object.__setattr__(
+ Field.__dataclass_fields__["oneof_index"].metadata["aristaproto"],
+ "group",
+ "oneof_index",
+ )
+
+
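+# Illustrative sketch: the presence check this patch enables. With
+# oneof_index in a synthetic group, which_one_of can distinguish "set to 0"
+# from "not set" (the proto2/proto3 difference described above).
+def _example_is_oneof(proto: FieldDescriptorProto) -> bool:
+ return which_one_of(proto, "oneof_index")[0] == "oneof_index"
+
+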
+def get_comment(
+ proto_file: "FileDescriptorProto", path: List[int], indent: int = 4
+) -> str:
+ pad = " " * indent
+ for sci_loc in proto_file.source_code_info.location:
+ if list(sci_loc.path) == path and sci_loc.leading_comments:
+ lines = sci_loc.leading_comments.strip().replace("\t", " ").split("\n")
+ # This is a field, message, enum, service, or method
+ if len(lines) == 1 and len(lines[0]) < 79 - indent - 6:
+ lines[0] = lines[0].strip('"')
+ # A short single-line comment becomes a one-line docstring.
+ return f'{pad}"""{lines[0]}"""'
+ else:
+ # rstrip each padded line to drop trailing spaces, including on blank lines.
+ padded = [f"\n{pad}{line}".rstrip(" ") for line in lines]
+ joined = "".join(padded)
+ return f'{pad}"""{joined}\n{pad}"""'
+
+ return ""
+
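+# For instance (illustrative), a single-line leading comment "A user." on a
+# message, rendered with the default indent, comes back as:
+#
+#     '    """A user."""'
+#
+# whereas multi-line comments are padded line by line into a block docstring.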
+
+class ProtoContentBase:
+ """Methods common to MessageCompiler, ServiceCompiler and ServiceMethodCompiler."""
+
+ source_file: FileDescriptorProto
+ path: List[int]
+ comment_indent: int = 4
+    parent: Union["MessageCompiler", "ServiceCompiler", "OutputTemplate"]
+
+ __dataclass_fields__: Dict[str, object]
+
+ def __post_init__(self) -> None:
+ """Checks that no fake default fields were left as placeholders."""
+        for field_name in self.__dataclass_fields__:
+            if getattr(self, field_name) is PLACEHOLDER:
+                raise ValueError(f"`{field_name}` is a required field.")
+
+ @property
+ def output_file(self) -> "OutputTemplate":
+ current = self
+ while not isinstance(current, OutputTemplate):
+ current = current.parent
+ return current
+
+ @property
+ def request(self) -> "PluginRequestCompiler":
+ current = self
+ while not isinstance(current, OutputTemplate):
+ current = current.parent
+ return current.parent_request
+
+ @property
+ def comment(self) -> str:
+ """Crawl the proto source code and retrieve comments
+ for this object.
+ """
+ return get_comment(
+ proto_file=self.source_file, path=self.path, indent=self.comment_indent
+ )
+
+
+@dataclass
+class PluginRequestCompiler:
+ plugin_request_obj: CodeGeneratorRequest
+ output_packages: Dict[str, "OutputTemplate"] = field(default_factory=dict)
+
+ @property
+ def all_messages(self) -> List["MessageCompiler"]:
+ """All of the messages in this request.
+
+ Returns
+ -------
+ List[MessageCompiler]
+ List of all of the messages in this request.
+ """
+ return [
+ msg for output in self.output_packages.values() for msg in output.messages
+ ]
+
+
+@dataclass
+class OutputTemplate:
+ """Representation of an output .py file.
+
+ Each output file corresponds to a .proto input file,
+ but may need references to other .proto files to be
+ built.
+ """
+
+ parent_request: PluginRequestCompiler
+ package_proto_obj: FileDescriptorProto
+    input_files: List["FileDescriptorProto"] = field(default_factory=list)
+ imports: Set[str] = field(default_factory=set)
+ datetime_imports: Set[str] = field(default_factory=set)
+ typing_imports: Set[str] = field(default_factory=set)
+ pydantic_imports: Set[str] = field(default_factory=set)
+ builtins_import: bool = False
+ messages: List["MessageCompiler"] = field(default_factory=list)
+ enums: List["EnumDefinitionCompiler"] = field(default_factory=list)
+ services: List["ServiceCompiler"] = field(default_factory=list)
+ imports_type_checking_only: Set[str] = field(default_factory=set)
+ pydantic_dataclasses: bool = False
+ output: bool = True
+
+ @property
+ def package(self) -> str:
+ """Name of input package.
+
+ Returns
+ -------
+ str
+ Name of input package.
+ """
+ return self.package_proto_obj.package
+
+ @property
+ def input_filenames(self) -> Iterable[str]:
+ """Names of the input files used to build this output.
+
+ Returns
+ -------
+ Iterable[str]
+ Names of the input files used to build this output.
+ """
+ return sorted(f.name for f in self.input_files)
+
+ @property
+ def python_module_imports(self) -> Set[str]:
+ imports = set()
+        if any(message.has_deprecated_fields for message in self.messages):
+ imports.add("warnings")
+ if self.builtins_import:
+ imports.add("builtins")
+ return imports
+
+
+@dataclass
+class MessageCompiler(ProtoContentBase):
+ """Representation of a protobuf message."""
+
+ source_file: FileDescriptorProto
+ parent: Union["MessageCompiler", OutputTemplate] = PLACEHOLDER
+ proto_obj: DescriptorProto = PLACEHOLDER
+ path: List[int] = PLACEHOLDER
+ fields: List[Union["FieldCompiler", "MessageCompiler"]] = field(
+ default_factory=list
+ )
+ deprecated: bool = field(default=False, init=False)
+ builtins_types: Set[str] = field(default_factory=set)
+
+ def __post_init__(self) -> None:
+ # Add message to output file
+ if isinstance(self.parent, OutputTemplate):
+ if isinstance(self, EnumDefinitionCompiler):
+ self.output_file.enums.append(self)
+ else:
+ self.output_file.messages.append(self)
+ self.deprecated = self.proto_obj.options.deprecated
+ super().__post_init__()
+
+ @property
+ def proto_name(self) -> str:
+ return self.proto_obj.name
+
+ @property
+ def py_name(self) -> str:
+ return pythonize_class_name(self.proto_name)
+
+ @property
+ def annotation(self) -> str:
+ if self.repeated:
+ return f"List[{self.py_name}]"
+ return self.py_name
+
+ @property
+ def deprecated_fields(self) -> Iterator[str]:
+ for f in self.fields:
+ if f.deprecated:
+ yield f.py_name
+
+ @property
+ def has_deprecated_fields(self) -> bool:
+ return any(self.deprecated_fields)
+
+ @property
+ def has_oneof_fields(self) -> bool:
+ return any(isinstance(field, OneOfFieldCompiler) for field in self.fields)
+
+ @property
+ def has_message_field(self) -> bool:
+ return any(
+ (
+ field.proto_obj.type in PROTO_MESSAGE_TYPES
+ for field in self.fields
+ if isinstance(field.proto_obj, FieldDescriptorProto)
+ )
+ )
+
+
+def is_map(
+ proto_field_obj: FieldDescriptorProto, parent_message: DescriptorProto
+) -> bool:
+ """True if proto_field_obj is a map, otherwise False."""
+ if proto_field_obj.type == FieldDescriptorProtoType.TYPE_MESSAGE:
+ if not hasattr(parent_message, "nested_type"):
+ return False
+
+ # This might be a map...
+ message_type = proto_field_obj.type_name.split(".").pop().lower()
+ map_entry = f"{proto_field_obj.name.replace('_', '').lower()}entry"
+ if message_type == map_entry:
+ for nested in parent_message.nested_type: # parent message
+ if (
+ nested.name.replace("_", "").lower() == map_entry
+ and nested.options.map_entry
+ ):
+ return True
+ return False
+
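+# Background for the check above: given ``map<string, int32> tags = 1;``,
+# protoc synthesizes a nested message named ``TagsEntry`` with
+# ``options.map_entry`` set, and declares the field as a repeated TagsEntry.
+# Illustrative descriptor shape:
+#
+#     proto_field_obj.type_name.split(".")[-1]          ->  "TagsEntry"
+#     parent_message.nested_type[i].name                ->  "TagsEntry"
+#     parent_message.nested_type[i].options.map_entry   ->  True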
+
+def is_oneof(proto_field_obj: FieldDescriptorProto) -> bool:
+ """
+ True if proto_field_obj is a OneOf, otherwise False.
+
+ .. warning::
+        Because the message from protoc is defined in proto2 while aristaproto reads
+        it as proto3, and because interpreting the FieldDescriptorProto.oneof_index
+        field requires distinguishing between default and unset values (which proto3
+        doesn't support), we have to hack the generated FieldDescriptorProto class
+        for this to work. The hack consists of setting group="oneof_index" in the
+        field metadata, essentially making oneof_index the sole member of a oneof
+        group, which lets us tell whether it was set via the which_one_of interface.
+ """
+
+ return (
+ not proto_field_obj.proto3_optional
+ and which_one_of(proto_field_obj, "oneof_index")[0] == "oneof_index"
+ )
+
+
+@dataclass
+class FieldCompiler(MessageCompiler):
+ parent: MessageCompiler = PLACEHOLDER
+ proto_obj: FieldDescriptorProto = PLACEHOLDER
+
+ def __post_init__(self) -> None:
+ # Add field to message
+ self.parent.fields.append(self)
+ # Check for new imports
+ self.add_imports_to(self.output_file)
+ super().__post_init__() # call FieldCompiler-> MessageCompiler __post_init__
+
+ def get_field_string(self, indent: int = 4) -> str:
+ """Construct string representation of this field as a field."""
+ name = f"{self.py_name}"
+ annotations = f": {self.annotation}"
+ field_args = ", ".join(
+ ([""] + self.aristaproto_field_args) if self.aristaproto_field_args else []
+ )
+ aristaproto_field_type = (
+ f"aristaproto.{self.field_type}_field({self.proto_obj.number}{field_args})"
+ )
+ if self.py_name in dir(builtins):
+ self.parent.builtins_types.add(self.py_name)
+ return f"{name}{annotations} = {aristaproto_field_type}"
+
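+    # Example output (illustrative): ``int32 id = 1;`` renders as
+    # ``id: int = aristaproto.int32_field(1)``, while a proto3
+    # ``optional string name = 2;`` renders as
+    # ``name: Optional[str] = aristaproto.string_field(2, optional=True)``.
+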
+ @property
+ def aristaproto_field_args(self) -> List[str]:
+ args = []
+ if self.field_wraps:
+ args.append(f"wraps={self.field_wraps}")
+ if self.optional:
+ args.append(f"optional=True")
+ return args
+
+ @property
+ def datetime_imports(self) -> Set[str]:
+ imports = set()
+ annotation = self.annotation
+ # FIXME: false positives - e.g. `MyDatetimedelta`
+ if "timedelta" in annotation:
+ imports.add("timedelta")
+ if "datetime" in annotation:
+ imports.add("datetime")
+ return imports
+
+ @property
+ def typing_imports(self) -> Set[str]:
+ imports = set()
+ annotation = self.annotation
+ if "Optional[" in annotation:
+ imports.add("Optional")
+ if "List[" in annotation:
+ imports.add("List")
+ if "Dict[" in annotation:
+ imports.add("Dict")
+ return imports
+
+ @property
+ def pydantic_imports(self) -> Set[str]:
+ return set()
+
+ @property
+ def use_builtins(self) -> bool:
+ return self.py_type in self.parent.builtins_types or (
+ self.py_type == self.py_name and self.py_name in dir(builtins)
+ )
+
+ def add_imports_to(self, output_file: OutputTemplate) -> None:
+ output_file.datetime_imports.update(self.datetime_imports)
+ output_file.typing_imports.update(self.typing_imports)
+ output_file.pydantic_imports.update(self.pydantic_imports)
+ output_file.builtins_import = output_file.builtins_import or self.use_builtins
+
+ @property
+ def field_wraps(self) -> Optional[str]:
+ """Returns aristaproto wrapped field type or None."""
+ match_wrapper = re.match(
+ r"\.google\.protobuf\.(.+)Value$", self.proto_obj.type_name
+ )
+ if match_wrapper:
+ wrapped_type = "TYPE_" + match_wrapper.group(1).upper()
+ if hasattr(aristaproto, wrapped_type):
+ return f"aristaproto.{wrapped_type}"
+ return None
+
+ @property
+ def repeated(self) -> bool:
+ return (
+ self.proto_obj.label == FieldDescriptorProtoLabel.LABEL_REPEATED
+ and not is_map(self.proto_obj, self.parent)
+ )
+
+ @property
+ def optional(self) -> bool:
+ return self.proto_obj.proto3_optional
+
+ @property
+ def mutable(self) -> bool:
+ """True if the field is a mutable type, otherwise False."""
+ return self.annotation.startswith(("List[", "Dict["))
+
+ @property
+ def field_type(self) -> str:
+ """String representation of proto field type."""
+ return (
+ FieldDescriptorProtoType(self.proto_obj.type)
+ .name.lower()
+ .replace("type_", "")
+ )
+
+ @property
+ def default_value_string(self) -> str:
+ """Python representation of the default proto value."""
+ if self.repeated:
+ return "[]"
+ if self.optional:
+ return "None"
+ if self.py_type == "int":
+ return "0"
+ if self.py_type == "float":
+ return "0.0"
+ elif self.py_type == "bool":
+ return "False"
+ elif self.py_type == "str":
+ return '""'
+ elif self.py_type == "bytes":
+ return 'b""'
+ elif self.field_type == "enum":
+ enum_proto_obj_name = self.proto_obj.type_name.split(".").pop()
+ enum = next(
+ e
+ for e in self.output_file.enums
+ if e.proto_obj.name == enum_proto_obj_name
+ )
+ return enum.default_value_string
+ else:
+ # Message type
+ return "None"
+
+ @property
+ def packed(self) -> bool:
+ """True if the wire representation is a packed format."""
+ return self.repeated and self.proto_obj.type in PROTO_PACKED_TYPES
+
+ @property
+ def py_name(self) -> str:
+ """Pythonized name."""
+ return pythonize_field_name(self.proto_name)
+
+ @property
+ def proto_name(self) -> str:
+ """Original protobuf name."""
+ return self.proto_obj.name
+
+ @property
+ def py_type(self) -> str:
+ """String representation of Python type."""
+ if self.proto_obj.type in PROTO_FLOAT_TYPES:
+ return "float"
+ elif self.proto_obj.type in PROTO_INT_TYPES:
+ return "int"
+ elif self.proto_obj.type in PROTO_BOOL_TYPES:
+ return "bool"
+ elif self.proto_obj.type in PROTO_STR_TYPES:
+ return "str"
+ elif self.proto_obj.type in PROTO_BYTES_TYPES:
+ return "bytes"
+ elif self.proto_obj.type in PROTO_MESSAGE_TYPES:
+ # Type referencing another defined Message or a named enum
+ return get_type_reference(
+ package=self.output_file.package,
+ imports=self.output_file.imports,
+ source_type=self.proto_obj.type_name,
+ pydantic=self.output_file.pydantic_dataclasses,
+ )
+ else:
+ raise NotImplementedError(f"Unknown type {self.proto_obj.type}")
+
+ @property
+ def annotation(self) -> str:
+ py_type = self.py_type
+ if self.use_builtins:
+ py_type = f"builtins.{py_type}"
+ if self.repeated:
+ return f"List[{py_type}]"
+ if self.optional:
+ return f"Optional[{py_type}]"
+ return py_type
+
+
+@dataclass
+class OneOfFieldCompiler(FieldCompiler):
+ @property
+ def aristaproto_field_args(self) -> List[str]:
+ args = super().aristaproto_field_args
+ group = self.parent.proto_obj.oneof_decl[self.proto_obj.oneof_index].name
+ args.append(f'group="{group}"')
+ return args
+
+
+@dataclass
+class PydanticOneOfFieldCompiler(OneOfFieldCompiler):
+ @property
+ def optional(self) -> bool:
+        # Force optional to be True. This allows the pydantic dataclass to
+        # validate the object correctly by allowing the field to be left
+        # empty. We add a pydantic validator later to ensure exactly one
+        # field of the group is defined.
+ return True
+
+ @property
+ def pydantic_imports(self) -> Set[str]:
+ return {"root_validator"}
+
+
+@dataclass
+class MapEntryCompiler(FieldCompiler):
+ py_k_type: Type = PLACEHOLDER
+ py_v_type: Type = PLACEHOLDER
+ proto_k_type: str = PLACEHOLDER
+ proto_v_type: str = PLACEHOLDER
+
+ def __post_init__(self) -> None:
+ """Explore nested types and set k_type and v_type if unset."""
+ map_entry = f"{self.proto_obj.name.replace('_', '').lower()}entry"
+ for nested in self.parent.proto_obj.nested_type:
+ if (
+ nested.name.replace("_", "").lower() == map_entry
+ and nested.options.map_entry
+ ):
+ # Get Python types
+                self.py_k_type = FieldCompiler(
+                    source_file=self.source_file,
+                    parent=self,
+                    proto_obj=nested.field[0],  # key
+                    path=self.path,  # throwaway compiler; reuse our own path
+                ).py_type
+                self.py_v_type = FieldCompiler(
+                    source_file=self.source_file,
+                    parent=self,
+                    proto_obj=nested.field[1],  # value
+                    path=self.path,
+                ).py_type
+
+ # Get proto types
+ self.proto_k_type = FieldDescriptorProtoType(nested.field[0].type).name
+ self.proto_v_type = FieldDescriptorProtoType(nested.field[1].type).name
+ super().__post_init__() # call FieldCompiler-> MessageCompiler __post_init__
+
+ @property
+ def aristaproto_field_args(self) -> List[str]:
+ return [f"aristaproto.{self.proto_k_type}", f"aristaproto.{self.proto_v_type}"]
+
+ @property
+ def field_type(self) -> str:
+ return "map"
+
+ @property
+ def annotation(self) -> str:
+ return f"Dict[{self.py_k_type}, {self.py_v_type}]"
+
+ @property
+ def repeated(self) -> bool:
+ return False # maps cannot be repeated
+
+
+@dataclass
+class EnumDefinitionCompiler(MessageCompiler):
+ """Representation of a proto Enum definition."""
+
+ proto_obj: EnumDescriptorProto = PLACEHOLDER
+ entries: List["EnumDefinitionCompiler.EnumEntry"] = PLACEHOLDER
+
+ @dataclass(unsafe_hash=True)
+ class EnumEntry:
+ """Representation of an Enum entry."""
+
+ name: str
+ value: int
+ comment: str
+
+ def __post_init__(self) -> None:
+ # Get entries/allowed values for this Enum
+ self.entries = [
+ self.EnumEntry(
+ name=pythonize_enum_member_name(
+ entry_proto_value.name, self.proto_obj.name
+ ),
+ value=entry_proto_value.number,
+ comment=get_comment(
+ proto_file=self.source_file, path=self.path + [2, entry_number]
+ ),
+ )
+ for entry_number, entry_proto_value in enumerate(self.proto_obj.value)
+ ]
+ super().__post_init__() # call MessageCompiler __post_init__
+
+ @property
+ def default_value_string(self) -> str:
+ """Python representation of the default value for Enums.
+
+ As per the spec, this is the first value of the Enum.
+ """
+        return str(self.entries[0].value)  # per proto3 semantics, this should always be 0
+
+
+@dataclass
+class ServiceCompiler(ProtoContentBase):
+ parent: OutputTemplate = PLACEHOLDER
+ proto_obj: DescriptorProto = PLACEHOLDER
+ path: List[int] = PLACEHOLDER
+ methods: List["ServiceMethodCompiler"] = field(default_factory=list)
+
+ def __post_init__(self) -> None:
+ # Add service to output file
+ self.output_file.services.append(self)
+ self.output_file.typing_imports.add("Dict")
+ super().__post_init__() # check for unset fields
+
+ @property
+ def proto_name(self) -> str:
+ return self.proto_obj.name
+
+ @property
+ def py_name(self) -> str:
+ return pythonize_class_name(self.proto_name)
+
+
+@dataclass
+class ServiceMethodCompiler(ProtoContentBase):
+ parent: ServiceCompiler
+ proto_obj: MethodDescriptorProto
+ path: List[int] = PLACEHOLDER
+ comment_indent: int = 8
+
+ def __post_init__(self) -> None:
+ # Add method to service
+ self.parent.methods.append(self)
+
+ # Check for imports
+ if "Optional" in self.py_output_message_type:
+ self.output_file.typing_imports.add("Optional")
+
+ # Check for Async imports
+ if self.client_streaming:
+ self.output_file.typing_imports.add("AsyncIterable")
+ self.output_file.typing_imports.add("Iterable")
+ self.output_file.typing_imports.add("Union")
+
+ # Required by both client and server
+ if self.client_streaming or self.server_streaming:
+ self.output_file.typing_imports.add("AsyncIterator")
+
+ # add imports required for request arguments timeout, deadline and metadata
+ self.output_file.typing_imports.add("Optional")
+ self.output_file.imports_type_checking_only.add("import grpclib.server")
+ self.output_file.imports_type_checking_only.add(
+ "from aristaproto.grpc.grpclib_client import MetadataLike"
+ )
+ self.output_file.imports_type_checking_only.add(
+ "from grpclib.metadata import Deadline"
+ )
+
+ super().__post_init__() # check for unset fields
+
+ @property
+ def py_name(self) -> str:
+ """Pythonized method name."""
+ return pythonize_method_name(self.proto_obj.name)
+
+ @property
+ def proto_name(self) -> str:
+ """Original protobuf name."""
+ return self.proto_obj.name
+
+ @property
+ def route(self) -> str:
+ package_part = (
+ f"{self.output_file.package}." if self.output_file.package else ""
+ )
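+        # e.g. (illustrative) package "helloworld", service "Greeter" and
+        # method "SayHello" produce the route "/helloworld.Greeter/SayHello".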
+ return f"/{package_part}{self.parent.proto_name}/{self.proto_name}"
+
+ @property
+ def py_input_message(self) -> Optional[MessageCompiler]:
+ """Find the input message object.
+
+ Returns
+ -------
+ Optional[MessageCompiler]
+            MessageCompiler instance representing the input message.
+            If no matching input message can be found, None is returned.
+ """
+ package, name = parse_source_type_name(self.proto_obj.input_type)
+
+ # Nested types are currently flattened without dots.
+        # Todo: keep a fully qualified name in types, that is
+ # comparable with method.input_type
+ for msg in self.request.all_messages:
+ if (
+ msg.py_name == pythonize_class_name(name.replace(".", ""))
+ and msg.output_file.package == package
+ ):
+ return msg
+ return None
+
+ @property
+ def py_input_message_type(self) -> str:
+ """String representation of the Python type corresponding to the
+ input message.
+
+ Returns
+ -------
+ str
+ String representation of the Python type corresponding to the input message.
+ """
+ return get_type_reference(
+ package=self.output_file.package,
+ imports=self.output_file.imports,
+ source_type=self.proto_obj.input_type,
+ unwrap=False,
+ pydantic=self.output_file.pydantic_dataclasses,
+ ).strip('"')
+
+ @property
+ def py_input_message_param(self) -> str:
+ """Param name corresponding to py_input_message_type.
+
+ Returns
+ -------
+ str
+ Param name corresponding to py_input_message_type.
+ """
+ return pythonize_field_name(self.py_input_message_type)
+
+ @property
+ def py_output_message_type(self) -> str:
+ """String representation of the Python type corresponding to the
+ output message.
+
+ Returns
+ -------
+ str
+ String representation of the Python type corresponding to the output message.
+ """
+ return get_type_reference(
+ package=self.output_file.package,
+ imports=self.output_file.imports,
+ source_type=self.proto_obj.output_type,
+ unwrap=False,
+ pydantic=self.output_file.pydantic_dataclasses,
+ ).strip('"')
+
+ @property
+ def client_streaming(self) -> bool:
+ return self.proto_obj.client_streaming
+
+ @property
+ def server_streaming(self) -> bool:
+ return self.proto_obj.server_streaming
diff --git a/src/aristaproto/plugin/parser.py b/src/aristaproto/plugin/parser.py
new file mode 100644
index 0000000..f761af6
--- /dev/null
+++ b/src/aristaproto/plugin/parser.py
@@ -0,0 +1,221 @@
+import pathlib
+import sys
+from typing import (
+ Generator,
+ List,
+ Set,
+ Tuple,
+ Union,
+)
+
+from aristaproto.lib.google.protobuf import (
+ DescriptorProto,
+ EnumDescriptorProto,
+ FieldDescriptorProto,
+ FileDescriptorProto,
+ ServiceDescriptorProto,
+)
+from aristaproto.lib.google.protobuf.compiler import (
+ CodeGeneratorRequest,
+ CodeGeneratorResponse,
+ CodeGeneratorResponseFeature,
+ CodeGeneratorResponseFile,
+)
+
+from .compiler import outputfile_compiler
+from .models import (
+ EnumDefinitionCompiler,
+ FieldCompiler,
+ MapEntryCompiler,
+ MessageCompiler,
+ OneOfFieldCompiler,
+ OutputTemplate,
+ PluginRequestCompiler,
+ PydanticOneOfFieldCompiler,
+ ServiceCompiler,
+ ServiceMethodCompiler,
+ is_map,
+ is_oneof,
+)
+
+
+def traverse(
+ proto_file: FileDescriptorProto,
+) -> Generator[
+ Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None
+]:
+ # Todo: Keep information about nested hierarchy
+ def _traverse(
+ path: List[int],
+ items: Union[List[EnumDescriptorProto], List[DescriptorProto]],
+ prefix: str = "",
+ ) -> Generator[
+ Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None
+ ]:
+ for i, item in enumerate(items):
+ # Adjust the name since we flatten the hierarchy.
+ # Todo: don't change the name, but include full name in returned tuple
+ item.name = next_prefix = f"{prefix}_{item.name}"
+ yield item, [*path, i]
+
+ if isinstance(item, DescriptorProto):
+ # Get nested types.
+ yield from _traverse([*path, i, 4], item.enum_type, next_prefix)
+ yield from _traverse([*path, i, 3], item.nested_type, next_prefix)
+
+ yield from _traverse([5], proto_file.enum_type)
+ yield from _traverse([4], proto_file.message_type)
+
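+# The yielded paths follow protobuf's SourceCodeInfo conventions, alternating
+# descriptor field numbers with indices. Illustrative readings:
+#
+#     [4, 0]        first message in the file   (FileDescriptorProto.message_type = 4)
+#     [5, 1]        second top-level enum       (FileDescriptorProto.enum_type = 5)
+#     [4, 0, 3, 0]  first nested type of the first message (nested_type = 3)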
+
+def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
+ response = CodeGeneratorResponse()
+
+ plugin_options = request.parameter.split(",") if request.parameter else []
+ response.supported_features = CodeGeneratorResponseFeature.FEATURE_PROTO3_OPTIONAL
+
+ request_data = PluginRequestCompiler(plugin_request_obj=request)
+ # Gather output packages
+ for proto_file in request.proto_file:
+ output_package_name = proto_file.package
+ if output_package_name not in request_data.output_packages:
+ # Create a new output if there is no output for this package
+ request_data.output_packages[output_package_name] = OutputTemplate(
+ parent_request=request_data, package_proto_obj=proto_file
+ )
+ # Add this input file to the output corresponding to this package
+ request_data.output_packages[output_package_name].input_files.append(proto_file)
+
+ if (
+ proto_file.package == "google.protobuf"
+ and "INCLUDE_GOOGLE" not in plugin_options
+ ):
+ # If not INCLUDE_GOOGLE,
+ # skip outputting Google's well-known types
+ request_data.output_packages[output_package_name].output = False
+
+ if "pydantic_dataclasses" in plugin_options:
+ request_data.output_packages[
+ output_package_name
+ ].pydantic_dataclasses = True
+
+ # Read Messages and Enums
+    # We need to read Messages before Services so that we can
+ # get the references to input/output messages for each service
+ for output_package_name, output_package in request_data.output_packages.items():
+ for proto_input_file in output_package.input_files:
+ for item, path in traverse(proto_input_file):
+ read_protobuf_type(
+ source_file=proto_input_file,
+ item=item,
+ path=path,
+ output_package=output_package,
+ )
+
+ # Read Services
+ for output_package_name, output_package in request_data.output_packages.items():
+ for proto_input_file in output_package.input_files:
+ for index, service in enumerate(proto_input_file.service):
+ read_protobuf_service(service, index, output_package)
+
+ # Generate output files
+ output_paths: Set[pathlib.Path] = set()
+ for output_package_name, output_package in request_data.output_packages.items():
+ if not output_package.output:
+ continue
+
+ # Add files to the response object
+ output_path = pathlib.Path(*output_package_name.split("."), "__init__.py")
+ output_paths.add(output_path)
+
+ response.file.append(
+ CodeGeneratorResponseFile(
+ name=str(output_path),
+ # Render and then format the output file
+ content=outputfile_compiler(output_file=output_package),
+ )
+ )
+
+ # Make each output directory a package with __init__ file
+ init_files = {
+ directory.joinpath("__init__.py")
+ for path in output_paths
+ for directory in path.parents
+ if not directory.joinpath("__init__.py").exists()
+ } - output_paths
+
+ for init_file in init_files:
+ response.file.append(CodeGeneratorResponseFile(name=str(init_file)))
+
+    for output_path in sorted(output_paths.union(init_files)):
+        print(f"Writing {output_path}", file=sys.stderr)
+
+ return response
+
+
+def _make_one_of_field_compiler(
+ output_package: OutputTemplate,
+ source_file: "FileDescriptorProto",
+ parent: MessageCompiler,
+ proto_obj: "FieldDescriptorProto",
+ path: List[int],
+) -> FieldCompiler:
+ pydantic = output_package.pydantic_dataclasses
+ Cls = PydanticOneOfFieldCompiler if pydantic else OneOfFieldCompiler
+ return Cls(
+ source_file=source_file,
+ parent=parent,
+ proto_obj=proto_obj,
+ path=path,
+ )
+
+
+def read_protobuf_type(
+ item: DescriptorProto,
+ path: List[int],
+ source_file: "FileDescriptorProto",
+ output_package: OutputTemplate,
+) -> None:
+ if isinstance(item, DescriptorProto):
+ if item.options.map_entry:
+ # Skip generated map entry messages since we just use dicts
+ return
+ # Process Message
+ message_data = MessageCompiler(
+ source_file=source_file, parent=output_package, proto_obj=item, path=path
+ )
+ for index, field in enumerate(item.field):
+ if is_map(field, item):
+ MapEntryCompiler(
+ source_file=source_file,
+ parent=message_data,
+ proto_obj=field,
+ path=path + [2, index],
+ )
+ elif is_oneof(field):
+ _make_one_of_field_compiler(
+ output_package, source_file, message_data, field, path + [2, index]
+ )
+ else:
+ FieldCompiler(
+ source_file=source_file,
+ parent=message_data,
+ proto_obj=field,
+ path=path + [2, index],
+ )
+ elif isinstance(item, EnumDescriptorProto):
+ # Enum
+ EnumDefinitionCompiler(
+ source_file=source_file, parent=output_package, proto_obj=item, path=path
+ )
+
+
+def read_protobuf_service(
+ service: ServiceDescriptorProto, index: int, output_package: OutputTemplate
+) -> None:
+ service_data = ServiceCompiler(
+ parent=output_package, proto_obj=service, path=[6, index]
+ )
+ for j, method in enumerate(service.method):
+ ServiceMethodCompiler(
+ parent=service_data, proto_obj=method, path=[6, index, 2, j]
+ )
diff --git a/src/aristaproto/plugin/plugin.bat b/src/aristaproto/plugin/plugin.bat
new file mode 100644
index 0000000..2a4444d
--- /dev/null
+++ b/src/aristaproto/plugin/plugin.bat
@@ -0,0 +1,2 @@
+@SET plugin_dir=%~dp0
+@python -m %plugin_dir% %* \ No newline at end of file
diff --git a/src/aristaproto/py.typed b/src/aristaproto/py.typed
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/aristaproto/py.typed
diff --git a/src/aristaproto/templates/template.py.j2 b/src/aristaproto/templates/template.py.j2
new file mode 100644
index 0000000..f2f1425
--- /dev/null
+++ b/src/aristaproto/templates/template.py.j2
@@ -0,0 +1,257 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# sources: {{ ', '.join(output_file.input_filenames) }}
+# plugin: python-aristaproto
+# This file has been @generated
+{% for i in output_file.python_module_imports|sort %}
+import {{ i }}
+{% endfor %}
+
+{% if output_file.pydantic_dataclasses %}
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+ from dataclasses import dataclass
+else:
+ from pydantic.dataclasses import dataclass
+{%- else -%}
+from dataclasses import dataclass
+{% endif %}
+
+{% if output_file.datetime_imports %}
+from datetime import {% for i in output_file.datetime_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}
+
+{% endif %}
+{% if output_file.typing_imports %}
+from typing import {% for i in output_file.typing_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}
+
+{% endif %}
+
+{% if output_file.pydantic_imports %}
+from pydantic import {% for i in output_file.pydantic_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %}
+
+{% endif %}
+
+import aristaproto
+{% if output_file.services %}
+from aristaproto.grpc.grpclib_server import ServiceBase
+import grpclib
+{% endif %}
+
+{% for i in output_file.imports|sort %}
+{{ i }}
+{% endfor %}
+
+{% if output_file.imports_type_checking_only %}
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+{% for i in output_file.imports_type_checking_only|sort %} {{ i }}
+{% endfor %}
+{% endif %}
+
+{% if output_file.enums %}{% for enum in output_file.enums %}
+class {{ enum.py_name }}(aristaproto.Enum):
+ {% if enum.comment %}
+{{ enum.comment }}
+
+ {% endif %}
+ {% for entry in enum.entries %}
+ {{ entry.name }} = {{ entry.value }}
+ {% if entry.comment %}
+{{ entry.comment }}
+
+ {% endif %}
+ {% endfor %}
+
+
+{% endfor %}
+{% endif %}
+{% for message in output_file.messages %}
+@dataclass(eq=False, repr=False)
+class {{ message.py_name }}(aristaproto.Message):
+ {% if message.comment %}
+{{ message.comment }}
+
+ {% endif %}
+ {% for field in message.fields %}
+ {{ field.get_field_string() }}
+ {% if field.comment %}
+{{ field.comment }}
+
+ {% endif %}
+ {% endfor %}
+ {% if not message.fields %}
+ pass
+ {% endif %}
+
+ {% if message.deprecated or message.has_deprecated_fields %}
+ def __post_init__(self) -> None:
+ {% if message.deprecated %}
+ warnings.warn("{{ message.py_name }} is deprecated", DeprecationWarning)
+ {% endif %}
+ super().__post_init__()
+ {% for field in message.deprecated_fields %}
+ if self.is_set("{{ field }}"):
+ warnings.warn("{{ message.py_name }}.{{ field }} is deprecated", DeprecationWarning)
+ {% endfor %}
+ {% endif %}
+
+ {% if output_file.pydantic_dataclasses and message.has_oneof_fields %}
+ @root_validator()
+ def check_oneof(cls, values):
+ return cls._validate_field_groups(values)
+ {% endif %}
+
+{% endfor %}
+{% for service in output_file.services %}
+class {{ service.py_name }}Stub(aristaproto.ServiceStub):
+ {% if service.comment %}
+{{ service.comment }}
+
+ {% elif not service.methods %}
+ pass
+ {% endif %}
+ {% for method in service.methods %}
+ async def {{ method.py_name }}(self
+ {%- if not method.client_streaming -%}
+ {%- if method.py_input_message -%}, {{ method.py_input_message_param }}: "{{ method.py_input_message_type }}"{%- endif -%}
+ {%- else -%}
+ {# Client streaming: need a request iterator instead #}
+ , {{ method.py_input_message_param }}_iterator: Union[AsyncIterable["{{ method.py_input_message_type }}"], Iterable["{{ method.py_input_message_type }}"]]
+ {%- endif -%}
+ ,
+ *
+ , timeout: Optional[float] = None
+ , deadline: Optional["Deadline"] = None
+ , metadata: Optional["MetadataLike"] = None
+ ) -> {% if method.server_streaming %}AsyncIterator["{{ method.py_output_message_type }}"]{% else %}"{{ method.py_output_message_type }}"{% endif %}:
+ {% if method.comment %}
+{{ method.comment }}
+
+ {% endif %}
+ {% if method.server_streaming %}
+ {% if method.client_streaming %}
+ async for response in self._stream_stream(
+ "{{ method.route }}",
+ {{ method.py_input_message_param }}_iterator,
+ {{ method.py_input_message_type }},
+ {{ method.py_output_message_type.strip('"') }},
+ timeout=timeout,
+ deadline=deadline,
+ metadata=metadata,
+ ):
+ yield response
+ {% else %}{# i.e. not client streaming #}
+ async for response in self._unary_stream(
+ "{{ method.route }}",
+ {{ method.py_input_message_param }},
+ {{ method.py_output_message_type.strip('"') }},
+ timeout=timeout,
+ deadline=deadline,
+ metadata=metadata,
+ ):
+ yield response
+
+ {% endif %}{# if client streaming #}
+ {% else %}{# i.e. not server streaming #}
+ {% if method.client_streaming %}
+ return await self._stream_unary(
+ "{{ method.route }}",
+ {{ method.py_input_message_param }}_iterator,
+ {{ method.py_input_message_type }},
+ {{ method.py_output_message_type.strip('"') }},
+ timeout=timeout,
+ deadline=deadline,
+ metadata=metadata,
+ )
+ {% else %}{# i.e. not client streaming #}
+ return await self._unary_unary(
+ "{{ method.route }}",
+ {{ method.py_input_message_param }},
+ {{ method.py_output_message_type.strip('"') }},
+ timeout=timeout,
+ deadline=deadline,
+ metadata=metadata,
+ )
+ {% endif %}{# client streaming #}
+ {% endif %}
+
+ {% endfor %}
+{% endfor %}
+
+{% for service in output_file.services %}
+class {{ service.py_name }}Base(ServiceBase):
+ {% if service.comment %}
+{{ service.comment }}
+
+ {% endif %}
+
+ {% for method in service.methods %}
+ async def {{ method.py_name }}(self
+ {%- if not method.client_streaming -%}
+ {%- if method.py_input_message -%}, {{ method.py_input_message_param }}: "{{ method.py_input_message_type }}"{%- endif -%}
+ {%- else -%}
+ {# Client streaming: need a request iterator instead #}
+ , {{ method.py_input_message_param }}_iterator: AsyncIterator["{{ method.py_input_message_type }}"]
+ {%- endif -%}
+ ) -> {% if method.server_streaming %}AsyncIterator["{{ method.py_output_message_type }}"]{% else %}"{{ method.py_output_message_type }}"{% endif %}:
+ {% if method.comment %}
+{{ method.comment }}
+
+ {% endif %}
+ raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
+ {% if method.server_streaming %}
+ {# Commented out to avoid unreachable code. #}
+ {# yield {{ method.py_output_message_type }}() #}
+ {% endif %}
+
+ {% endfor %}
+
+ {% for method in service.methods %}
+ async def __rpc_{{ method.py_name }}(self, stream: "grpclib.server.Stream[{{ method.py_input_message_type }}, {{ method.py_output_message_type }}]") -> None:
+ {% if not method.client_streaming %}
+ request = await stream.recv_message()
+ {% else %}
+ request = stream.__aiter__()
+ {% endif %}
+ {% if not method.server_streaming %}
+ response = await self.{{ method.py_name }}(request)
+ await stream.send_message(response)
+ {% else %}
+ await self._call_rpc_handler_server_stream(
+ self.{{ method.py_name }},
+ stream,
+ request,
+ )
+ {% endif %}
+
+ {% endfor %}
+
+ def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
+ return {
+ {% for method in service.methods %}
+ "{{ method.route }}": grpclib.const.Handler(
+ self.__rpc_{{ method.py_name }},
+ {% if not method.client_streaming and not method.server_streaming %}
+ grpclib.const.Cardinality.UNARY_UNARY,
+ {% elif not method.client_streaming and method.server_streaming %}
+ grpclib.const.Cardinality.UNARY_STREAM,
+ {% elif method.client_streaming and not method.server_streaming %}
+ grpclib.const.Cardinality.STREAM_UNARY,
+ {% else %}
+ grpclib.const.Cardinality.STREAM_STREAM,
+ {% endif %}
+ {{ method.py_input_message_type }},
+ {{ method.py_output_message_type }},
+ ),
+ {% endfor %}
+ }
+
+{% endfor %}
+
+{% if output_file.pydantic_dataclasses %}
+{% for message in output_file.messages %}
+{% if message.has_message_field %}
+{{ message.py_name }}.__pydantic_model__.update_forward_refs() # type: ignore
+{% endif %}
+{% endfor %}
+{% endif %}
diff --git a/src/aristaproto/utils.py b/src/aristaproto/utils.py
new file mode 100644
index 0000000..b977fc7
--- /dev/null
+++ b/src/aristaproto/utils.py
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+from typing import (
+ Any,
+ Callable,
+ Generic,
+ Optional,
+ Type,
+ TypeVar,
+)
+
+from typing_extensions import (
+ Concatenate,
+ ParamSpec,
+ Self,
+)
+
+
+SelfT = TypeVar("SelfT")
+P = ParamSpec("P")
+HybridT = TypeVar("HybridT", covariant=True)
+
+
+class hybridmethod(Generic[SelfT, P, HybridT]):
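+    """Descriptor combining a classmethod with an optional instance method.
+
+    Illustrative usage (hypothetical class, not part of this codebase):
+
+        class Greeter:
+            @hybridmethod
+            def greet(cls) -> str:
+                return "hello from the class"
+
+            @greet.instancemethod
+            def greet(self) -> str:
+                return "hello from an instance"
+
+        Greeter.greet()    # -> "hello from the class"
+        Greeter().greet()  # -> "hello from an instance"
+    """
+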
+ def __init__(
+ self,
+ func: Callable[
+ Concatenate[type[SelfT], P], HybridT
+ ], # Must be the classmethod version
+ ):
+        self.cls_func = func
+        self.instance_func: Optional[Callable[Concatenate[SelfT, P], HybridT]] = None
+        self.__doc__ = func.__doc__
+
+ def instancemethod(self, func: Callable[Concatenate[SelfT, P], HybridT]) -> Self:
+ self.instance_func = func
+ return self
+
+ def __get__(
+ self, instance: Optional[SelfT], owner: Type[SelfT]
+ ) -> Callable[P, HybridT]:
+ if instance is None or self.instance_func is None:
+ # either bound to the class, or no instance method available
+ return self.cls_func.__get__(owner, None)
+ return self.instance_func.__get__(instance, owner)
+
+
+T_co = TypeVar("T_co")
+TT_co = TypeVar("TT_co", bound="type[Any]")
+
+
+class classproperty(Generic[TT_co, T_co]):
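+    """Descriptor that computes a read-only property on the class itself.
+
+    Illustrative usage (hypothetical class):
+
+        class Config:
+            @classproperty
+            def kind(cls) -> str:
+                return cls.__name__.lower()
+
+        Config.kind    # -> "config"
+        Config().kind  # -> "config"
+    """
+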
+ def __init__(self, func: Callable[[TT_co], T_co]):
+ self.__func__ = func
+
+ def __get__(self, instance: Any, type: TT_co) -> T_co:
+ return self.__func__(type)