feat: initial stub generator package
commit 65d135a6d1
capnp_stub_generator/capnp_types.py
@@ -0,0 +1,49 @@
"""Type definitions that are common in capnproto schemas."""
from types import ModuleType
from typing import Dict, Tuple

CAPNP_TYPE_TO_PYTHON = {
    "void": "None",
    "bool": "bool",
    "int8": "int",
    "int16": "int",
    "int32": "int",
    "int64": "int",
    "uint8": "int",
    "uint16": "int",
    "uint32": "int",
    "uint64": "int",
    "float32": "float",
    "float64": "float",
    "text": "str",
    "data": "bytes",
}

class CapnpFieldType:
    """Types of capnproto fields."""

    GROUP = "group"
    SLOT = "slot"


class CapnpSlotType:
    """Types of capnproto slots.

    If CapnpFieldType is 'slot', this defines the type of that slot.
    """

    ANY_POINTER = "anyPointer"
    STRUCT = "struct"
    ENUM = "enum"
    LIST = "list"


class CapnpElementType:
    """Types of capnproto elements."""

    ENUM = "enum"
    STRUCT = "struct"
    CONST = "const"


ModuleRegistryType = Dict[int, Tuple[str, ModuleType]]
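The writer further down uses this table to translate scalar capnp field types straight into Python annotation text. A minimal sketch of that lookup, using an invented `speed`/`uint32` field purely for illustration:

from capnp_stub_generator.capnp_types import CAPNP_TYPE_TO_PYTHON

# "speed" and "uint32" are made-up example values, not part of this commit.
field_name = "speed"
capnp_type = "uint32"
stub_line = f"{field_name}: {CAPNP_TYPE_TO_PYTHON[capnp_type]}"  # -> "speed: int"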
capnp_stub_generator/cli.py
@@ -0,0 +1,112 @@
"""Command-line interface for generating type hints for *.capnp schemas.

Notes:
- This generator requires pycapnp >= 1.0.0.
- Capnp interfaces (RPC) are not yet supported.
"""

from __future__ import annotations

import argparse
import logging
import os
from typing import Sequence

from capnp_stub_generator.generator import run

logger = logging.getLogger(__name__)


def _add_output_argument(parser: argparse.ArgumentParser):
    """Add an output argument to a parser.

    Args:
        parser (argparse.ArgumentParser): The parser to add the argument to.
    """
    parser.add_argument(
        "-o",
        "--output-dir",
        dest="output_dir",
        default=None,
        help="override directory where to write the .pyi file",
    )


def _add_recursive_argument(parser: argparse.ArgumentParser):
    """Add a recursive argument to a parser.

    Args:
        parser (argparse.ArgumentParser): The parser to add the argument to.
    """
    parser.add_argument(
        "-r",
        "--recursive",
        dest="recursive",
        default=False,
        action="store_true",
        help="recursively search for *.capnp files with a given glob expression.",
    )


def setup_parser() -> argparse.ArgumentParser:
    """Set up the argument parser.

    Returns:
        argparse.ArgumentParser: The parser after setup.
    """
    parser = argparse.ArgumentParser(description="Generate type hints for capnp schema files.")

    parser.add_argument(
        "-p",
        "--paths",
        type=str,
        nargs="+",
        default=["**/*.capnp"],
        help="path or glob expressions that match *.capnp files.",
    )

    parser.add_argument(
        "-e",
        "--excludes",
        type=str,
        nargs="+",
        default=["**/c-capnproto/**/*.capnp"],
        help="path or glob expressions to exclude from matches.",
    )

    _add_output_argument(parser)
    _add_recursive_argument(parser)

    return parser


def main(argv: Sequence[str] | None = None) -> int:
    """Entry point of the stub generator.

    Args:
        argv (Sequence[str] | None, optional): Run arguments. Defaults to None.

    Returns:
        int: Error code.
    """
    logging.basicConfig(level=logging.INFO)

    root_directory = os.getcwd()
    logger.info("Working from root directory: %s", root_directory)

    parser = setup_parser()
    args = parser.parse_args(argv)

    run(args, root_directory)

    return 0


if __name__ == "__main__":
    main(
        [
            "-p",
            "python/libraries/mars/mars/interfaces/**/*.capnp",
            "-r",
        ]
    )
capnp_stub_generator/generator.py
@@ -0,0 +1,85 @@
"""Top-level module for stub generation."""

from __future__ import annotations

import argparse
import glob
import logging
import os.path
from types import ModuleType
from typing import Set

import black
import capnp
import isort
from capnp_stub_generator.capnp_types import ModuleRegistryType
from capnp_stub_generator.helper import replace_capnp_suffix
from capnp_stub_generator.writer import Writer

capnp.remove_import_hook()


logger = logging.getLogger(__name__)

PYI_SUFFIX = ".pyi"
PY_SUFFIX = ".py"


def generate_stubs(module: ModuleType, module_registry: ModuleRegistryType, output_file_path: str):
    """Entry point for generating *.pyi and *.py stubs from a module definition.

    Args:
        module (ModuleType): The module to generate stubs for.
        module_registry (ModuleRegistryType): A registry of all detected modules.
        output_file_path (str): The name of the output stub files, without file extension.
    """
    writer = Writer(module, module_registry)
    writer.generate_recursive()

    for outputs, suffix in zip((writer.dumps_pyi(), writer.dumps_py()), (PYI_SUFFIX, PY_SUFFIX)):
        sorted_imports = isort.code(outputs, config=isort.Config(profile="black"))
        formatted_output = black.format_str(sorted_imports, mode=black.Mode(is_pyi=True, line_length=79))

        with open(output_file_path + suffix, "w", encoding="utf8") as output_file:
            output_file.write(formatted_output)

    logger.info("Wrote stubs to '%s(%s/%s)'.", output_file_path, PYI_SUFFIX, PY_SUFFIX)


def run(args: argparse.Namespace, root_directory: str):
    """Run the stub generator on a set of paths that point to *.capnp schemas.

    Uses `generate_stubs` on each input file.

    Args:
        args (argparse.Namespace): The arguments that were passed when calling the stub generator.
        root_directory (str): The directory from which the generator is executed.
    """
    paths: list[str] = args.paths
    excludes: list[str] = args.excludes

    excluded_paths: Set[str] = set()
    for exclude in excludes:
        exclude_directory = os.path.join(root_directory, exclude)
        excluded_paths = excluded_paths.union(glob.glob(exclude_directory, recursive=args.recursive))

    search_paths: Set[str] = set()
    for path in paths:
        search_directory = os.path.join(root_directory, path)
        search_paths = search_paths.union(glob.glob(search_directory, recursive=args.recursive))

    # The `valid_paths` contain the automatically detected search paths, except for specifically excluded paths.
    valid_paths = search_paths - excluded_paths

    parser = capnp.SchemaParser()
    module_registry: ModuleRegistryType = {}

    for path in valid_paths:
        module = parser.load(path)
        module_registry[module.schema.node.id] = (path, module)

    for path, module in module_registry.values():
        output_directory = os.path.dirname(path)
        output_file_name = replace_capnp_suffix(os.path.basename(path))

        generate_stubs(module, module_registry, os.path.join(output_directory, output_file_name))
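Because `run` only reads the `paths`, `excludes` and `recursive` attributes that `cli.setup_parser` defines, it can also be driven from other Python code without the CLI. A minimal sketch, where the glob below is an invented example path:

import argparse
import os

from capnp_stub_generator.generator import run

# "schemas/**/*.capnp" is a made-up example; the attribute names mirror cli.setup_parser().
args = argparse.Namespace(paths=["schemas/**/*.capnp"], excludes=[], recursive=True, output_dir=None)
run(args, os.getcwd())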
capnp_stub_generator/helper.py
@@ -0,0 +1,19 @@
"""Helper functionality that is used in other modules of this package."""


def replace_capnp_suffix(original: str) -> str:
    """If found, replaces the .capnp suffix in a string with _capnp.

    For example, `some_module.capnp` becomes `some_module_capnp`.

    Args:
        original (str): The string to replace the suffix in.

    Returns:
        str: The string with the replaced suffix.
    """
    if original.endswith(".capnp"):
        return original.replace(".capnp", "_capnp")

    else:
        return original
capnp_stub_generator/writer.py
@@ -0,0 +1,810 @@
"""Generate type hints for *.capnp schemas.

Note: This generator requires pycapnp >= 1.0.0.

Note: capnp interfaces (RPC) are not yet supported.
"""

from __future__ import annotations

import dataclasses
import keyword
import logging
import os.path
import pathlib
from types import ModuleType
from typing import Any, Literal, Set

import capnp
from capnp_stub_generator.capnp_types import (
    CAPNP_TYPE_TO_PYTHON,
    CapnpElementType,
    CapnpFieldType,
    CapnpSlotType,
    ModuleRegistryType,
)
from capnp_stub_generator.helper import replace_capnp_suffix

capnp.remove_import_hook()


logger = logging.getLogger(__name__)

INDENT_SPACES = 4


class NoParentError(Exception):
    """Raised when the parent of a scope is not available."""


@dataclasses.dataclass
class Scope:
    """A scope within the output .pyi file.

    Scopes contain text and are indented by a certain amount. They often have parents, within which they are located.

    Args:
        name (str): The name of the scope. Use an empty name for the root scope ("").
        id (int): A numerical identifier of the scope.
        parent (Scope | None): The direct parent scope of this scope, if there is any.
        return_scope (Scope | None): The scope to which to return, when closing this one.
        lines (list[str]): The list of text lines in this scope.
    """

    name: str
    id: int
    parent: Scope | None
    return_scope: Scope | None
    lines: list[str] = dataclasses.field(default_factory=list)

    def __post_init__(self):
        """Assures that, if this is the root scope, its name is empty."""
        assert (self.is_root) == (self.name == "")

    @property
    def parents(self) -> list[Scope]:
        """A list of all parent scopes of this scope, starting from the first parent.

        If the returned list is empty, this scope has no parents. The first parent in the list has no further
        parents; it is the root scope.
        """
        parents: list[Scope] = []
        scope: Scope | None = self.parent

        while scope is not None:
            parents.append(scope)
            scope = scope.parent

        parents.reverse()

        return parents

    @property
    def trace(self) -> list[Scope]:
        """A list of all scopes that lead to this scope, starting from the first parent.

        The first parent has no further parents.
        """
        return self.parents + [self]

    @property
    def root(self) -> Scope:
        """Get the root scope that has no further parents."""
        if not self.parents:
            return self

        else:
            return self.parents[0]

    @property
    def is_root(self) -> bool:
        """Determine whether this is the root scope."""
        return self.root == self

    @property
    def indent_spaces(self) -> int:
        """The number of spaces by which this scope is indented."""
        return len(self.parents) * INDENT_SPACES

    def add_line(self, line: str = ""):
        """Add a line to this scope, taking into account the current indent spaces.

        Args:
            line (str): The line to add. Optional, defaults to "".
        """
        if not line:
            self.lines.append("")

        else:
            self.lines.append(" " * self.indent_spaces + line)

    def trace_as_str(self, delimiter: Literal[".", "_"] = ".") -> str:
        """A string representation of this scope's relative trace.

        Follow the trace of the scope, and connect parent scopes with a delimiter.
        The root scope is not included in this trace string.

        Args:
            delimiter (Literal[".", "_"]): The delimiter to join the scope names with.
        """
        return delimiter.join((scope.name for scope in self.trace if not scope.is_root))

    def __repr__(self) -> str:
        """A string representation of this scope.

        Follow the path of scopes, and connect parent scopes with '.'.
        """
        return self.trace_as_str(".")


@dataclasses.dataclass
class CapnpType:
    """Represents a type that is extracted from a .capnp schema.

    Args:
        schema (Any): The schema that the type was extracted from.
        name (str): The name of the type.
        scope (Scope): The scope in which the type is defined.
        generic_params (list[str]): The names of the type's generic parameters, if any.
    """

    schema: Any
    name: str
    scope: Scope
    generic_params: list[str] = dataclasses.field(default_factory=list)


class Writer:
    """A class that handles writing the stub file, based on a provided module definition."""

    VALID_TYPING_IMPORTS = Literal["Generic", "TypeVar", "List", "Literal", "Union", "overload"]

    def __init__(self, module: ModuleType, module_registry: ModuleRegistryType):
        """Initialize the stub writer with a module definition.

        Args:
            module (ModuleType): The module definition to parse and write a stub for.
            module_registry (ModuleRegistryType): The module registry, for finding dependencies between loaded modules.
        """
        self.scope = Scope(name="", id=module.schema.node.id, parent=None, return_scope=None)
        self.scopes_by_id: dict[int, Scope] = {self.scope.id: self.scope}

        self._module = module
        self._module_registry = module_registry

        if self._module.__file__:
            self._module_path = pathlib.Path(self._module.__file__)

        else:
            raise ValueError("The module has no file path attached to it.")

        self._imports: Set[str] = set()
        self._add_import("from __future__ import annotations")

        self._typing_imports: Set[Writer.VALID_TYPING_IMPORTS] = set()

        self.type_vars: set[str] = set()
        self.type_map: dict[int, CapnpType] = {}

        self.docstring = f'"""This is an automatically generated stub for `{self._module_path.name}`."""'

    def _add_typing_import(self, module_name: Writer.VALID_TYPING_IMPORTS):
        """Add an import for a module from the 'typing' package.

        E.g., when using
        add_typing_import("List")
        add_typing_import("Union")

        this generates an import line `from typing import List, Union`.

        Args:
            module_name (Writer.VALID_TYPING_IMPORTS): The module to import from `typing`.
        """
        self._typing_imports.add(module_name)

    def _add_import(self, import_line: str):
        """Add a full import line.

        E.g. 'import numpy as np'.

        Args:
            import_line (str): The import line to add.
        """
        self._imports.add(import_line)

    def _add_enum_import(self):
        """Adds an import for the `Enum` class."""
        self._add_import("from enum import Enum")

    @property
    def base_module_name(self) -> str:
        """The base name of this writer's target module."""
        return self._module.schema.node.displayName

    @property
    def imports(self) -> list[str]:
        """Get the full list of import strings that were added to the writer, including typing imports.

        Returns:
            list[str]: The list of imports that were previously added.
        """
        import_lines: list[str] = []

        for imp in self._imports:
            import_lines.append(imp)

        if self._typing_imports:
            import_lines.append("from typing import " + ", ".join(sorted(self._typing_imports)))

        return import_lines

    @staticmethod
    def get_display_name(schema: Any) -> str:
        """Extract the display name from the schema.

        Args:
            schema (Any): The schema to get the display name from.

        Returns:
            str: The display name of the schema.
        """
        return schema.node.displayName[schema.node.displayNamePrefixLength :]

    def gen_const(self, schema: Any) -> None:
        """Generate a `const` object.

        Args:
            schema (Any): The schema to generate the `const` object out of.
        """
        assert schema.node.which() == CapnpElementType.CONST

        name = self.get_display_name(schema)
        python_type = CAPNP_TYPE_TO_PYTHON[schema.node.const.type.which()]
        self.scope.add_line(f"{name}: {python_type}")

    def gen_enum(self, schema: Any) -> CapnpType | None:
        """Generate an `enum` object.

        Args:
            schema (Any): The schema to generate the `enum` object out of.
        """
        assert schema.node.which() == CapnpElementType.ENUM

        imported = self.register_import(schema)

        if imported is not None:
            return imported

        name = self.get_display_name(schema)
        self._add_enum_import()

        self.new_scope(name, schema.node, f"class {name}(str, Enum):")
        self.register_type(schema.node.id, schema, name)

        for enumerant in schema.node.enum.enumerants:
            value = enumerant.name
            name = enumerant.name

            if enumerant.name in keyword.kwlist:
                # Avoid naming collisions with Python keywords.
                name += "_"

            self.scope.add_line(f'{name}: str = "{value}"')

        self.return_from_scope()

        return None

    def gen_generic(self, schema: Any) -> list[str]:
        """Generate a `generic` type variable.

        Args:
            schema (Any): The schema to generate the `generic` object out of.

        Returns:
            list[str]: The list of registered generic type variables.
        """
        self._add_typing_import("TypeVar")
        self._add_typing_import("Generic")

        generic_params: list[str] = [param.name for param in schema.node.parameters]
        referenced_params: list[str] = []

        for field, _ in zip(schema.node.struct.fields, schema.as_struct().fields_list):
            if field.slot.type.which() == "anyPointer" and field.slot.type.anyPointer.which() == "parameter":
                param = field.slot.type.anyPointer.parameter

                t = self.get_type_by_id(param.scopeId)

                if t is not None:
                    param_source = t.schema
                    source_params: list[str] = [param.name for param in param_source.node.parameters]
                    referenced_params.append(source_params[param.parameterIndex])

        return [self.register_type_var(param) for param in generic_params + referenced_params]

    def gen_slot(
        self,
        schema: Any,
        field: Any,
        raw_field: Any,
        registered_type: CapnpType,
        constructor_kwargs: list[str],
        init_choices: list[tuple[str, str]],
    ) -> None:
        """Generate a slot of a type that is yet to be determined.

        Args:
            schema (Any): The schema to extract the slot from.
            field (Any): The field node within the schema that describes this slot.
            raw_field (Any): The raw pycapnp field object that belongs to `field`.
            registered_type (CapnpType): The registered type of the enclosing struct.
            constructor_kwargs (list[str]): Accumulator for the keyword arguments of the generated `__init__` stub.
            init_choices (list[tuple[str, str]]): Accumulator for the (field name, type name) pairs offered by `init()`.
        """

        def gen_list_slot():
            """Generate a slot that contains a `list`."""
            list_slot_type: CapnpElementType = field.slot.type.list.elementType.which()

            if list_slot_type == CapnpElementType.STRUCT:
                if not self.is_type_id_known(field.slot.type.list.elementType.struct.typeId):
                    self.generate_nested(raw_field.schema.elementType)

            elif list_slot_type == CapnpElementType.ENUM:
                if not self.is_type_id_known(field.slot.type.list.elementType.enum.typeId):
                    self.generate_nested(raw_field.schema.elementType)

            type_name = self.get_type_name(field.slot.type.list.elementType)
            field_py_code = f"{field.name}: List[{type_name}]"
            self.scope.add_line(field_py_code)
            constructor_kwargs.append(field_py_code)
            self._add_typing_import("List")

        def gen_python_type_slot():
            """Generate a slot that contains a regular Python type."""
            python_type_name: str = CAPNP_TYPE_TO_PYTHON[field_slot_type]

            field_py_code = f"{field.name}: {python_type_name}"
            self.scope.add_line(field_py_code)
            constructor_kwargs.append(field_py_code)

        def gen_enum_slot():
            """Generate a slot that contains an `enum`."""
            if not self.is_type_id_known(field.slot.type.enum.typeId):
                try:
                    self.generate_nested(raw_field.schema)

                except NoParentError:
                    pass

            type_name = self.get_type_name(field.slot.type)
            field_py_code = f"{field.name}: {type_name}"
            self.scope.add_line(field_py_code)
            constructor_kwargs.append(field_py_code)

        def gen_struct_slot():
            """Generate a slot that contains a `struct`."""
            elem_type = raw_field.schema

            if not self.is_type_id_known(elem_type.node.id):
                self.gen_struct(elem_type)

            type_name = self.get_type_name(field.slot.type)
            field_py_code = f"{field.name}: {type_name}"
            self.scope.add_line(field_py_code)
            constructor_kwargs.append(field_py_code)
            init_choices.append((field.name, type_name))

        def gen_any_pointer_slot():
            """Generate a slot that contains an `any_pointer` object."""
            param = field.slot.type.anyPointer.parameter
            type_name = registered_type.generic_params[param.parameterIndex]
            field_py_code = f"{field.name}: {type_name}"
            self.scope.add_line(field_py_code)
            constructor_kwargs.append(field_py_code)

        field_slot_type = field.slot.type.which()

        if field_slot_type == CapnpSlotType.LIST:
            gen_list_slot()

        elif field_slot_type in CAPNP_TYPE_TO_PYTHON:
            gen_python_type_slot()

        elif field_slot_type == CapnpSlotType.ENUM:
            gen_enum_slot()

        elif field_slot_type == CapnpSlotType.STRUCT:
            gen_struct_slot()

        elif field_slot_type == CapnpSlotType.ANY_POINTER:
            gen_any_pointer_slot()

        else:
            raise AssertionError(f"{schema.node.displayName}: {field.name}: " f"{field_slot_type}")

    def gen_struct(self, schema: Any, type_name: str = "") -> CapnpType:
        """Generate a `struct` object.

        Args:
            schema (Any): The schema to generate the `struct` object out of.
            type_name (str, optional): A type name to override the display name of the struct. Defaults to "".

        Returns:
            Type: The `struct`-type module that was generated.
        """
        assert schema.node.which() == CapnpElementType.STRUCT

        imported = self.register_import(schema)

        if imported is not None:
            return imported

        if not type_name:
            type_name = self.get_display_name(schema)

        registered_params: list[str] = []
        if schema.node.isGeneric:
            registered_params = self.gen_generic(schema)

        if registered_params:
            scope_decl_line = f"class {type_name}(Generic[{', '.join(registered_params)}]):"

        else:
            scope_decl_line = f"class {type_name}:"

        self.new_scope(type_name, schema.node, scope_decl_line)

        registered_type: CapnpType = self.register_type(schema.node.id, schema, name=type_name)
        registered_type.generic_params = registered_params
        type_name = registered_type.name
        definition_has_body = False

        init_choices: list[tuple[str, str]] = []
        constructor_kwargs: list[str] = []

        for field, raw_field in zip(schema.node.struct.fields, schema.as_struct().fields_list):
            field_type = field.which()

            if field_type == CapnpFieldType.SLOT:
                definition_has_body = True
                self.gen_slot(schema, field, raw_field, registered_type, constructor_kwargs, init_choices)

            elif field_type == CapnpFieldType.GROUP:
                group_name = field.name[0].upper() + field.name[1:]

                assert group_name != field.name

                raw_schema = raw_field.schema
                group_name = self.gen_struct(raw_schema, type_name=group_name).name
                field_py_code = f"{field.name}: {group_name}"
                self.scope.add_line(field_py_code)
                constructor_kwargs.append(field_py_code)
                definition_has_body = True
                init_choices.append((field.name, group_name))

            else:
                raise AssertionError(f"{schema.node.displayName}: {field.name}: " f"{field.which()}")

        if not registered_type.scope.is_root:
            scoped_name = f"{registered_type.scope}.{type_name}"

        else:
            scoped_name = type_name

        self.scope.add_line("@staticmethod")
        self.scope.add_line(f"def from_bytes(data: bytes) -> {scoped_name}: ...")
        self.scope.add_line("def to_bytes(self) -> bytes: ...")
        definition_has_body = True

        if schema.node.struct.discriminantCount:
            literals = ", ".join(
                f'Literal["{field.name}"]' for field in schema.node.struct.fields if field.discriminantValue != 65535
            )
            self._add_typing_import("Literal")
            self._add_typing_import("Union")
            self.scope.add_line(f"def which(self) -> Union[{literals}]: ...")
            definition_has_body = True

        if constructor_kwargs:
            kwargs = ", ".join(f"{kwarg} = ..." for kwarg in constructor_kwargs)
            self.scope.add_line(f"def __init__(self, *, {kwargs}) -> None: ...")
            definition_has_body = True

        if len(init_choices) > 1:
            self._add_typing_import("Literal")
            self._add_typing_import("overload")

            for field_name, field_type in init_choices:

                self.scope.add_line("@overload")
                self.scope.add_line(f'def init(self, name: Literal["{field_name}"])' f" -> {field_type}: ...")

        elif len(init_choices) == 1:
            self._add_typing_import("Literal")
            field_name, field_type = init_choices[0]
            self.scope.add_line(f'def init(self, name: Literal["{field_name}"])' f" -> {field_type}: ...")

        if not definition_has_body:
            self.scope.add_line("pass")

        self.return_from_scope()

        return registered_type

    def generate_nested(self, schema: Any) -> None:
        """Generate the type for a nested schema.

        Args:
            schema (Any): The schema to generate types for.

        Raises:
            AssertionError: If the schema belongs to an unknown type.
        """
        if schema.node.id in self.type_map:
            return  # already generated type hints for this type

        node_type = schema.node.which()

        if node_type == "const":
            self.gen_const(schema)

        elif node_type == "struct":
            self.gen_struct(schema)

        elif node_type == "enum":
            self.gen_enum(schema)

        elif node_type == "interface":
            logger.warning("Skipping interface: not implemented")

        else:
            raise AssertionError(node_type)

    def generate_recursive(self):
        """Generate types for all nested nodes, recursively."""
        for node in self._module.schema.node.nestedNodes:
            self.generate_nested(self._module.schema.get_nested(node.name))

    def register_import(self, schema) -> CapnpType | None:
        """Determine whether a schema is imported from the base module.

        If so, the type definition that the schema contains is added to the type registry.

        Args:
            schema (Any): The schema to check.

        Returns:
            Type | None: The type of the import, if the schema is imported,
            or None if the schema defines the base module itself.
        """
        module_name, definition_name = schema.node.displayName.split(":")

        if module_name == self.base_module_name:
            # This is the base module, not an import.
            return None

        common_path: str
        matching_path: pathlib.Path | None = None

        # Find the path of the parent module, from which this schema is imported.
        for path, module in self._module_registry.values():
            for node in module.schema.node.nestedNodes:
                if node.id == schema.node.id:
                    matching_path = pathlib.Path(path)
                    break

        # Since this is an import, there must be a parent module.
        assert matching_path is not None

        # Find the relative path to go from the parent module, to this imported module.
        common_path = os.path.commonpath([self._module_path, matching_path])

        relative_module_path = self._module_path.relative_to(common_path)
        relative_import_path = matching_path.relative_to(common_path)

        # Shape the relative path to a relative Python import statement.
        python_import_path = "." * len(relative_module_path.parents) + replace_capnp_suffix(
            ".".join(relative_import_path.parts)
        )
        self._add_import(f"from {python_import_path} import {definition_name}")

        return self.register_type(schema.node.id, schema, name=definition_name, scope=self.scope.root)

    def register_type_var(self, name: str) -> str:
        """Find and register the full name of a type variable, which includes its scopes.

        Args:
            name (str): The type name to register.

        Returns:
            str: The full name in the format scope0_scope1_..._scopeN_name, including the type name to register.
        """
        full_name: str = self.scope.trace_as_str("_") + f"_{name}"

        self.type_vars.add(full_name)
        return full_name

    def register_type(self, type_id: int, schema: Any, name: str = "", scope: Scope | None = None) -> CapnpType:
        """Register a new type in the writer's registry of types.

        Args:
            type_id (int): The identification number of the type.
            schema (Any): The schema that defines the type.
            name (str, optional): A name to specify when overriding the type name. Defaults to "".
            scope (Scope | None, optional): The scope in which the type is defined. Defaults to None.

        Returns:
            Type: The registered type.
        """
        if not name:
            name = self.get_display_name(schema)

        if scope is None:
            scope = self.scope.parent

        if scope is None:
            raise ValueError(f"No valid scope was found for registering the type '{name}'.")

        self.type_map[type_id] = retval = CapnpType(schema=schema, name=name, scope=scope)
        return retval

    def is_type_id_known(self, type_id: int) -> bool:
        """Check whether a type ID was previously registered.

        Args:
            type_id (int): The type ID to check.

        Returns:
            bool: True, if the type ID is known, False otherwise.
        """
        return type_id in self.type_map

    def get_type_by_id(self, type_id: int) -> CapnpType:
        """Look up a type in the type registry, by means of its ID.

        Args:
            type_id (int): The identification number of the type.

        Raises:
            KeyError: If the type ID was not found in the registry.

        Returns:
            Type: The type, if it exists.
        """
        if self.is_type_id_known(type_id):
            return self.type_map[type_id]

        else:
            raise KeyError(f"The type ID '{type_id}' was not found in the type registry.")

    def new_scope(self, name: str, node: Any, scope_heading: str) -> None:
        """Creates a new scope below the scope of the provided node.

        Args:
            name (str): The name of the new scope.
            node (Any): The node whose scope is the parent scope of the new scope.
            scope_heading (str): The line of code that starts this new scope.
        """
        try:
            parent_scope = self.scopes_by_id[node.scopeId]

        except KeyError as e:
            raise NoParentError(f"The scope with name '{name}' has no parent.") from e

        # Add the heading of the scope to the parent scope.
        parent_scope.add_line(scope_heading)

        # Then, make a new scope that is one indent level deeper.
        child_scope = Scope(name=name, id=node.id, parent=parent_scope, return_scope=self.scope)

        self.scope = child_scope
        self.scopes_by_id[node.id] = child_scope

    def return_from_scope(self):
        """Return from the current scope."""
        # Cannot return from the root scope, as it is the highest of all scopes.
        assert not self.scope.is_root

        scope = self.scope
        scope.parent.lines += scope.lines
        self.scope = scope.return_scope

    def get_type_name(self, type_reader: capnp._DynamicStructReader) -> str:
        """Extract the type name from a type reader.

        The output type name is prepended by the scope name, if there is a parent scope.

        Args:
            type_reader (capnp._DynamicStructReader): The type reader to get the type name from.

        Returns:
            str: The extracted type name.
        """
        try:
            return CAPNP_TYPE_TO_PYTHON[type_reader.which()]

        except KeyError:
            pass

        type_reader_type = type_reader.which()

        if type_reader_type == "struct":
            element_type = self.get_type_by_id(type_reader.struct.typeId)
            type_name = element_type.name
            generic_params = []

            for brand_scope in type_reader.struct.brand.scopes:
                brand_scope_type = brand_scope.which()

                if brand_scope_type == "inherit":
                    parent_scope = self.get_type_by_id(brand_scope.scopeId)
                    generic_params.extend(parent_scope.generic_params)

                elif brand_scope_type == "bind":
                    for bind in brand_scope.bind:
                        generic_params.append(self.get_type_name(bind.type))

                else:
                    raise TypeError(f"Unknown brand scope '{brand_scope_type}'.")

            if generic_params:
                type_name += f"[{', '.join(generic_params)}]"

        elif type_reader_type == "enum":
            element_type = self.get_type_by_id(type_reader.enum.typeId)
            type_name = element_type.name

        else:
            raise TypeError(f"Unknown type reader type '{type_reader_type}'.")

        if not element_type.scope.is_root:
            return f"{element_type.scope}.{type_name}"

        else:
            return type_name

    def dumps_pyi(self) -> str:
        """Generates string output for the *.pyi stub file that provides type hinting.

        Returns:
            str: The output string.
        """
        assert self.scope.is_root

        out = []
        out.append(self.docstring)
        out.extend(self.imports)
        out.append("")

        if self.type_vars:
            for name in sorted(self.type_vars):
                out.append(f'{name} = TypeVar("{name}")')
            out.append("")

        out.extend(self.scope.lines)
        return "\n".join(out)

    def dumps_py(self) -> str:
        """Generates string output for the *.py stub file that handles the import of capnproto schemas.

        Returns:
            str: The output string.
        """
        assert self.scope.is_root

        out = []
        out.append(self.docstring)
        out.append("import os")
        out.append("import capnp")
        out.append("capnp.remove_import_hook()")
        out.append("here = os.path.dirname(os.path.abspath(__file__))")

        out.append(f'module_file = os.path.abspath(os.path.join(here, "{self.base_module_name}"))')

        for scope in self.scopes_by_id.values():
            if scope.parent is not None and scope.parent.is_root:
                out.append(f"{scope.name} = capnp.load(module_file).{scope.name}")

        return "\n".join(out)
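To make the writer's output shape concrete: for a hypothetical schema `robot.capnp` that defines a single top-level struct `Pose`, the `dumps_py()` method above would emit roughly the following loader module (schema and struct names are invented for illustration; the actual path in `module_file` comes from the schema's display name):

"""This is an automatically generated stub for `robot.capnp`."""
import os
import capnp
capnp.remove_import_hook()
here = os.path.dirname(os.path.abspath(__file__))
module_file = os.path.abspath(os.path.join(here, "robot.capnp"))
Pose = capnp.load(module_file).Pose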
poetry.lock
@@ -0,0 +1,330 @@
[[package]]
name = "atomicwrites"
version = "1.4.0"
description = "Atomic file writes."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[[package]]
name = "attrs"
version = "21.4.0"
description = "Classes Without Boilerplate"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[package.extras]
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]

[[package]]
name = "black"
version = "22.6.0"
description = "The uncompromising code formatter."
category = "main"
optional = false
python-versions = ">=3.6.2"

[package.dependencies]
click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
pathspec = ">=0.9.0"
platformdirs = ">=2"
tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}

[package.extras]
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.7.4)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]

[[package]]
name = "click"
version = "8.1.3"
description = "Composable command line interface toolkit"
category = "main"
optional = false
python-versions = ">=3.7"

[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}

[[package]]
name = "colorama"
version = "0.4.5"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[[package]]
name = "isort"
version = "5.10.1"
description = "A Python utility / library to sort Python imports."
category = "main"
optional = false
python-versions = ">=3.6.1,<4.0"

[package.extras]
pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]
plugins = ["setuptools"]

[[package]]
name = "more-itertools"
version = "8.13.0"
description = "More routines for operating on iterables, beyond itertools"
category = "dev"
optional = false
python-versions = ">=3.5"

[[package]]
name = "mypy-extensions"
version = "0.4.3"
description = "Experimental type system extensions for programs checked with the mypy typechecker."
category = "main"
optional = false
python-versions = "*"

[[package]]
name = "packaging"
version = "21.3"
description = "Core utilities for Python packages"
category = "dev"
optional = false
python-versions = ">=3.6"

[package.dependencies]
pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"

[[package]]
name = "pathspec"
version = "0.9.0"
description = "Utility library for gitignore style pattern matching of file paths."
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"

[[package]]
name = "platformdirs"
version = "2.5.2"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "main"
optional = false
python-versions = ">=3.7"

[package.extras]
docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]

[[package]]
name = "pluggy"
version = "0.13.1"
description = "plugin and hook calling mechanisms for python"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[package.extras]
dev = ["pre-commit", "tox"]

[[package]]
name = "py"
version = "1.11.0"
description = "library with cross-python path, ini-parsing, io, code, log facilities"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[[package]]
name = "pycapnp"
version = "1.1.1"
description = "A cython wrapping of the C++ Cap'n Proto library"
category = "main"
optional = false
python-versions = "*"

[[package]]
name = "pyparsing"
version = "3.0.9"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
category = "dev"
optional = false
python-versions = ">=3.6.8"

[package.extras]
diagrams = ["railroad-diagrams", "jinja2"]

[[package]]
name = "pytest"
version = "5.4.3"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.5"

[package.dependencies]
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
attrs = ">=17.4.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
more-itertools = ">=4.0.0"
packaging = "*"
pluggy = ">=0.12,<1.0"
py = ">=1.5.0"
wcwidth = "*"

[package.extras]
checkqa-mypy = ["mypy (==v0.761)"]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]

[[package]]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
category = "main"
optional = false
python-versions = ">=3.7"

[[package]]
name = "typing-extensions"
version = "4.2.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"

[[package]]
name = "wcwidth"
version = "0.2.5"
description = "Measures the displayed width of unicode strings in a terminal"
category = "dev"
optional = false
python-versions = "*"

[metadata]
lock-version = "1.1"
python-versions = "^3.8"
content-hash = "cb72b821776054307543e305bf211ea56ad6e97d83b9b443ec5087ddef10fa98"

[metadata.files]
atomicwrites = [
    {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
    {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
]
attrs = [
    {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
    {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
]
black = [
    {file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"},
    {file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"},
    {file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"},
    {file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"},
    {file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"},
    {file = "black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"},
    {file = "black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"},
    {file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"},
    {file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"},
    {file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"},
    {file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"},
    {file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"},
    {file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"},
    {file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"},
    {file = "black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"},
    {file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"},
    {file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"},
    {file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"},
    {file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"},
    {file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"},
    {file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"},
    {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"},
    {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"},
]
click = [
    {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
    {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
]
colorama = [
    {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
    {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
]
isort = [
    {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
    {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
]
more-itertools = [
    {file = "more-itertools-8.13.0.tar.gz", hash = "sha256:a42901a0a5b169d925f6f217cd5a190e32ef54360905b9c39ee7db5313bfec0f"},
    {file = "more_itertools-8.13.0-py3-none-any.whl", hash = "sha256:c5122bffc5f104d37c1626b8615b511f3427aa5389b94d61e5ef8236bfbc3ddb"},
]
mypy-extensions = [
    {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
    {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
packaging = [
    {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
    {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
pathspec = [
    {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
    {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
]
platformdirs = [
    {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
    {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
]
pluggy = [
    {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
    {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
]
py = [
    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pycapnp = [
    {file = "pycapnp-1.1.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:48169ebf71b032c83348320160abef3992a9b17e7a588b372bb256426cad7564"},
    {file = "pycapnp-1.1.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1138be597238ca1a5837fd9c81fe10743c8826750e6d2b112d43d3c551bc474f"},
    {file = "pycapnp-1.1.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:313dbdf28d0f93e22df5db1ba1f0b2bf568e66f55487794ab431175fa2235d57"},
    {file = "pycapnp-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0627a8f981c7ad509197e4f8b4be613cc36f1c287e3f8917b77c04c50ac8b31a"},
    {file = "pycapnp-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:97bbd1106fa8eb46a0a6acb1ab81c8dff2ab67b0e1cb679465f368784a85d894"},
    {file = "pycapnp-1.1.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e4808d524f252d8883db0f0a72f00512bdc137159272ae9436a222b7f3266ef0"},
    {file = "pycapnp-1.1.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:99903494567d1bec4e3eed45f8ddb67c46552ea10dc148ab12f489b02a605c2a"},
    {file = "pycapnp-1.1.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2dc3f390f4fd46ad84d1dee6f135770fa538b4964cd2d5430b96806958e86f15"},
    {file = "pycapnp-1.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:295f9c364d662a603f3454dc9a8f294a55cf7cf0f1d527032e4d933fcacb0574"},
    {file = "pycapnp-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f5d97c4e0ab7c84308153ee23200d75b9253b2e8617b7b9f838a1c114d448ce3"},
    {file = "pycapnp-1.1.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:97257ab9e33c223e2c04541c5066c09f91575c5ba7205af94c518461f7222e1a"},
    {file = "pycapnp-1.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a5ce83e1c1e492a58031f55a86f21402e067a12b7ffeb4747b3503eff87fb16e"},
    {file = "pycapnp-1.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4d824455d6e624ced4af5eaea37d88b731de078bf250f3158728ed4f8435e1bb"},
    {file = "pycapnp-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afbd0705d22d3179d8c3ae0bc37c4fe1c9e084f5266a1930fba6ee690cfe6703"},
    {file = "pycapnp-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:d4664c5b982a89ffc936e55963c333b68919d6a008f96f6989fff17c379a047c"},
    {file = "pycapnp-1.1.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:72d5259060f54698ca2cd1c72f57ec301a7505191c14efa3a58b536935afc011"},
    {file = "pycapnp-1.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:78b5b23c796ec956e438f04b787e31b4b059ce65ec225f4476ba7ff662d9aea8"},
    {file = "pycapnp-1.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0bac4b4af672229dcf40aa16108f6363e4dfc38ef424954a19be797eccd6da25"},
    {file = "pycapnp-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33165fa2bafe1916b182b123e1edb1d5288a2f4872e070bf3cfe8965c87add6d"},
    {file = "pycapnp-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:5652f9730f7fa695df20b7f5613a31d27bbb7827392a93b8ef32ab60504ef007"},
    {file = "pycapnp-1.1.1.tar.gz", hash = "sha256:45e77810624b9d2b37cbdc4d1854ff9984b3dea20d1b3f7dd4a65403263a5aea"},
]
pyparsing = [
    {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
    {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
]
pytest = [
    {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"},
    {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"},
]
tomli = [
    {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
    {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
typing-extensions = [
    {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
    {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
]
wcwidth = [
    {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
    {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
]
pyproject.toml
@@ -0,0 +1,21 @@
[tool.poetry]
name = "capnp-stub-generator"
version = "1.0.0"
description = ""
authors = ["Adrian Figueroa <adrian.figueroa@metirionic.com>"]

[tool.poetry.dependencies]
python = "^3.8"
black = "^22.6.0"
isort = "^5.10.1"
pycapnp = "^1.1.1"

[tool.poetry.dev-dependencies]
pytest = "^5.2"

[tool.poetry.scripts]
capnp-stub-generator = "capnp_stub_generator.cli:main"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
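With the `[tool.poetry.scripts]` entry above, installing the package exposes a `capnp-stub-generator` executable that forwards to `cli.main`. The same entry point can be called from Python; the glob below is a made-up example:

from capnp_stub_generator.cli import main

# Equivalent to running: capnp-stub-generator -p "schemas/**/*.capnp" -r
main(["-p", "schemas/**/*.capnp", "-r"])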