1.7.33 sync

This commit is contained in:
Ayush Saini 2024-03-10 15:37:50 +05:30
parent 0aad008699
commit ea0d105e12
23 changed files with 405 additions and 228 deletions

View file

@@ -288,14 +288,12 @@ class DirectoryScan:
     ) -> None:
         """Scan provided path and add module entries to provided list."""
         try:
-            # Special case: let's save some time and skip the whole 'babase'
-            # package since we know it doesn't contain any meta tags.
             fullpath = Path(path, subpath)
+            # Note: skipping hidden dirs (starting with '.').
             entries = [
                 (path, Path(subpath, name))
                 for name in os.listdir(fullpath)
-                # Actually scratch that for now; trying to avoid special cases.
-                # if name != 'babase'
+                if not name.startswith('.')
             ]
         except PermissionError:
             # Expected sometimes.

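The change above drops the abandoned 'babase' special case and instead skips dot-prefixed (hidden) entries while scanning. A minimal standalone sketch of the same filtering; the path used is an assumption for illustration:

    import os
    from pathlib import Path

    def visible_entries(fullpath: Path) -> list[str]:
        # Skip hidden entries (names starting with '.'), mirroring the
        # list-comprehension filter added in this commit.
        return [
            name for name in os.listdir(fullpath) if not name.startswith('.')
        ]

    # Example (hypothetical path):
    # print(visible_entries(Path('/tmp')))
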
View file

@@ -75,9 +75,9 @@ class ResponseData:
     delay_seconds: Annotated[float, IOAttrs('d', store_default=False)] = 0.0
     login: Annotated[str | None, IOAttrs('l', store_default=False)] = None
     logout: Annotated[bool, IOAttrs('lo', store_default=False)] = False
-    dir_manifest: Annotated[
-        str | None, IOAttrs('man', store_default=False)
-    ] = None
+    dir_manifest: Annotated[str | None, IOAttrs('man', store_default=False)] = (
+        None
+    )
     uploads: Annotated[
         tuple[list[str], str, dict] | None, IOAttrs('u', store_default=False)
     ] = None
@@ -97,9 +97,9 @@ class ResponseData:
     input_prompt: Annotated[
         tuple[str, bool] | None, IOAttrs('inp', store_default=False)
     ] = None
-    end_message: Annotated[
-        str | None, IOAttrs('em', store_default=False)
-    ] = None
+    end_message: Annotated[str | None, IOAttrs('em', store_default=False)] = (
+        None
+    )
     end_message_end: Annotated[str, IOAttrs('eme', store_default=False)] = '\n'
     end_command: Annotated[
         tuple[str, dict] | None, IOAttrs('ec', store_default=False)

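Many hunks in this commit (here and in several files below) are mechanical re-wraps that appear to match the formatter's newer parenthesized-assignment output: when an annotated assignment runs long, the annotation stays on one line and the right-hand side (or the whole annotation) is wrapped in parentheses, rather than breaking inside the Annotated[...] subscript. A schematic before/after, with a hypothetical field name:

    from typing import Annotated

    # Old wrapping style: break inside the Annotated[...] subscript.
    # field_name: Annotated[
    #     str | None, IOAttrs('x', store_default=False)
    # ] = None

    # New wrapping style: annotation on one line; parenthesize the value.
    # field_name: Annotated[str | None, IOAttrs('x', store_default=False)] = (
    #     None
    # )
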
View file

@@ -63,9 +63,9 @@ class PrivateHostingConfig:
     randomize: bool = False
     tutorial: bool = False
     custom_team_names: tuple[str, str] | None = None
-    custom_team_colors: tuple[
-        tuple[float, float, float], tuple[float, float, float]
-    ] | None = None
+    custom_team_colors: (
+        tuple[tuple[float, float, float], tuple[float, float, float]] | None
+    ) = None
     playlist: list[dict[str, Any]] | None = None
     exit_minutes: float = 120.0
     exit_minutes_unclean: float = 180.0

View file

@@ -134,9 +134,9 @@ class ServerConfig:
     team_names: tuple[str, str] | None = None

     # Team colors (teams mode only).
-    team_colors: tuple[
-        tuple[float, float, float], tuple[float, float, float]
-    ] | None = None
+    team_colors: (
+        tuple[tuple[float, float, float], tuple[float, float, float]] | None
+    ) = None

     # Whether to enable the queue where players can line up before entering
     # your server. Disabling this can be used as a workaround to deal with

View file

@@ -18,10 +18,10 @@ if TYPE_CHECKING:
 @ioprepped
 @dataclass
 class DirectoryManifestFile:
-    """Describes metadata and hashes for a file in a manifest."""
+    """Describes a file in a manifest."""

-    filehash: Annotated[str, IOAttrs('h')]
-    filesize: Annotated[int, IOAttrs('s')]
+    hash_sha256: Annotated[str, IOAttrs('h')]
+    size: Annotated[int, IOAttrs('s')]


 @ioprepped
@@ -67,7 +67,7 @@ class DirectoryManifest:
         return (
             filepath,
             DirectoryManifestFile(
-                filehash=sha.hexdigest(), filesize=filesize
+                hash_sha256=sha.hexdigest(), size=filesize
             ),
         )

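For reference, an entry matching the renamed fields above can be produced with stdlib hashlib. This is a hedged sketch, not the project's actual helper (which presumably streams large files in chunks rather than reading them whole):

    import hashlib
    from pathlib import Path

    def manifest_entry(filepath: Path) -> tuple[str, int]:
        """Return (sha256 hex digest, size) for a file."""
        data = filepath.read_bytes()
        # hash_sha256 and size correspond to the renamed dataclass fields.
        return hashlib.sha256(data).hexdigest(), len(data)
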
View file

@@ -52,7 +52,7 @@ if TYPE_CHECKING:

 # Build number and version of the ballistica binary we expect to be
 # using.
-TARGET_BALLISTICA_BUILD = 21762
+TARGET_BALLISTICA_BUILD = 21766
 TARGET_BALLISTICA_VERSION = '1.7.33'
@@ -287,9 +287,9 @@ def _setup_certs(contains_python_dist: bool) -> None:
         import certifi

         # Let both OpenSSL and requests (if present) know to use this.
-        os.environ['SSL_CERT_FILE'] = os.environ[
-            'REQUESTS_CA_BUNDLE'
-        ] = certifi.where()
+        os.environ['SSL_CERT_FILE'] = os.environ['REQUESTS_CA_BUNDLE'] = (
+            certifi.where()
+        )


 def _setup_paths(

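The chained assignment above points both OpenSSL (via SSL_CERT_FILE) and the requests library (via REQUESTS_CA_BUNDLE) at certifi's CA bundle. A standalone sketch, assuming certifi is installed:

    import os

    import certifi

    # One chained assignment covers both consumers of the CA bundle path.
    os.environ['SSL_CERT_FILE'] = os.environ['REQUESTS_CA_BUNDLE'] = (
        certifi.where()
    )
    print(os.environ['SSL_CERT_FILE'])
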
View file

@@ -188,7 +188,7 @@ class MainMenuActivity(bs.Activity[bs.Player, bs.Team]):
                     'scale': 1,
                     'vr_depth': -60,
                     'position': pos,
-                    'text': bs.Lstr(resource='testBuildText'),
+                    'text': 'BCS Build',
                 },
             )
         )

View file

@@ -60,6 +60,16 @@ def party_icon_activate(origin: Sequence[float]) -> None:
         logging.warning('party_icon_activate: no classic.')


+def on_button_press_x() -> None:
+    import ui_hooks
+
+    ui_hooks.on_button_xy_press("X")
+
+
+def on_button_press_y() -> None:
+    import ui_hooks
+
+    ui_hooks.on_button_xy_press("Y")
+
+
 def quit_window(quit_type: babase.QuitType) -> None:
     from babase import app

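Note that the Y handler as committed passed "X"; it is corrected to "Y" above, which is almost certainly the intent given the function name. The ui_hooks module itself is not part of this diff; a minimal sketch of what it presumably provides (all names here are assumptions):

    # ui_hooks.py (hypothetical; the real module is not shown in this commit).

    def on_button_xy_press(button: str) -> None:
        """Handle a controller X/Y button press forwarded from the hooks."""
        assert button in ('X', 'Y')
        print(f'{button} button pressed')
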
View file

@@ -83,57 +83,46 @@ if TYPE_CHECKING:
     class _CallNoArgs(Generic[OutT]):
         """Single argument variant of call wrapper."""

-        def __init__(self, _call: Callable[[], OutT]):
-            ...
+        def __init__(self, _call: Callable[[], OutT]): ...

-        def __call__(self) -> OutT:
-            ...
+        def __call__(self) -> OutT: ...

     class _Call1Arg(Generic[In1T, OutT]):
         """Single argument variant of call wrapper."""

-        def __init__(self, _call: Callable[[In1T], OutT]):
-            ...
+        def __init__(self, _call: Callable[[In1T], OutT]): ...

-        def __call__(self, _arg1: In1T) -> OutT:
-            ...
+        def __call__(self, _arg1: In1T) -> OutT: ...

     class _Call2Args(Generic[In1T, In2T, OutT]):
         """Two argument variant of call wrapper"""

-        def __init__(self, _call: Callable[[In1T, In2T], OutT]):
-            ...
+        def __init__(self, _call: Callable[[In1T, In2T], OutT]): ...

-        def __call__(self, _arg1: In1T, _arg2: In2T) -> OutT:
-            ...
+        def __call__(self, _arg1: In1T, _arg2: In2T) -> OutT: ...

     class _Call3Args(Generic[In1T, In2T, In3T, OutT]):
         """Three argument variant of call wrapper"""

-        def __init__(self, _call: Callable[[In1T, In2T, In3T], OutT]):
-            ...
+        def __init__(self, _call: Callable[[In1T, In2T, In3T], OutT]): ...

-        def __call__(self, _arg1: In1T, _arg2: In2T, _arg3: In3T) -> OutT:
-            ...
+        def __call__(self, _arg1: In1T, _arg2: In2T, _arg3: In3T) -> OutT: ...

     class _Call4Args(Generic[In1T, In2T, In3T, In4T, OutT]):
         """Four argument variant of call wrapper"""

-        def __init__(self, _call: Callable[[In1T, In2T, In3T, In4T], OutT]):
-            ...
+        def __init__(self, _call: Callable[[In1T, In2T, In3T, In4T], OutT]): ...

         def __call__(
             self, _arg1: In1T, _arg2: In2T, _arg3: In3T, _arg4: In4T
-        ) -> OutT:
-            ...
+        ) -> OutT: ...

     class _Call5Args(Generic[In1T, In2T, In3T, In4T, In5T, OutT]):
         """Five argument variant of call wrapper"""

         def __init__(
             self, _call: Callable[[In1T, In2T, In3T, In4T, In5T], OutT]
-        ):
-            ...
+        ): ...

         def __call__(
             self,
@@ -142,16 +131,14 @@ if TYPE_CHECKING:
             _arg3: In3T,
             _arg4: In4T,
             _arg5: In5T,
-        ) -> OutT:
-            ...
+        ) -> OutT: ...

     class _Call6Args(Generic[In1T, In2T, In3T, In4T, In5T, In6T, OutT]):
         """Six argument variant of call wrapper"""

         def __init__(
             self, _call: Callable[[In1T, In2T, In3T, In4T, In5T, In6T], OutT]
-        ):
-            ...
+        ): ...

         def __call__(
             self,
@@ -161,8 +148,7 @@ if TYPE_CHECKING:
             _arg4: In4T,
             _arg5: In5T,
             _arg6: In6T,
-        ) -> OutT:
-            ...
+        ) -> OutT: ...

     class _Call7Args(Generic[In1T, In2T, In3T, In4T, In5T, In6T, In7T, OutT]):
         """Seven argument variant of call wrapper"""
@@ -170,8 +156,7 @@ if TYPE_CHECKING:
         def __init__(
             self,
             _call: Callable[[In1T, In2T, In3T, In4T, In5T, In6T, In7T], OutT],
-        ):
-            ...
+        ): ...

         def __call__(
             self,
@@ -182,50 +167,43 @@ if TYPE_CHECKING:
             _arg5: In5T,
             _arg6: In6T,
             _arg7: In7T,
-        ) -> OutT:
-            ...
+        ) -> OutT: ...

     # No arg call; no args bundled.
     # noinspection PyPep8Naming
     @overload
-    def Call(call: Callable[[], OutT]) -> _CallNoArgs[OutT]:
-        ...
+    def Call(call: Callable[[], OutT]) -> _CallNoArgs[OutT]: ...

     # 1 arg call; 1 arg bundled.
     # noinspection PyPep8Naming
     @overload
-    def Call(call: Callable[[In1T], OutT], arg1: In1T) -> _CallNoArgs[OutT]:
-        ...
+    def Call(call: Callable[[In1T], OutT], arg1: In1T) -> _CallNoArgs[OutT]: ...

     # 1 arg call; no args bundled.
     # noinspection PyPep8Naming
     @overload
-    def Call(call: Callable[[In1T], OutT]) -> _Call1Arg[In1T, OutT]:
-        ...
+    def Call(call: Callable[[In1T], OutT]) -> _Call1Arg[In1T, OutT]: ...

     # 2 arg call; 2 args bundled.
     # noinspection PyPep8Naming
     @overload
     def Call(
         call: Callable[[In1T, In2T], OutT], arg1: In1T, arg2: In2T
-    ) -> _CallNoArgs[OutT]:
-        ...
+    ) -> _CallNoArgs[OutT]: ...

     # 2 arg call; 1 arg bundled.
     # noinspection PyPep8Naming
     @overload
     def Call(
         call: Callable[[In1T, In2T], OutT], arg1: In1T
-    ) -> _Call1Arg[In2T, OutT]:
-        ...
+    ) -> _Call1Arg[In2T, OutT]: ...

     # 2 arg call; no args bundled.
     # noinspection PyPep8Naming
     @overload
     def Call(
         call: Callable[[In1T, In2T], OutT]
-    ) -> _Call2Args[In1T, In2T, OutT]:
-        ...
+    ) -> _Call2Args[In1T, In2T, OutT]: ...

     # 3 arg call; 3 args bundled.
     # noinspection PyPep8Naming
@@ -235,32 +213,28 @@ if TYPE_CHECKING:
         arg1: In1T,
         arg2: In2T,
         arg3: In3T,
-    ) -> _CallNoArgs[OutT]:
-        ...
+    ) -> _CallNoArgs[OutT]: ...

     # 3 arg call; 2 args bundled.
     # noinspection PyPep8Naming
     @overload
     def Call(
         call: Callable[[In1T, In2T, In3T], OutT], arg1: In1T, arg2: In2T
-    ) -> _Call1Arg[In3T, OutT]:
-        ...
+    ) -> _Call1Arg[In3T, OutT]: ...

     # 3 arg call; 1 arg bundled.
     # noinspection PyPep8Naming
     @overload
     def Call(
         call: Callable[[In1T, In2T, In3T], OutT], arg1: In1T
-    ) -> _Call2Args[In2T, In3T, OutT]:
-        ...
+    ) -> _Call2Args[In2T, In3T, OutT]: ...

     # 3 arg call; no args bundled.
     # noinspection PyPep8Naming
     @overload
     def Call(
         call: Callable[[In1T, In2T, In3T], OutT]
-    ) -> _Call3Args[In1T, In2T, In3T, OutT]:
-        ...
+    ) -> _Call3Args[In1T, In2T, In3T, OutT]: ...

     # 4 arg call; 4 args bundled.
     # noinspection PyPep8Naming
@@ -271,8 +245,7 @@ if TYPE_CHECKING:
         arg2: In2T,
         arg3: In3T,
         arg4: In4T,
-    ) -> _CallNoArgs[OutT]:
-        ...
+    ) -> _CallNoArgs[OutT]: ...

     # 4 arg call; 3 args bundled.
     # noinspection PyPep8Naming
@@ -282,8 +255,7 @@ if TYPE_CHECKING:
         arg1: In1T,
         arg2: In2T,
         arg3: In3T,
-    ) -> _Call1Arg[In4T, OutT]:
-        ...
+    ) -> _Call1Arg[In4T, OutT]: ...

     # 4 arg call; 2 args bundled.
     # noinspection PyPep8Naming
@@ -292,8 +264,7 @@ if TYPE_CHECKING:
         call: Callable[[In1T, In2T, In3T, In4T], OutT],
         arg1: In1T,
         arg2: In2T,
-    ) -> _Call2Args[In3T, In4T, OutT]:
-        ...
+    ) -> _Call2Args[In3T, In4T, OutT]: ...

     # 4 arg call; 1 arg bundled.
     # noinspection PyPep8Naming
@@ -301,16 +272,14 @@ if TYPE_CHECKING:
     def Call(
         call: Callable[[In1T, In2T, In3T, In4T], OutT],
         arg1: In1T,
-    ) -> _Call3Args[In2T, In3T, In4T, OutT]:
-        ...
+    ) -> _Call3Args[In2T, In3T, In4T, OutT]: ...

     # 4 arg call; no args bundled.
     # noinspection PyPep8Naming
     @overload
     def Call(
         call: Callable[[In1T, In2T, In3T, In4T], OutT],
-    ) -> _Call4Args[In1T, In2T, In3T, In4T, OutT]:
-        ...
+    ) -> _Call4Args[In1T, In2T, In3T, In4T, OutT]: ...

     # 5 arg call; 5 args bundled.
     # noinspection PyPep8Naming
@@ -322,8 +291,7 @@ if TYPE_CHECKING:
         arg3: In3T,
         arg4: In4T,
         arg5: In5T,
-    ) -> _CallNoArgs[OutT]:
-        ...
+    ) -> _CallNoArgs[OutT]: ...

     # 6 arg call; 6 args bundled.
     # noinspection PyPep8Naming
@@ -336,8 +304,7 @@ if TYPE_CHECKING:
         arg4: In4T,
         arg5: In5T,
         arg6: In6T,
-    ) -> _CallNoArgs[OutT]:
-        ...
+    ) -> _CallNoArgs[OutT]: ...

     # 7 arg call; 7 args bundled.
     # noinspection PyPep8Naming
@@ -351,12 +318,10 @@ if TYPE_CHECKING:
         arg5: In5T,
         arg6: In6T,
         arg7: In7T,
-    ) -> _CallNoArgs[OutT]:
-        ...
+    ) -> _CallNoArgs[OutT]: ...

     # noinspection PyPep8Naming
-    def Call(*_args: Any, **_keywds: Any) -> Any:
-        ...
+    def Call(*_args: Any, **_keywds: Any) -> Any: ...

     # (Type-safe Partial)
     # A convenient wrapper around functools.partial which adds type-safety

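These stubs exist only under TYPE_CHECKING so that type checkers see a distinct signature per arity; at runtime Call behaves like functools.partial. Roughly how it reads in practice; the import path is an assumption, since this commit does not show the module name:

    # Assuming something like: from efro.call import Call

    def add(a: int, b: int) -> int:
        return a + b

    # Bundle one arg now; a type checker sees _Call1Arg[int, int], so a
    # wrong arity or argument type is flagged, unlike with a bare
    # functools.partial.
    half_done = Call(add, 1)
    print(half_done(2))  # -> 3
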
View file

@@ -11,7 +11,13 @@ data formats in a nondestructive manner.
 from __future__ import annotations

 from efro.util import set_canonical_module_names
-from efro.dataclassio._base import Codec, IOAttrs, IOExtendedData
+from efro.dataclassio._base import (
+    Codec,
+    IOAttrs,
+    IOExtendedData,
+    IOMultiType,
+    EXTRA_ATTRS_ATTR,
+)
 from efro.dataclassio._prep import (
     ioprep,
     ioprepped,
@@ -29,20 +35,22 @@ from efro.dataclassio._api import (
 )

 __all__ = [
-    'JsonStyle',
     'Codec',
+    'DataclassFieldLookup',
+    'EXTRA_ATTRS_ATTR',
     'IOAttrs',
     'IOExtendedData',
-    'ioprep',
-    'ioprepped',
-    'will_ioprep',
-    'is_ioprepped_dataclass',
-    'DataclassFieldLookup',
-    'dataclass_to_dict',
-    'dataclass_to_json',
+    'IOMultiType',
+    'JsonStyle',
     'dataclass_from_dict',
     'dataclass_from_json',
+    'dataclass_to_dict',
+    'dataclass_to_json',
     'dataclass_validate',
+    'ioprep',
+    'ioprepped',
+    'is_ioprepped_dataclass',
+    'will_ioprep',
 ]

 # Have these things present themselves cleanly as 'thismodule.SomeClass'

View file

@@ -27,7 +27,7 @@ class JsonStyle(Enum):
     """Different style types for json."""

     # Single line, no spaces, no sorting. Not deterministic.
-    # Use this for most storage purposes.
+    # Use this where speed is more important than determinism.
     FAST = 'fast'

     # Single line, no spaces, sorted keys. Deterministic.
@@ -40,7 +40,9 @@ class JsonStyle(Enum):


 def dataclass_to_dict(
-    obj: Any, codec: Codec = Codec.JSON, coerce_to_float: bool = True
+    obj: Any,
+    codec: Codec = Codec.JSON,
+    coerce_to_float: bool = True,
 ) -> dict:
     """Given a dataclass object, return a json-friendly dict.
@@ -101,32 +103,36 @@ def dataclass_from_dict(
     The dict must be formatted to match the specified codec (generally
     json-friendly object types). This means that sequence values such as
-    tuples or sets should be passed as lists, enums should be passed as their
-    associated values, nested dataclasses should be passed as dicts, etc.
+    tuples or sets should be passed as lists, enums should be passed as
+    their associated values, nested dataclasses should be passed as dicts,
+    etc.

     All values are checked to ensure their types/values are valid.

     Data for attributes of type Any will be checked to ensure they match
     types supported directly by json. This does not include types such
     as tuples which are implicitly translated by Python's json module
-    (as this would break the ability to do a lossless round-trip with data).
+    (as this would break the ability to do a lossless round-trip with
+    data).

     If coerce_to_float is True, int values passed for float typed fields
     will be converted to float values. Otherwise, a TypeError is raised.

-    If allow_unknown_attrs is False, AttributeErrors will be raised for
-    attributes present in the dict but not on the data class. Otherwise, they
-    will be preserved as part of the instance and included if it is
-    exported back to a dict, unless discard_unknown_attrs is True, in which
-    case they will simply be discarded.
+    If `allow_unknown_attrs` is False, AttributeErrors will be raised for
+    attributes present in the dict but not on the data class. Otherwise,
+    they will be preserved as part of the instance and included if it is
+    exported back to a dict, unless `discard_unknown_attrs` is True, in
+    which case they will simply be discarded.
     """
-    return _Inputter(
+    val = _Inputter(
         cls,
         codec=codec,
         coerce_to_float=coerce_to_float,
         allow_unknown_attrs=allow_unknown_attrs,
         discard_unknown_attrs=discard_unknown_attrs,
     ).run(values)
+    assert isinstance(val, cls)
+    return val


 def dataclass_from_json(

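A quick round trip through the two functions touched above; the Hero class is hypothetical, and the assert-backed return means dataclass_from_dict is now statically typed as returning the requested class:

    from dataclasses import dataclass
    from typing import Annotated

    from efro.dataclassio import (
        IOAttrs,
        dataclass_from_dict,
        dataclass_to_dict,
        ioprepped,
    )

    @ioprepped
    @dataclass
    class Hero:
        name: Annotated[str, IOAttrs('n')]
        hitpoints: Annotated[int, IOAttrs('hp')] = 10

    out = dataclass_to_dict(Hero(name='Zoe'))  # -> {'n': 'Zoe', 'hp': 10}
    back = dataclass_from_dict(Hero, out)      # typed as Hero, not Any
    assert back == Hero(name='Zoe')
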
View file

@@ -8,39 +8,23 @@ import dataclasses
 import typing
 import datetime
 from enum import Enum
-from typing import TYPE_CHECKING, get_args
+from typing import TYPE_CHECKING, get_args, TypeVar, Generic

 # noinspection PyProtectedMember
 from typing import _AnnotatedAlias  # type: ignore

 if TYPE_CHECKING:
-    from typing import Any, Callable
+    from typing import Any, Callable, Literal, ClassVar, Self

 # Types which we can pass through as-is.
 SIMPLE_TYPES = {int, bool, str, float, type(None)}

-# Attr name for dict of extra attributes included on dataclass instances.
-# Note that this is only added if extra attributes are present.
+# Attr name for dict of extra attributes included on dataclass
+# instances. Note that this is only added if extra attributes are
+# present.
 EXTRA_ATTRS_ATTR = '_DCIOEXATTRS'


-def _raise_type_error(
-    fieldpath: str, valuetype: type, expected: tuple[type, ...]
-) -> None:
-    """Raise an error when a field value's type does not match expected."""
-    assert isinstance(expected, tuple)
-    assert all(isinstance(e, type) for e in expected)
-    if len(expected) == 1:
-        expected_str = expected[0].__name__
-    else:
-        expected_str = ' | '.join(t.__name__ for t in expected)
-    raise TypeError(
-        f'Invalid value type for "{fieldpath}";'
-        f' expected "{expected_str}", got'
-        f' "{valuetype.__name__}".'
-    )
-
-
 class Codec(Enum):
     """Specifies expected data format exported to or imported from."""
@@ -78,32 +62,46 @@ class IOExtendedData:
     """

-def _is_valid_for_codec(obj: Any, codec: Codec) -> bool:
-    """Return whether a value consists solely of json-supported types.
-
-    Note that this does not include things like tuples which are
-    implicitly translated to lists by python's json module.
-    """
-    if obj is None:
-        return True
-    objtype = type(obj)
-    if objtype in (int, float, str, bool):
-        return True
-    if objtype is dict:
-        # JSON 'objects' supports only string dict keys, but all value types.
-        return all(
-            isinstance(k, str) and _is_valid_for_codec(v, codec)
-            for k, v in obj.items()
-        )
-    if objtype is list:
-        return all(_is_valid_for_codec(elem, codec) for elem in obj)
-
-    # A few things are valid in firestore but not json.
-    if issubclass(objtype, datetime.datetime) or objtype is bytes:
-        return codec is Codec.FIRESTORE
-
-    return False
+EnumT = TypeVar('EnumT', bound=Enum)
+
+
+class IOMultiType(Generic[EnumT]):
+    """A base class for types that can map to multiple dataclass types.
+
+    This enables usage of high level base classes (for example
+    a 'Message' type) in annotations, with dataclassio automatically
+    serializing & deserializing dataclass subclasses based on their
+    type ('MessagePing', 'MessageChat', etc.)
+
+    Standard usage involves creating a class which inherits from this
+    one which acts as a 'registry', and then creating dataclass classes
+    inheriting from that registry class. Dataclassio will then do the
+    right thing when that registry class is used in type annotations.
+
+    See tests/test_efro/test_dataclassio.py for examples.
+    """
+
+    # Dataclasses inheriting from an IOMultiType will store a type-id
+    # with this key in their serialized data. This value can be
+    # overridden in IOMultiType subclasses as desired.
+    ID_STORAGE_NAME = '_dciotype'
+
+    @classmethod
+    def get_type(cls, type_id: EnumT) -> type[Self]:
+        """Return a specific subclass given a type-id."""
+        raise NotImplementedError()
+
+    @classmethod
+    def get_type_id(cls) -> EnumT:
+        """Return the type-id for this subclass."""
+        raise NotImplementedError()
+
+    @classmethod
+    def get_type_id_type(cls) -> type[EnumT]:
+        """Return the Enum type this class uses as its type-id."""
+        out: type[EnumT] = cls.__orig_bases__[0].__args__[0]  # type: ignore
+        assert issubclass(out, Enum)
+        return out


 class IOAttrs:
@@ -192,7 +190,7 @@ class IOAttrs:
         """Ensure the IOAttrs instance is ok to use with the provided field."""

         # Turning off store_default requires the field to have either
-        # a default or a a default_factory or for us to have soft equivalents.
+        # a default or a default_factory or for us to have soft equivalents.

         if not self.store_default:
             field_default_factory: Any = field.default_factory
@@ -241,6 +239,52 @@ class IOAttrs:
             )


+def _raise_type_error(
+    fieldpath: str, valuetype: type, expected: tuple[type, ...]
+) -> None:
+    """Raise an error when a field value's type does not match expected."""
+    assert isinstance(expected, tuple)
+    assert all(isinstance(e, type) for e in expected)
+    if len(expected) == 1:
+        expected_str = expected[0].__name__
+    else:
+        expected_str = ' | '.join(t.__name__ for t in expected)
+    raise TypeError(
+        f'Invalid value type for "{fieldpath}";'
+        f' expected "{expected_str}", got'
+        f' "{valuetype.__name__}".'
+    )
+
+
+def _is_valid_for_codec(obj: Any, codec: Codec) -> bool:
+    """Return whether a value consists solely of json-supported types.
+
+    Note that this does not include things like tuples which are
+    implicitly translated to lists by python's json module.
+    """
+    if obj is None:
+        return True
+    objtype = type(obj)
+    if objtype in (int, float, str, bool):
+        return True
+    if objtype is dict:
+        # JSON 'objects' supports only string dict keys, but all value
+        # types.
+        return all(
+            isinstance(k, str) and _is_valid_for_codec(v, codec)
+            for k, v in obj.items()
+        )
+    if objtype is list:
+        return all(_is_valid_for_codec(elem, codec) for elem in obj)
+
+    # A few things are valid in firestore but not json.
+    if issubclass(objtype, datetime.datetime) or objtype is bytes:
+        return codec is Codec.FIRESTORE
+
+    return False
+
+
 def _get_origin(anntype: Any) -> Any:
     """Given a type annotation, return its origin or itself if there is none.
@@ -255,9 +299,9 @@ def _get_origin(anntype: Any) -> Any:

 def _parse_annotated(anntype: Any) -> tuple[Any, IOAttrs | None]:
     """Parse Annotated() constructs, returning annotated type & IOAttrs."""
-    # If we get an Annotated[foo, bar, eep] we take
-    # foo as the actual type, and we look for IOAttrs instances in
-    # bar/eep to affect our behavior.
+    # If we get an Annotated[foo, bar, eep] we take foo as the actual
+    # type, and we look for IOAttrs instances in bar/eep to affect our
+    # behavior.
     ioattrs: IOAttrs | None = None
     if isinstance(anntype, _AnnotatedAlias):
         annargs = get_args(anntype)
@@ -270,8 +314,8 @@ def _parse_annotated(anntype: Any) -> tuple[Any, IOAttrs | None]:
                 )
                 ioattrs = annarg

-            # I occasionally just throw a 'x' down when I mean IOAttrs('x');
-            # catch these mistakes.
+            # I occasionally just throw a 'x' down when I mean
+            # IOAttrs('x'); catch these mistakes.
             elif isinstance(annarg, (str, int, float, bool)):
                 raise RuntimeError(
                     f'Raw {type(annarg)} found in Annotated[] entry:'
@@ -279,3 +323,21 @@ def _parse_annotated(anntype: Any) -> tuple[Any, IOAttrs | None]:
                 )
         anntype = annargs[0]
     return anntype, ioattrs
+
+
+def _get_multitype_type(
+    cls: type[IOMultiType], fieldpath: str, val: Any
+) -> type[Any]:
+    if not isinstance(val, dict):
+        raise ValueError(
+            f"Found a {type(val)} at '{fieldpath}'; expected a dict."
+        )
+    storename = cls.ID_STORAGE_NAME
+    id_val = val.get(storename)
+    if id_val is None:
+        raise ValueError(
+            f"Expected a '{storename}' value for object at '{fieldpath}'."
+        )
+    id_enum_type = cls.get_type_id_type()
+    id_enum = id_enum_type(id_val)
+    return cls.get_type(id_enum)

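Based on the IOMultiType docstring above, a registry looks roughly like the following; all class and enum names here are invented for illustration (the real examples live in tests/test_efro/test_dataclassio.py):

    from dataclasses import dataclass
    from enum import Enum
    from typing import Annotated

    from efro.dataclassio import IOAttrs, IOMultiType, ioprepped

    class MessageTypeID(Enum):
        PING = 'ping'
        CHAT = 'chat'

    class Message(IOMultiType[MessageTypeID]):
        """Registry base: annotate fields as 'Message' and dataclassio
        dispatches to the concrete subclass via the stored type-id."""

        @classmethod
        def get_type(cls, type_id: MessageTypeID) -> type['Message']:
            # Lazy mapping from type-id to concrete class.
            if type_id is MessageTypeID.PING:
                return MessagePing
            if type_id is MessageTypeID.CHAT:
                return MessageChat
            raise ValueError(type_id)

    @ioprepped
    @dataclass
    class MessagePing(Message):
        @classmethod
        def get_type_id(cls) -> MessageTypeID:
            return MessageTypeID.PING

    @ioprepped
    @dataclass
    class MessageChat(Message):
        text: Annotated[str, IOAttrs('t')]

        @classmethod
        def get_type_id(cls) -> MessageTypeID:
            return MessageTypeID.CHAT
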
View file

@@ -13,7 +13,7 @@ import dataclasses
 import typing
 import types
 import datetime
-from typing import TYPE_CHECKING, Generic, TypeVar
+from typing import TYPE_CHECKING

 from efro.util import enum_by_value, check_utc
 from efro.dataclassio._base import (
@@ -25,6 +25,8 @@ from efro.dataclassio._base import (
     SIMPLE_TYPES,
     _raise_type_error,
     IOExtendedData,
+    _get_multitype_type,
+    IOMultiType,
 )
 from efro.dataclassio._prep import PrepSession
@@ -34,13 +36,11 @@ if TYPE_CHECKING:
     from efro.dataclassio._base import IOAttrs
     from efro.dataclassio._outputter import _Outputter

-T = TypeVar('T')

-
-class _Inputter(Generic[T]):
+class _Inputter:
     def __init__(
         self,
-        cls: type[T],
+        cls: type[Any],
         codec: Codec,
         coerce_to_float: bool,
         allow_unknown_attrs: bool = True,
@@ -59,27 +59,45 @@ class _Inputter:
                 ' when allow_unknown_attrs is False.'
             )

-    def run(self, values: dict) -> T:
+    def run(self, values: dict) -> Any:
         """Do the thing."""

-        # For special extended data types, call their 'will_output' callback.
-        tcls = self._cls
-        if issubclass(tcls, IOExtendedData):
+        outcls: type[Any]
+
+        # If we're dealing with a multi-type subclass which is NOT a
+        # dataclass, we must rely on its stored type to figure out
+        # what type of dataclass we're going to. If we are a dataclass
+        # then we already know what type we're going to so we can
+        # survive without this, which is often necessary when reading
+        # old data that doesn't have a type id attr yet.
+        if issubclass(self._cls, IOMultiType) and not dataclasses.is_dataclass(
+            self._cls
+        ):
+            type_id_val = values.get(self._cls.ID_STORAGE_NAME)
+            if type_id_val is None:
+                raise ValueError(
+                    f'No type id value present for multi-type object:'
+                    f' {values}.'
+                )
+            type_id_enum = self._cls.get_type_id_type()
+            enum_val = type_id_enum(type_id_val)
+            outcls = self._cls.get_type(enum_val)
+        else:
+            outcls = self._cls
+
+        # FIXME - should probably move this into _dataclass_from_input
+        # so it can work on nested values.
+        if issubclass(outcls, IOExtendedData):
             is_ext = True
-            tcls.will_input(values)
+            outcls.will_input(values)
         else:
             is_ext = False

-        out = self._dataclass_from_input(self._cls, '', values)
-        assert isinstance(out, self._cls)
+        out = self._dataclass_from_input(outcls, '', values)
+        assert isinstance(out, outcls)

         if is_ext:
-            # mypy complains that we're no longer returning a T
-            # if we operate on out directly.
-            out2 = out
-            assert isinstance(out2, IOExtendedData)
-            out2.did_input()
+            out.did_input()

         return out
@@ -111,8 +129,8 @@ class _Inputter:
         # noinspection PyPep8
         if origin is typing.Union or origin is types.UnionType:
             # Currently, the only unions we support are None/Value
-            # (translated from Optional), which we verified on prep.
-            # So let's treat this as a simple optional case.
+            # (translated from Optional), which we verified on prep. So
+            # let's treat this as a simple optional case.
             if value is None:
                 return None
             childanntypes_l = [
@@ -123,13 +141,15 @@ class _Inputter:
                 cls, fieldpath, childanntypes_l[0], value, ioattrs
             )

-        # Everything below this point assumes the annotation type resolves
-        # to a concrete type. (This should have been verified at prep time).
+        # Everything below this point assumes the annotation type
+        # resolves to a concrete type. (This should have been verified
+        # at prep time).
         assert isinstance(origin, type)

         if origin in SIMPLE_TYPES:
             if type(value) is not origin:
-                # Special case: if they want to coerce ints to floats, do so.
+                # Special case: if they want to coerce ints to floats,
+                # do so.
                 if (
                     self._coerce_to_float
                     and origin is float
@@ -157,6 +177,16 @@ class _Inputter:
         if dataclasses.is_dataclass(origin):
             return self._dataclass_from_input(origin, fieldpath, value)

+        # ONLY consider something as a multi-type when it's not a
+        # dataclass (all dataclasses inheriting from the multi-type
+        # should just be processed as dataclasses).
+        if issubclass(origin, IOMultiType):
+            return self._dataclass_from_input(
+                _get_multitype_type(anntype, fieldpath, value),
+                fieldpath,
+                value,
+            )
+
         if issubclass(origin, Enum):
             return enum_by_value(origin, value)
@@ -228,10 +258,23 @@ class _Inputter:
             f.name: _parse_annotated(prep.annotations[f.name]) for f in fields
         }

+        # Special case: if this is a multi-type class it probably has a
+        # type attr. Ignore that while parsing since we already have a
+        # definite type and it will just pollute extra-attrs otherwise.
+        if issubclass(cls, IOMultiType):
+            type_id_store_name = cls.ID_STORAGE_NAME
+        else:
+            type_id_store_name = None
+
         # Go through all data in the input, converting it to either dataclass
         # args or extra data.
         args: dict[str, Any] = {}
         for rawkey, value in values.items():
+            # Ignore _dciotype or whatnot.
+            if type_id_store_name is not None and rawkey == type_id_store_name:
+                continue
+
             key = prep.storage_names_to_attr_names.get(rawkey, rawkey)
             field = fields_by_name.get(key)
@@ -473,6 +516,19 @@ class _Inputter:
         # We contain elements of some specified type.
         assert len(childanntypes) == 1
         childanntype = childanntypes[0]
+
+        # If our annotation type inherits from IOMultiType, use type-id
+        # values to determine which type to load for each element.
+        if issubclass(childanntype, IOMultiType):
+            return seqtype(
+                self._dataclass_from_input(
+                    _get_multitype_type(childanntype, fieldpath, i),
+                    fieldpath,
+                    i,
+                )
+                for i in value
+            )
+
         return seqtype(
             self._value_from_input(cls, fieldpath, childanntype, i, ioattrs)
             for i in value

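Continuing the hypothetical Message registry sketched earlier: the inputter logic above is what lets you deserialize via the base class, with the stored type-id selecting the concrete dataclass:

    from efro.dataclassio import dataclass_from_dict

    # '_dciotype' is the default value of IOMultiType.ID_STORAGE_NAME.
    msg = dataclass_from_dict(Message, {'_dciotype': 'chat', 't': 'hello'})
    assert isinstance(msg, MessageChat) and msg.text == 'hello'
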
View file

@@ -25,6 +25,7 @@ from efro.dataclassio._base import (
     SIMPLE_TYPES,
     _raise_type_error,
     IOExtendedData,
+    IOMultiType,
 )
 from efro.dataclassio._prep import PrepSession
@@ -49,6 +50,8 @@ class _Outputter:
         assert dataclasses.is_dataclass(self._obj)

         # For special extended data types, call their 'will_output' callback.
+        # FIXME - should probably move this into _process_dataclass so it
+        # can work on nested values.
         if isinstance(self._obj, IOExtendedData):
             self._obj.will_output()
@@ -69,6 +72,7 @@ class _Outputter:
     def _process_dataclass(self, cls: type, obj: Any, fieldpath: str) -> Any:
         # pylint: disable=too-many-locals
         # pylint: disable=too-many-branches
+        # pylint: disable=too-many-statements
         prep = PrepSession(explicit=False).prep_dataclass(
             type(obj), recursion_level=0
         )
@@ -139,6 +143,25 @@ class _Outputter:
             if self._create:
                 assert out is not None
                 out.update(extra_attrs)
+
+        # If this obj inherits from multi-type, store its type id.
+        if isinstance(obj, IOMultiType):
+            type_id = obj.get_type_id()
+
+            # Sanity checks; make sure looking up this id gets us this
+            # type.
+            assert isinstance(type_id.value, str)
+            if obj.get_type(type_id) is not type(obj):
+                raise RuntimeError(
+                    f'dataclassio: object of type {type(obj)}'
+                    f' gives type-id {type_id} but that id gives type'
+                    f' {obj.get_type(type_id)}. Something is out of sync.'
+                )
+            assert obj.get_type(type_id) is type(obj)
+            if self._create:
+                assert out is not None
+                out[obj.ID_STORAGE_NAME] = type_id.value
+
         return out

     def _process_value(
@@ -231,6 +254,7 @@ class _Outputter:
                     f'Expected a list for {fieldpath};'
                     f' found a {type(value)}'
                 )
+
             childanntypes = typing.get_args(anntype)

             # 'Any' type children; make sure they are valid values for
@@ -246,8 +270,37 @@ class _Outputter:
                 # Hmm; should we do a copy here?
                 return value if self._create else None

-            # We contain elements of some specified type.
+            # We contain elements of some single specified type.
             assert len(childanntypes) == 1
+            childanntype = childanntypes[0]
+
+            # If that type is a multi-type, we determine our type per-object.
+            if issubclass(childanntype, IOMultiType):
+                # In the multi-type case, we use each object's own type
+                # to do its conversion, but lets at least make sure each
+                # of those types inherits from the annotated multi-type
+                # class.
+                for x in value:
+                    if not isinstance(x, childanntype):
+                        raise ValueError(
+                            f"Found a {type(x)} value under '{fieldpath}'."
+                            f' Everything must inherit from'
+                            f' {childanntype}.'
+                        )
+                if self._create:
+                    out: list[Any] = []
+                    for x in value:
+                        # We know these are dataclasses so no need to do
+                        # the generic _process_value.
+                        out.append(self._process_dataclass(cls, x, fieldpath))
+                    return out
+
+                for x in value:
+                    # We know these are dataclasses so no need to do
+                    # the generic _process_value.
+                    self._process_dataclass(cls, x, fieldpath)
+
+            # Normal non-multitype case; everything's got the same type.
             if self._create:
                 return [
                     self._process_value(
@@ -307,6 +360,21 @@ class _Outputter:
                 )
             return self._process_dataclass(cls, value, fieldpath)

+        # ONLY consider something as a multi-type when it's not a
+        # dataclass (all dataclasses inheriting from the multi-type should
+        # just be processed as dataclasses).
+        if issubclass(origin, IOMultiType):
+            # In the multi-type case, we use each object's own type to
+            # do its conversion, but lets at least make sure each of
+            # those types inherits from the annotated multi-type class.
+            if not isinstance(value, origin):
+                raise ValueError(
+                    f"Found a {type(value)} value at '{fieldpath}'."
+                    f' It is expected to inherit from {origin}.'
+                )
+            return self._process_dataclass(cls, value, fieldpath)
+
         if issubclass(origin, Enum):
             if not isinstance(value, origin):
                 raise TypeError(

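And the output side of the same hypothetical registry: _process_dataclass now stamps the type-id into the produced dict, so a round trip through the base class recovers the concrete type:

    from efro.dataclassio import dataclass_to_dict

    out = dataclass_to_dict(MessageChat(text='hi'))
    # -> {'t': 'hi', '_dciotype': 'chat'}   (key is ID_STORAGE_NAME)
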
View file

@@ -17,7 +17,12 @@ import datetime
 from typing import TYPE_CHECKING, TypeVar, get_type_hints

 # noinspection PyProtectedMember
-from efro.dataclassio._base import _parse_annotated, _get_origin, SIMPLE_TYPES
+from efro.dataclassio._base import (
+    _parse_annotated,
+    _get_origin,
+    SIMPLE_TYPES,
+    IOMultiType,
+)

 if TYPE_CHECKING:
     from typing import Any
@@ -260,6 +265,13 @@ class PrepSession:

         origin = _get_origin(anntype)

+        # If we inherit from IOMultiType, we use its type map to
+        # determine which type we're going to instead of the annotation.
+        # And we can't really check those types because they are
+        # lazy-loaded. So I guess we're done here.
+        if issubclass(origin, IOMultiType):
+            return
+
         # noinspection PyPep8
         if origin is typing.Union or origin is types.UnionType:
             self.prep_union(

View file

@@ -278,9 +278,7 @@ def _desc(obj: Any) -> str:
             tpss = (
                 f', contains [{tpsj}, ...]'
                 if len(obj) > 3
-                else f', contains [{tpsj}]'
-                if tps
-                else ''
+                else f', contains [{tpsj}]' if tps else ''
             )
             extra = f' (len {len(obj)}{tpss})'
     elif isinstance(obj, dict):
@@ -299,9 +297,7 @@ def _desc(obj: Any) -> str:
             pairss = (
                 f', contains {{{pairsj}, ...}}'
                 if len(obj) > 3
-                else f', contains {{{pairsj}}}'
-                if pairs
-                else ''
+                else f', contains {{{pairsj}}}' if pairs else ''
             )
             extra = f' (len {len(obj)}{pairss})'
     if extra is None:

View file

@@ -92,9 +92,9 @@ class LogEntry:
     # incorporated into custom log processing. To populate this, our
     # LogHandler class looks for a 'labels' dict passed in the optional
     # 'extra' dict arg to standard Python log calls.
-    labels: Annotated[
-        dict[str, str], IOAttrs('la', store_default=False)
-    ] = field(default_factory=dict)
+    labels: Annotated[dict[str, str], IOAttrs('la', store_default=False)] = (
+        field(default_factory=dict)
+    )


 @ioprepped
@@ -483,12 +483,12 @@ class LogHandler(logging.Handler):
                     # after a short bit if we never get a newline.
                     ship_task = self._file_chunk_ship_task[name]
                     if ship_task is None:
-                        self._file_chunk_ship_task[
-                            name
-                        ] = self._event_loop.create_task(
-                            self._ship_chunks_task(name),
-                            name='log ship file chunks',
-                        )
+                        self._file_chunk_ship_task[name] = (
+                            self._event_loop.create_task(
+                                self._ship_chunks_task(name),
+                                name='log ship file chunks',
+                            )
+                        )

         except Exception:
             import traceback

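As the LogEntry comment above notes, labels ride along on the standard logging 'extra' mechanism; populating them is plain stdlib usage (the label values here are illustrative):

    import logging

    # LogHandler reads the 'labels' dict out of the standard 'extra'
    # arg and stores it on the resulting LogEntry.
    logging.getLogger(__name__).warning(
        'cache miss', extra={'labels': {'subsystem': 'assets'}}
    )
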
View file

@@ -499,8 +499,7 @@ class MessageProtocol:
                     f'    @overload\n'
                     f'    {pfx}def send{sfx}(self,'
                     f' message: {msgtypevar})'
-                    f' -> {rtypevar}:\n'
-                    f'        ...\n'
+                    f' -> {rtypevar}: ...\n'
                 )
             rtypevar = 'Response | None'
             if async_pass:
@@ -607,8 +606,7 @@ class MessageProtocol:
                 f'        call: Callable[[Any, {msgtypevar}], '
                 f'{rtypevar}],\n'
                 f'    )'
-                f' -> Callable[[Any, {msgtypevar}], {rtypevar}]:\n'
-                f'        ...\n'
+                f' -> Callable[[Any, {msgtypevar}], {rtypevar}]: ...\n'
             )
             out += (
                 '\n'

View file

@@ -55,12 +55,13 @@ class MessageReceiver:
     def __init__(self, protocol: MessageProtocol) -> None:
         self.protocol = protocol
         self._handlers: dict[type[Message], Callable] = {}
-        self._decode_filter_call: Callable[
-            [Any, dict, Message], None
-        ] | None = None
-        self._encode_filter_call: Callable[
-            [Any, Message | None, Response | SysResponse, dict], None
-        ] | None = None
+        self._decode_filter_call: (
+            Callable[[Any, dict, Message], None] | None
+        ) = None
+        self._encode_filter_call: (
+            Callable[[Any, Message | None, Response | SysResponse, dict], None]
+            | None
+        ) = None

     # noinspection PyProtectedMember
     def register_handler(

View file

@@ -41,18 +41,18 @@ class MessageSender:
     def __init__(self, protocol: MessageProtocol) -> None:
         self.protocol = protocol
         self._send_raw_message_call: Callable[[Any, str], str] | None = None
-        self._send_async_raw_message_call: Callable[
-            [Any, str], Awaitable[str]
-        ] | None = None
-        self._send_async_raw_message_ex_call: Callable[
-            [Any, str, Message], Awaitable[str]
-        ] | None = None
-        self._encode_filter_call: Callable[
-            [Any, Message, dict], None
-        ] | None = None
-        self._decode_filter_call: Callable[
-            [Any, Message, dict, Response | SysResponse], None
-        ] | None = None
+        self._send_async_raw_message_call: (
+            Callable[[Any, str], Awaitable[str]] | None
+        ) = None
+        self._send_async_raw_message_ex_call: (
+            Callable[[Any, str, Message], Awaitable[str]] | None
+        ) = None
+        self._encode_filter_call: (
+            Callable[[Any, Message, dict], None] | None
+        ) = None
+        self._decode_filter_call: (
+            Callable[[Any, Message, dict, Response | SysResponse], None] | None
+        ) = None
         self._peer_desc_call: Callable[[Any], str] | None = None

     def send_method(

View file

@@ -317,8 +317,6 @@ _envval = os.environ.get('EFRO_TERMCOLORS')
 color_enabled: bool = (
     True
     if _envval == '1'
-    else False
-    if _envval == '0'
-    else _default_color_enabled()
+    else False if _envval == '0' else _default_color_enabled()
 )

 Clr: type[ClrBase] = ClrAlways if color_enabled else ClrNever

View file

@@ -459,8 +459,7 @@ if TYPE_CHECKING:
     class ValueDispatcherMethod(Generic[ValT, RetT]):
         """Used by the valuedispatchmethod decorator."""

-        def __call__(self, value: ValT) -> RetT:
-            ...
+        def __call__(self, value: ValT) -> RetT: ...

         def register(
             self, value: ValT

Binary file not shown.