Mirror of https://github.com/RYDE-WORK/ballistica.git (synced 2026-01-25 00:13:27 +08:00)

Merge branch 'efroemling:master' into master
commit d49be7ac65
.efrocachemap (generated): 14 changed lines
@@ -421,7 +421,7 @@
 "build/assets/ba_data/audio/zoeOw.ogg": "74befe45a8417e95b6a2233c51992a26",
 "build/assets/ba_data/audio/zoePickup01.ogg": "48ab8cddfcde36a750856f3f81dd20c8",
 "build/assets/ba_data/audio/zoeScream01.ogg": "2b468aedfa8741090247f04eb9e6df55",
-"build/assets/ba_data/data/langdata.json": "49b75d31e07e050e45676534ac8637b7",
+"build/assets/ba_data/data/langdata.json": "1de1f6150bd8e602b839378a669f3634",
 "build/assets/ba_data/data/languages/arabic.json": "2c2915e10124bb8f69206da9c608d57c",
 "build/assets/ba_data/data/languages/belarussian.json": "09954e550d13d3d9cb5a635a1d32a151",
 "build/assets/ba_data/data/languages/chinese.json": "5fa538e855bcfe20e727e0ad5831efad",
@@ -432,7 +432,7 @@
 "build/assets/ba_data/data/languages/dutch.json": "b0900d572c9141897d53d6574c471343",
 "build/assets/ba_data/data/languages/english.json": "48fe4c6f97b07420238244309b54a61e",
 "build/assets/ba_data/data/languages/esperanto.json": "0e397cfa5f3fb8cef5f4a64f21cda880",
-"build/assets/ba_data/data/languages/filipino.json": "e8b639601d17512ffcf0f05c57c544a9",
+"build/assets/ba_data/data/languages/filipino.json": "838148a9390d5a19ba2514da7c48bc98",
 "build/assets/ba_data/data/languages/french.json": "917e4174d6f0eb7f00c27fd79cfbb924",
 "build/assets/ba_data/data/languages/german.json": "eaf3f1bf633566de133c61f4f5377e62",
 "build/assets/ba_data/data/languages/gibberish.json": "a1afce99249645003017ebec50e716fe",
@@ -444,7 +444,7 @@
 "build/assets/ba_data/data/languages/korean.json": "4e3524327a0174250aff5e1ef4c0c597",
 "build/assets/ba_data/data/languages/malay.json": "f6ce0426d03a62612e3e436ed5d1be1f",
 "build/assets/ba_data/data/languages/persian.json": "d42aa034d03f487edd15e651d6f469ab",
-"build/assets/ba_data/data/languages/polish.json": "9d22c6643c097c4cb268d0d6b6319cd4",
+"build/assets/ba_data/data/languages/polish.json": "b90feb3cc20a80284ef44546df7099e6",
 "build/assets/ba_data/data/languages/portuguese.json": "5dcc9a324a8e926a6d5dd109cceaee1a",
 "build/assets/ba_data/data/languages/romanian.json": "b3e46efd6f869dbd78014570e037c290",
 "build/assets/ba_data/data/languages/russian.json": "3efaaf5eac320fceef029501dec4109b",
@@ -4065,10 +4065,10 @@
 "build/assets/windows/Win32/ucrtbased.dll": "2def5335207d41b21b9823f6805997f1",
 "build/assets/windows/Win32/vc_redist.x86.exe": "b08a55e2e77623fe657bea24f223a3ae",
 "build/assets/windows/Win32/vcruntime140d.dll": "865b2af4d1e26a1a8073c89acb06e599",
-"build/prefab/full/linux_arm64_gui/debug/ballisticakit": "c3c2a9aac9d82e89f63f5c3126e819ab",
-"build/prefab/full/linux_arm64_gui/release/ballisticakit": "6a031509cfb7d89f4b30e9e024f0be47",
-"build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "f4841f8f3ac255c38911c51357165b7c",
-"build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "3e9cfce74758f7c9b806ee1448a9111b",
+"build/prefab/full/linux_arm64_gui/debug/ballisticakit": "2dd4e422903be4c49e784adf1419653a",
+"build/prefab/full/linux_arm64_gui/release/ballisticakit": "075af3e80bf53538ef9f3080b2b63658",
+"build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "04b7d010bb6d9de3d107b0a6fa8ec6f1",
+"build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "0928e648767af6594ffc57c34482a6e5",
 "build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "1f0f54a836ed7381c77a22a3928c4b3a",
 "build/prefab/full/linux_x86_64_gui/release/ballisticakit": "d6a12c876c4d6d628c14bb2bb4417862",
 "build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "47dde4684e31ad30a716a2418b40af2b",
@@ -1182,7 +1182,7 @@ def test_multi_type() -> None:

     # Test converting single instances back and forth.
     val1: MTTestBase = MTTestClass1(ival=123)
-    tpname = MTTestBase.ID_STORAGE_NAME
+    tpname = MTTestBase.get_type_id_storage_name()
     outdict = dataclass_to_dict(val1)
     assert outdict == {'ival': 123, tpname: 'm1'}
     val2: MTTestBase = MTTestClass2(sval='whee')
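Later hunks in this diff drop the ID_STORAGE_NAME class constant entirely in favor of the accessor used above, so any external code still reading the constant needs the same one-line migration. A minimal sketch, assuming IOMultiType is exported from efro.dataclassio as the tests here use it:

from efro.dataclassio import IOMultiType

# Before this commit: storage_key = IOMultiType.ID_STORAGE_NAME
storage_key = IOMultiType.get_type_id_storage_name()  # '_dciotype' by default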
@@ -1314,3 +1314,117 @@ def test_multi_type() -> None:
     outdict = dataclass_to_dict(container7)
     container7b = dataclass_from_dict(_TestContainerClass7, {})
     assert container7 == container7b
+
+
+class MTTest2TypeID(Enum):
+    """IDs for our multi-type class."""
+
+    CLASS_1 = 'm1'
+    CLASS_2 = 'm2'
+    CLASS_3 = 'm3'
+
+
+class MTTest2Base(IOMultiType[MTTest2TypeID]):
+    """Another multi-type test.
+
+    This one tests overriding type-id-storage-name.
+    """
+
+    @override
+    @classmethod
+    def get_type_id_storage_name(cls) -> str:
+        return 'type'
+
+    @override
+    @classmethod
+    def get_type(cls, type_id: MTTest2TypeID) -> type[MTTest2Base]:
+        val: type[MTTest2Base]
+        if type_id is MTTest2TypeID.CLASS_1:
+            val = MTTest2Class1
+        elif type_id is MTTest2TypeID.CLASS_2:
+            val = MTTest2Class2
+        elif type_id is MTTest2TypeID.CLASS_3:
+            val = MTTest2Class3
+        else:
+            assert_never(type_id)
+        return val
+
+    @override
+    @classmethod
+    def get_type_id(cls) -> MTTest2TypeID:
+        raise NotImplementedError()
+
+
+@ioprepped
+@dataclass
+class MTTest2Class1(MTTest2Base):
+    """A test child-class for use with our multi-type class."""
+
+    ival: int
+
+    @override
+    @classmethod
+    def get_type_id(cls) -> MTTest2TypeID:
+        return MTTest2TypeID.CLASS_1
+
+
+@ioprepped
+@dataclass
+class MTTest2Class2(MTTest2Base):
+    """Another test child-class for use with our multi-type class."""
+
+    sval: str
+
+    @override
+    @classmethod
+    def get_type_id(cls) -> MTTest2TypeID:
+        return MTTest2TypeID.CLASS_2
+
+
+@ioprepped
+@dataclass
+class MTTest2Class3(MTTest2Base):
+    """Another test child-class for use with our multi-type class."""
+
+    type: str = ''
+
+    @override
+    @classmethod
+    def get_type_id(cls) -> MTTest2TypeID:
+        return MTTest2TypeID.CLASS_3
+
+
+def test_multi_type_2() -> None:
+    """Test IOMultiType stuff."""
+
+    # Make sure this serializes correctly with 'test' as a type name.
+
+    val1: MTTest2Base = MTTest2Class1(ival=123)
+    outdict = dataclass_to_dict(val1)
+    assert outdict == {'ival': 123, 'type': 'm1'}
+
+    val1b = dataclass_from_dict(MTTest2Base, outdict)
+    assert val1 == val1b
+
+    val2: MTTest2Base = MTTest2Class2(sval='whee')
+    outdict2 = dataclass_to_dict(val2)
+    assert outdict2 == {'sval': 'whee', 'type': 'm2'}
+
+    val2b = dataclass_from_dict(MTTest2Base, outdict2)
+    assert val2 == val2b
+
+    # If a multi-type class uses 'type' itself, make sure we error
+    # instead of letting things break due to the name clash. In an ideal
+    # world this would error at prep time, but IOMultiType is built
+    # around lazy-loading so it can't actually examine all types at that
+    # time.
+
+    # Make sure we error on output...
+    val3: MTTest2Base = MTTest2Class3()
+    with pytest.raises(RuntimeError):
+        outdict = dataclass_to_dict(val3)
+
+    # And input.
+    indict3 = {'type': 'm3'}
+    with pytest.raises(RuntimeError):
+        val3 = dataclass_from_dict(MTTest2Base, indict3)
@@ -81,11 +81,6 @@ class IOMultiType(Generic[EnumT]):
     See tests/test_efro/test_dataclassio.py for examples.
     """

-    # Dataclasses inheriting from an IOMultiType will store a type-id
-    # with this key in their serialized data. This value can be
-    # overridden in IOMultiType subclasses as desired.
-    ID_STORAGE_NAME = '_dciotype'
-
     @classmethod
     def get_type(cls, type_id: EnumT) -> type[Self]:
         """Return a specific subclass given a type-id."""
@@ -103,6 +98,18 @@
         assert issubclass(out, Enum)
         return out

+    @classmethod
+    def get_type_id_storage_name(cls) -> str:
+        """Return the key used to store type id in serialized data.
+
+        The default is an obscure value so that it does not conflict
+        with members of individual type attrs, but in some cases one
+        might prefer to serialize it to something simpler like 'type'
+        by overriding this call. One just needs to make sure that no
+        encompassed types serialize anything to 'type' themself.
+        """
+        return '_dciotype'
+

 class IOAttrs:
     """For specifying io behavior in annotations.
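The docstring above describes overriding the storage key; the sketch below exercises that override end to end. Every name in it (ShapeTypeID, Shape, Circle, the 'kind' key) is illustrative rather than part of this commit, and it assumes IOMultiType, ioprepped, dataclass_to_dict, and dataclass_from_dict are importable from efro.dataclassio as in the tests above (the @override decorators used in the real tests are omitted for brevity):

from __future__ import annotations

from enum import Enum
from dataclasses import dataclass

from efro.dataclassio import (
    IOMultiType,
    ioprepped,
    dataclass_to_dict,
    dataclass_from_dict,
)


class ShapeTypeID(Enum):
    """Type-ids for a hypothetical multi-type family."""

    CIRCLE = 'circle'


class Shape(IOMultiType[ShapeTypeID]):
    """Base class storing its type-id under 'kind' instead of '_dciotype'."""

    @classmethod
    def get_type_id_storage_name(cls) -> str:
        return 'kind'

    @classmethod
    def get_type(cls, type_id: ShapeTypeID) -> type[Shape]:
        # Map each type-id to its concrete class (lazy style, as in the tests).
        if type_id is ShapeTypeID.CIRCLE:
            return Circle
        raise ValueError(type_id)

    @classmethod
    def get_type_id(cls) -> ShapeTypeID:
        raise NotImplementedError()


@ioprepped
@dataclass
class Circle(Shape):
    """Concrete child; it must not define a field named 'kind'."""

    radius: float = 1.0

    @classmethod
    def get_type_id(cls) -> ShapeTypeID:
        return ShapeTypeID.CIRCLE


out = dataclass_to_dict(Circle(radius=2.0))
assert out == {'radius': 2.0, 'kind': 'circle'}
assert dataclass_from_dict(Shape, out) == Circle(radius=2.0)

The round trip through dataclass_from_dict(Shape, ...) relies on the _Inputter change further down, which now reads the key through the same accessor.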
@@ -332,7 +339,7 @@ def _get_multitype_type(
         raise ValueError(
             f"Found a {type(val)} at '{fieldpath}'; expected a dict."
         )
-    storename = cls.ID_STORAGE_NAME
+    storename = cls.get_type_id_storage_name()
    id_val = val.get(storename)
    if id_val is None:
        raise ValueError(
@@ -73,7 +73,7 @@ class _Inputter:
         if issubclass(self._cls, IOMultiType) and not dataclasses.is_dataclass(
             self._cls
         ):
-            type_id_val = values.get(self._cls.ID_STORAGE_NAME)
+            type_id_val = values.get(self._cls.get_type_id_storage_name())
             if type_id_val is None:
                 raise ValueError(
                     f'No type id value present for multi-type object:'
@@ -234,6 +234,7 @@ class _Inputter:
         associated values, and nested dataclasses should be passed as dicts.
         """
         # pylint: disable=too-many-locals
+        # pylint: disable=too-many-statements
         # pylint: disable=too-many-branches
         if not isinstance(values, dict):
             raise TypeError(
@@ -252,8 +253,8 @@
         fields = dataclasses.fields(cls)
         fields_by_name = {f.name: f for f in fields}

-        # Preprocess all fields to convert Annotated[] to contained types
-        # and IOAttrs.
+        # Preprocess all fields to convert Annotated[] to contained
+        # types and IOAttrs.
         parsed_field_annotations = {
             f.name: _parse_annotated(prep.annotations[f.name]) for f in fields
         }
@@ -262,12 +263,25 @@
         # type attr. Ignore that while parsing since we already have a
         # definite type and it will just pollute extra-attrs otherwise.
         if issubclass(cls, IOMultiType):
-            type_id_store_name = cls.ID_STORAGE_NAME
+            type_id_store_name = cls.get_type_id_storage_name()
+
+            # However we do want to make sure the class we're loading
+            # doesn't itself use this same name, as this could lead to
+            # tricky breakage. We can't verify this for types at prep
+            # time because IOMultiTypes are lazy-loaded, so this is
+            # the best we can do.
+            if type_id_store_name in fields_by_name:
+                raise RuntimeError(
+                    f"{cls} contains a '{type_id_store_name}' field"
+                    ' which clashes with the type-id-storage-name of'
+                    ' the IOMultiType it inherits from.'
+                )
+
         else:
             type_id_store_name = None

-        # Go through all data in the input, converting it to either dataclass
-        # args or extra data.
+        # Go through all data in the input, converting it to either
+        # dataclass args or extra data.
         args: dict[str, Any] = {}
         for rawkey, value in values.items():

@@ -284,8 +298,8 @@
             if self._discard_unknown_attrs:
                 continue

-            # Treat this like 'Any' data; ensure that it is valid
-            # raw json.
+            # Treat this like 'Any' data; ensure that it is
+            # valid raw json.
             if not _is_valid_for_codec(value, self._codec):
                 raise TypeError(
                     f'Unknown attr \'{key}\''
@@ -161,7 +161,15 @@ class _Outputter:
             assert obj.get_type(type_id) is type(obj)
             if self._create:
                 assert out is not None
-                out[obj.ID_STORAGE_NAME] = type_id.value
+                storagename = obj.get_type_id_storage_name()
+                if any(f.name == storagename for f in fields):
+                    raise RuntimeError(
+                        f'dataclassio: {type(obj)} contains a'
+                        f" '{storagename}' field which clashes with"
+                        f' the type-id-storage-name of the IOMulticlass'
+                        f' it inherits from.'
+                    )
+                out[storagename] = type_id.value

         return out

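Combined with the _Inputter guard earlier in this diff, the observable behavior is what the new test exercises: both conversion directions fail fast on a storage-name clash instead of silently mangling the field. A short sketch of that behavior, reusing the MTTest2Base/MTTest2Class3 classes added to tests/test_efro/test_dataclassio.py above (pytest assumed, as in that module):

import pytest

# MTTest2Class3 declares a 'type' field, which collides with
# MTTest2Base.get_type_id_storage_name() == 'type'.
with pytest.raises(RuntimeError):
    dataclass_to_dict(MTTest2Class3())  # rejected by the _Outputter guard

with pytest.raises(RuntimeError):
    dataclass_from_dict(MTTest2Base, {'type': 'm3'})  # rejected by the _Inputter guard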
@@ -4,6 +4,7 @@

 # Note: We do lots of comparing of exact types here which is normally
 # frowned upon (stuff like isinstance() is usually encouraged).
+#
 # pylint: disable=unidiomatic-typecheck

 from __future__ import annotations
@@ -142,8 +143,10 @@ class PrepSession:
             return existing_data

         # Sanity check.
-        # Note that we now support recursive types via the PREP_SESSION_ATTR,
-        # so we theoretically shouldn't run into this this.
+        #
+        # Note that we now support recursive types via the
+        # PREP_SESSION_ATTR, so we theoretically shouldn't run into this
+        # this.
         if recursion_level > MAX_RECURSION:
             raise RuntimeError('Max recursion exceeded.')

@@ -152,20 +155,21 @@
         if not isinstance(cls_any, type) or not dataclasses.is_dataclass(cls):
             raise TypeError(f'Passed arg {cls} is not a dataclass type.')

-        # Add a pointer to the prep-session while doing the prep.
-        # This way we can ignore types that we're already in the process
-        # of prepping and can support recursive types.
+        # Add a pointer to the prep-session while doing the prep. This
+        # way we can ignore types that we're already in the process of
+        # prepping and can support recursive types.
         existing_prep = getattr(cls, PREP_SESSION_ATTR, None)
         if existing_prep is not None:
             if existing_prep is self:
                 return None
-            # We shouldn't need to support failed preps
-            # or preps from multiple threads at once.
+            # We shouldn't need to support failed preps or preps from
+            # multiple threads at once.
             raise RuntimeError('Found existing in-progress prep.')
         setattr(cls, PREP_SESSION_ATTR, self)

         # Generate a warning on non-explicit preps; we prefer prep to
-        # happen explicitly at runtime so errors can be detected early on.
+        # happen explicitly at runtime so errors can be detected early
+        # on.
         if not self.explicit:
             logging.warning(
                 'efro.dataclassio: implicitly prepping dataclass: %s.'
@@ -201,8 +205,8 @@ class PrepSession:
         all_storage_names: set[str] = set()
         storage_names_to_attr_names: dict[str, str] = {}

-        # Ok; we've resolved actual types for this dataclass.
-        # now recurse through them, verifying that we support all contained
+        # Ok; we've resolved actual types for this dataclass. now
+        # recurse through them, verifying that we support all contained
         # types and prepping any contained dataclass types.
         for attrname, anntype in resolved_annotations.items():
             anntype, ioattrs = _parse_annotated(anntype)
@@ -235,7 +239,8 @@
                 recursion_level=recursion_level + 1,
             )

-        # Success! Store our resolved stuff with the class and we're done.
+        # Success! Store our resolved stuff with the class and we're
+        # done.
         prepdata = PrepData(
             annotations=resolved_annotations,
             storage_names_to_attr_names=storage_names_to_attr_names,
@@ -282,8 +287,8 @@
         if anntype is typing.Any:
             return

-        # Everything below this point assumes the annotation type resolves
-        # to a concrete type.
+        # Everything below this point assumes the annotation type
+        # resolves to a concrete type.
         if not isinstance(origin, type):
             raise TypeError(
                 f'Unsupported type found for \'{attrname}\' on {cls}:'
@@ -292,8 +297,8 @@

         # If a soft_default value/factory was passed, we do some basic
         # type checking on the top-level value here. We also run full
-        # recursive validation on values later during inputting, but this
-        # should catch at least some errors early on, which can be
+        # recursive validation on values later during inputting, but
+        # this should catch at least some errors early on, which can be
         # useful since soft_defaults are not static type checked.
         if ioattrs is not None:
             have_soft_default = False
@@ -319,11 +324,13 @@
         if origin in SIMPLE_TYPES:
             return

-        # For sets and lists, check out their single contained type (if any).
+        # For sets and lists, check out their single contained type (if
+        # any).
         if origin in (list, set):
             childtypes = typing.get_args(anntype)
             if len(childtypes) == 0:
-                # This is equivalent to Any; nothing else needs checking.
+                # This is equivalent to Any; nothing else needs
+                # checking.
                 return
             if len(childtypes) > 1:
                 raise TypeError(
@@ -346,7 +353,8 @@
             # For key types we support Any, str, int,
             # and Enums with uniform str/int values.
             if not childtypes or childtypes[0] is typing.Any:
-                # 'Any' needs no further checks (just checked per-instance).
+                # 'Any' needs no further checks (just checked
+                # per-instance).
                 pass
             elif childtypes[0] in (str, int):
                 # str and int are all good as keys.
@@ -362,7 +370,8 @@

             # For value types we support any of our normal types.
             if not childtypes or _get_origin(childtypes[1]) is typing.Any:
-                # 'Any' needs no further checks (just checked per-instance).
+                # 'Any' needs no further checks (just checked
+                # per-instance).
                 pass
             else:
                 self.prep_type(
@@ -374,9 +383,9 @@
                 )
             return

-        # For Tuples, simply check individual member types.
-        # (and, for now, explicitly disallow zero member types or usage
-        # of ellipsis)
+        # For Tuples, simply check individual member types. (and, for
+        # now, explicitly disallow zero member types or usage of
+        # ellipsis)
         if origin is tuple:
             childtypes = typing.get_args(anntype)
             if not childtypes:
@@ -405,8 +414,9 @@
             self.prep_enum(origin)
             return

-        # We allow datetime objects (and google's extended subclass of them
-        # used in firestore, which is why we don't look for exact type here).
+        # We allow datetime objects (and google's extended subclass of
+        # them used in firestore, which is why we don't look for exact
+        # type here).
         if issubclass(origin, datetime.datetime):
             return
