Merge branch 'master' into replay-rewind

This commit is contained in:
Roman Trapeznikov 2024-02-10 00:49:14 +03:00 committed by GitHub
commit 96af95ef03
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
33 changed files with 714 additions and 342 deletions

.efrocachemap (generated)
View File

@ -421,7 +421,7 @@
"build/assets/ba_data/audio/zoeOw.ogg": "74befe45a8417e95b6a2233c51992a26", "build/assets/ba_data/audio/zoeOw.ogg": "74befe45a8417e95b6a2233c51992a26",
"build/assets/ba_data/audio/zoePickup01.ogg": "48ab8cddfcde36a750856f3f81dd20c8", "build/assets/ba_data/audio/zoePickup01.ogg": "48ab8cddfcde36a750856f3f81dd20c8",
"build/assets/ba_data/audio/zoeScream01.ogg": "2b468aedfa8741090247f04eb9e6df55", "build/assets/ba_data/audio/zoeScream01.ogg": "2b468aedfa8741090247f04eb9e6df55",
"build/assets/ba_data/data/langdata.json": "750e45f2f19a94a44703e3d31b9a8e96", "build/assets/ba_data/data/langdata.json": "831b83240126d0a851104f4148712ed1",
"build/assets/ba_data/data/languages/arabic.json": "0db32e21b6d5337ccca478381744aa88", "build/assets/ba_data/data/languages/arabic.json": "0db32e21b6d5337ccca478381744aa88",
"build/assets/ba_data/data/languages/belarussian.json": "a112dfca3e188387516788bd8229c5b0", "build/assets/ba_data/data/languages/belarussian.json": "a112dfca3e188387516788bd8229c5b0",
"build/assets/ba_data/data/languages/chinese.json": "1360ffde06828b63ce4fe956c3c3cd1d", "build/assets/ba_data/data/languages/chinese.json": "1360ffde06828b63ce4fe956c3c3cd1d",
@ -456,7 +456,7 @@
"build/assets/ba_data/data/languages/thai.json": "9c425b420f0488a7f883da98947657ad", "build/assets/ba_data/data/languages/thai.json": "9c425b420f0488a7f883da98947657ad",
"build/assets/ba_data/data/languages/turkish.json": "2be25c89ca754341f27750e0d595f31e", "build/assets/ba_data/data/languages/turkish.json": "2be25c89ca754341f27750e0d595f31e",
"build/assets/ba_data/data/languages/ukrainian.json": "b54a38e93deebafa5706ba2d1f626892", "build/assets/ba_data/data/languages/ukrainian.json": "b54a38e93deebafa5706ba2d1f626892",
"build/assets/ba_data/data/languages/venetian.json": "8e9714d98a85e428ce3543fc49188a46", "build/assets/ba_data/data/languages/venetian.json": "f896fc3df13a42f1bef8813ca80b1a09",
"build/assets/ba_data/data/languages/vietnamese.json": "921cd1e50f60fe3e101f246e172750ba", "build/assets/ba_data/data/languages/vietnamese.json": "921cd1e50f60fe3e101f246e172750ba",
"build/assets/ba_data/data/maps/big_g.json": "1dd301d490643088a435ce75df971054", "build/assets/ba_data/data/maps/big_g.json": "1dd301d490643088a435ce75df971054",
"build/assets/ba_data/data/maps/bridgit.json": "6aea74805f4880cc11237c5734a24422", "build/assets/ba_data/data/maps/bridgit.json": "6aea74805f4880cc11237c5734a24422",
@ -4060,50 +4060,50 @@
"build/assets/windows/Win32/ucrtbased.dll": "2def5335207d41b21b9823f6805997f1", "build/assets/windows/Win32/ucrtbased.dll": "2def5335207d41b21b9823f6805997f1",
"build/assets/windows/Win32/vc_redist.x86.exe": "b08a55e2e77623fe657bea24f223a3ae", "build/assets/windows/Win32/vc_redist.x86.exe": "b08a55e2e77623fe657bea24f223a3ae",
"build/assets/windows/Win32/vcruntime140d.dll": "865b2af4d1e26a1a8073c89acb06e599", "build/assets/windows/Win32/vcruntime140d.dll": "865b2af4d1e26a1a8073c89acb06e599",
"build/prefab/full/linux_arm64_gui/debug/ballisticakit": "26eea64d4509875c9a88da74f49e675c", "build/prefab/full/linux_arm64_gui/debug/ballisticakit": "d5a8312cd9cf65f32ca2a7c4a2063c03",
"build/prefab/full/linux_arm64_gui/release/ballisticakit": "0a39319a89364641f3bb0598821b4288", "build/prefab/full/linux_arm64_gui/release/ballisticakit": "aecb00e9044fa677583e1036fa7875d8",
"build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "84567063607be0227ef779027e12d19d", "build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "eca7f9ab892edfa7423a9d4a6f89e571",
"build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "f4458855192dedd13a28d36dc3962890", "build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "99647f48362f84112d23a9bc89eaa983",
"build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "4c0679b0157c2dd63519e5225d99359d", "build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "31e21a64d77fc0834832b633a26d986b",
"build/prefab/full/linux_x86_64_gui/release/ballisticakit": "335a3f06dc6dd361d6122fd9143124ae", "build/prefab/full/linux_x86_64_gui/release/ballisticakit": "7c12b4078c3af6e627a4051b1c1d8370",
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "041a300c9fa99c82395e1ebc66e81fe3", "build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "f7a66c48321efa4462e8eae6b72db2b2",
"build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "181145bf30e752991860acd0e44f972c", "build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "08cdbeb2ca4fa8c996f3369680c4e5cd",
"build/prefab/full/mac_arm64_gui/debug/ballisticakit": "8531542c35242bcbffc0309cef10b2b8", "build/prefab/full/mac_arm64_gui/debug/ballisticakit": "f92679bab5a0d057427962869e19f057",
"build/prefab/full/mac_arm64_gui/release/ballisticakit": "48cdebbdea839f6b8fc8f5cb69d7f961", "build/prefab/full/mac_arm64_gui/release/ballisticakit": "d5bcd695f84dab1ab32655989d399c9e",
"build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "159003daac99048702c74120be565bad", "build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "c766f437ece15dae0ee971e4c2e10a2d",
"build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "51c9582a1efaae50e1c435c13c390855", "build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "cbecc4c11b9aa4621abfdc996fecfd74",
"build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "d66c11ebe6d9035ea7e86b362f8505a1", "build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "7af782c9d9bcf1396a15dea6f2493d70",
"build/prefab/full/mac_x86_64_gui/release/ballisticakit": "1f8113ffba1d000120bf83ac268c603b", "build/prefab/full/mac_x86_64_gui/release/ballisticakit": "2c04f3f68db3e73e4aad4c656d956c00",
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "6f2a68c0370061a2913278d97b039ecc", "build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "132c83ee8811828739601ac3d0599fe9",
"build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "471e7f81fac96b4db752c5cdaeed7168", "build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "8de942a2e1ff96c147a9500a56ca4f64",
"build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "94916e80a9d7bc7801db666beceea026", "build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "6bf51ccbd01937bf1b28cfffe029d857",
"build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "1bc098ae93dd18143fb64ae5cbc33c19", "build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "c5f0d834a47852f1c240e17a6c933e0a",
"build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "da99cef03f12a6ff2c0065f4616262f2", "build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "4f74b71dabd207bee732dc91c9a28dc4",
"build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "14b67157a3bf57b9de067089476f79d5", "build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "f48ab8e4c4d05f4b2231bebf33c965f1",
"build/prefab/lib/linux_arm64_gui/debug/libballisticaplus.a": "8709ad96140d71760c2f493ee8bd7c43", "build/prefab/lib/linux_arm64_gui/debug/libballisticaplus.a": "ee36a39fd0f524989cb68930c89c8868",
"build/prefab/lib/linux_arm64_gui/release/libballisticaplus.a": "ee829cd5488e9750570dc6f602d65589", "build/prefab/lib/linux_arm64_gui/release/libballisticaplus.a": "dbed9145e5db116d92aa47cb9e98da39",
"build/prefab/lib/linux_arm64_server/debug/libballisticaplus.a": "8709ad96140d71760c2f493ee8bd7c43", "build/prefab/lib/linux_arm64_server/debug/libballisticaplus.a": "ee36a39fd0f524989cb68930c89c8868",
"build/prefab/lib/linux_arm64_server/release/libballisticaplus.a": "ee829cd5488e9750570dc6f602d65589", "build/prefab/lib/linux_arm64_server/release/libballisticaplus.a": "dbed9145e5db116d92aa47cb9e98da39",
"build/prefab/lib/linux_x86_64_gui/debug/libballisticaplus.a": "35fe69d96c154b97b534711dae9d8d3a", "build/prefab/lib/linux_x86_64_gui/debug/libballisticaplus.a": "dc078f11a4e93062adc7d210fd4f08fb",
"build/prefab/lib/linux_x86_64_gui/release/libballisticaplus.a": "2db876e543b3e93128ec421ea5cbb011", "build/prefab/lib/linux_x86_64_gui/release/libballisticaplus.a": "a74bea3380d0fb39f78ac7b7598c1a72",
"build/prefab/lib/linux_x86_64_server/debug/libballisticaplus.a": "35fe69d96c154b97b534711dae9d8d3a", "build/prefab/lib/linux_x86_64_server/debug/libballisticaplus.a": "dc078f11a4e93062adc7d210fd4f08fb",
"build/prefab/lib/linux_x86_64_server/release/libballisticaplus.a": "2db876e543b3e93128ec421ea5cbb011", "build/prefab/lib/linux_x86_64_server/release/libballisticaplus.a": "a74bea3380d0fb39f78ac7b7598c1a72",
"build/prefab/lib/mac_arm64_gui/debug/libballisticaplus.a": "417ea0f30d203d5de0e235550fcd7ab8", "build/prefab/lib/mac_arm64_gui/debug/libballisticaplus.a": "b397e020f33132c4dd2280cb1222cd14",
"build/prefab/lib/mac_arm64_gui/release/libballisticaplus.a": "72d071e977c88454d0623c4a9fb34361", "build/prefab/lib/mac_arm64_gui/release/libballisticaplus.a": "ff0cb4db976707d25bd401bce80a4882",
"build/prefab/lib/mac_arm64_server/debug/libballisticaplus.a": "417ea0f30d203d5de0e235550fcd7ab8", "build/prefab/lib/mac_arm64_server/debug/libballisticaplus.a": "b397e020f33132c4dd2280cb1222cd14",
"build/prefab/lib/mac_arm64_server/release/libballisticaplus.a": "72d071e977c88454d0623c4a9fb34361", "build/prefab/lib/mac_arm64_server/release/libballisticaplus.a": "ff0cb4db976707d25bd401bce80a4882",
"build/prefab/lib/mac_x86_64_gui/debug/libballisticaplus.a": "de1b228d95c47a7c296a853778715326", "build/prefab/lib/mac_x86_64_gui/debug/libballisticaplus.a": "c464accef921df1325459bdd10c59b84",
"build/prefab/lib/mac_x86_64_gui/release/libballisticaplus.a": "79117cbfdf695298e1d9ae997d990c4d", "build/prefab/lib/mac_x86_64_gui/release/libballisticaplus.a": "0896e849885cef50bcf33ce863efa7d2",
"build/prefab/lib/mac_x86_64_server/debug/libballisticaplus.a": "984f0990a8e4cca29a382d70e51cc051", "build/prefab/lib/mac_x86_64_server/debug/libballisticaplus.a": "e53c808357cc0a2f0da7b870be147083",
"build/prefab/lib/mac_x86_64_server/release/libballisticaplus.a": "79117cbfdf695298e1d9ae997d990c4d", "build/prefab/lib/mac_x86_64_server/release/libballisticaplus.a": "0896e849885cef50bcf33ce863efa7d2",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "97a0aee0716397c0394c620b0cdc8cfa", "build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "e34cc55fd284e31d9ed1151c5a51bf34",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "5edf5fd129429079b24368da6c792c44", "build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "36cb65be158a0103d81c82d8a51dc8b6",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "e453446a36102733a1f0db636fafb704", "build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "21f8a61745c2fec88749299f5aeeeaf9",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "dfb843bbc924daf7a2e2a2eb6b4811df", "build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "d61272f101f87b140b84895e482b07f4",
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "09bb45bcbfad7c0f63b9494ceca669cc", "build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "36c30bcd93d38569b9515ed17896d8de",
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "c8d10517d61dc5c4d7c94a5eccecab4a", "build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "841c7cd3cc96c91ecd11335a91c0c465",
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "4944d18bb54894b0488cbdaa7b2ef06f", "build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "305aab4423bf510f6bf95fe0c996128f",
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "d17c4758367051e734601018b081f786", "build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "f1066b8591d7859df76c8e976ceee2d5",
"src/assets/ba_data/python/babase/_mgen/__init__.py": "f885fed7f2ed98ff2ba271f9dbe3391c", "src/assets/ba_data/python/babase/_mgen/__init__.py": "f885fed7f2ed98ff2ba271f9dbe3391c",
"src/assets/ba_data/python/babase/_mgen/enums.py": "b611c090513a21e2fe90e56582724e9d", "src/assets/ba_data/python/babase/_mgen/enums.py": "b611c090513a21e2fe90e56582724e9d",
"src/ballistica/base/mgen/pyembed/binding_base.inc": "72bfed2cce8ff19741989dec28302f3f", "src/ballistica/base/mgen/pyembed/binding_base.inc": "72bfed2cce8ff19741989dec28302f3f",

View File

@ -9,7 +9,7 @@ assignees: ''
### Description ### Description
Describe the bug. Do not forget to fill the title. Describe the bug. Do not forget to fill the title.
Make sure you're running the game without any modifications (unless you want to report an api bug). Make sure you're running the game without any modifications.
### Steps to reproduce ### Steps to reproduce
1. Launch BombSquad 1. Launch BombSquad
@ -18,16 +18,17 @@ Make sure you're running game without any modifications (unless you want to repo
4. Bug! 4. Bug!
### Expected behavior ### Expected behavior
Describe what you think should happen. Describe what you think should happen if it's not obvious.
### Machine ### Machine
**Platform**: Windows 10 / Ubuntu 20.04 LTS / AOSP 8.1 / etc. **Platform**: Windows 11 / Ubuntu 22.04 LTS / Android 12 / MyToasterOS 7.3 / ... \
**BombSquad version**: [1.5.27](https://github.com/efroemling/ballistica/releases/tag/v1.5.27) **BombSquad version**: [1.7.32](https://github.com/efroemling/ballistica/tree/v1.7.32) \
**Commit**: [2642488](https://github.com/efroemling/ballistica/commit/2642488a51b250752169738f5aeeccaafa2bc8de) **Commit**: https://github.com/efroemling/ballistica/tree/978f32f9f098bd0ff1dc64b496ec31cf493ded09
Select what you want to use: a release version or a commit. Please use a hyperlink.
You may specify the BombSquad version you're running or refer to the latest commit.
### Screenshots ### Screenshots
Put some screenshots here if needed. Put some screenshots here if needed.
### Extra ### Extra
Put some extra information here. For example, describe your assumptions about the cause of the bug. You may put some extra information here. For example, describe your assumptions about the cause of the bug.

View File

@ -1,4 +1,4 @@
### 1.7.33 (build 21762, api 8, 2024-01-24) ### 1.7.33 (build 21766, api 8, 2024-02-01)
- Stress test input-devices are now a bit smarter; they won't press any buttons - Stress test input-devices are now a bit smarter; they won't press any buttons
while UIs are up (this could cause lots of chaos if it happened). while UIs are up (this could cause lots of chaos if it happened).
- Added a 'Show Demos When Idle' option in advanced settings. If enabled, the - Added a 'Show Demos When Idle' option in advanced settings. If enabled, the
@ -21,6 +21,8 @@
catch problems where a base class changes or removes a method and child catch problems where a base class changes or removes a method and child
classes forget to adapt to the change. classes forget to adapt to the change.
- Replays now have rewind/fast-forward buttons!! (Thanks Dliwk, vishal332008!) - Replays now have rewind/fast-forward buttons!! (Thanks Dliwk, vishal332008!)
- Implemented `efro.dataclassio.IOMultiType` which will make my life a lot
easier.
### 1.7.32 (build 21741, api 8, 2023-12-20) ### 1.7.32 (build 21741, api 8, 2023-12-20)
- Fixed a screen message that no one will ever see (Thanks vishal332008?...) - Fixed a screen message that no one will ever see (Thanks vishal332008?...)

View File

@ -49,7 +49,7 @@ endif
# Prereq targets that should be safe to run anytime; even if project-files # Prereq targets that should be safe to run anytime; even if project-files
# are out of date. # are out of date.
PREREQS_SAFE = .cache/checkenv $(PCOMMANDBATCHBIN) .dir-locals.el .mypy.ini \ PREREQS_SAFE = .cache/checkenv $(PCOMMANDBATCHBIN) .dir-locals.el .mypy.ini \
.pyrightconfig.json .pycheckers .pylintrc .style.yapf .clang-format \ .pyrightconfig.json .pylintrc .style.yapf .clang-format \
ballisticakit-cmake/.clang-format .editorconfig ballisticakit-cmake/.clang-format .editorconfig
# Prereq targets that may break if the project needs updating should go here. # Prereq targets that may break if the project needs updating should go here.
@ -1216,9 +1216,6 @@ ENV_SRC = $(PCOMMAND) tools/batools/build.py
.pyrightconfig.json: config/toolconfigsrc/pyrightconfig.yaml $(TOOL_CFG_SRC) .pyrightconfig.json: config/toolconfigsrc/pyrightconfig.yaml $(TOOL_CFG_SRC)
@$(TOOL_CFG_INST) $< $@ @$(TOOL_CFG_INST) $< $@
.pycheckers: config/toolconfigsrc/pycheckers $(TOOL_CFG_SRC)
@$(TOOL_CFG_INST) $< $@
# Set this to 1 to skip environment checks. # Set this to 1 to skip environment checks.
SKIP_ENV_CHECKS ?= 0 SKIP_ENV_CHECKS ?= 0

View File

@ -288,14 +288,12 @@ class DirectoryScan:
) -> None: ) -> None:
"""Scan provided path and add module entries to provided list.""" """Scan provided path and add module entries to provided list."""
try: try:
# Special case: let's save some time and skip the whole 'babase'
# package since we know it doesn't contain any meta tags.
fullpath = Path(path, subpath) fullpath = Path(path, subpath)
# Note: skipping hidden dirs (starting with '.').
entries = [ entries = [
(path, Path(subpath, name)) (path, Path(subpath, name))
for name in os.listdir(fullpath) for name in os.listdir(fullpath)
# Actually scratch that for now; trying to avoid special cases. if not name.startswith('.')
# if name != 'babase'
] ]
except PermissionError: except PermissionError:
# Expected sometimes. # Expected sometimes.

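A standalone sketch of the pattern the new comprehension above adopts (the function name is illustrative, not taken from the real scanner):

import os
from pathlib import Path

def visible_module_entries(path: Path, subpath: Path) -> list[tuple[Path, Path]]:
    # Pair the scan root with each entry under subpath, skipping hidden
    # names (those starting with '.'), as the hunk above now does.
    fullpath = Path(path, subpath)
    return [
        (path, Path(subpath, name))
        for name in os.listdir(fullpath)
        if not name.startswith('.')
    ]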
View File

@ -52,7 +52,7 @@ if TYPE_CHECKING:
# Build number and version of the ballistica binary we expect to be # Build number and version of the ballistica binary we expect to be
# using. # using.
TARGET_BALLISTICA_BUILD = 21762 TARGET_BALLISTICA_BUILD = 21766
TARGET_BALLISTICA_VERSION = '1.7.33' TARGET_BALLISTICA_VERSION = '1.7.33'
@ -287,9 +287,9 @@ def _setup_certs(contains_python_dist: bool) -> None:
import certifi import certifi
# Let both OpenSSL and requests (if present) know to use this. # Let both OpenSSL and requests (if present) know to use this.
os.environ['SSL_CERT_FILE'] = os.environ[ os.environ['SSL_CERT_FILE'] = os.environ['REQUESTS_CA_BUNDLE'] = (
'REQUESTS_CA_BUNDLE' certifi.where()
] = certifi.where() )
def _setup_paths( def _setup_paths(

View File

@ -14,7 +14,7 @@ Mac:
(brew install python3). (brew install python3).
Linux (x86_64): Linux (x86_64):
- Server binaries are currently compiled against Ubuntu 20 LTS. - Server binaries are currently compiled against Ubuntu 22 LTS.
Raspberry Pi: Raspberry Pi:
- The server binary was compiled on a Raspberry Pi 4 running Raspbian Buster. - The server binary was compiled on a Raspberry Pi 4 running Raspbian Buster.

View File

@ -39,7 +39,7 @@ auto main(int argc, char** argv) -> int {
namespace ballistica { namespace ballistica {
// These are set automatically via script; don't modify them here. // These are set automatically via script; don't modify them here.
const int kEngineBuildNumber = 21762; const int kEngineBuildNumber = 21766;
const char* kEngineVersion = "1.7.33"; const char* kEngineVersion = "1.7.33";
const int kEngineApiVersion = 8; const int kEngineApiVersion = 8;

View File

@ -5,10 +5,18 @@
from __future__ import annotations from __future__ import annotations
from enum import Enum import copy
import datetime import datetime
from enum import Enum
from dataclasses import field, dataclass from dataclasses import field, dataclass
from typing import TYPE_CHECKING, Any, Sequence, Annotated from typing import (
TYPE_CHECKING,
Any,
Sequence,
Annotated,
assert_type,
assert_never,
)
from typing_extensions import override from typing_extensions import override
import pytest import pytest
@ -24,10 +32,11 @@ from efro.dataclassio import (
Codec, Codec,
DataclassFieldLookup, DataclassFieldLookup,
IOExtendedData, IOExtendedData,
IOMultiType,
) )
if TYPE_CHECKING: if TYPE_CHECKING:
pass from typing import Self
class _EnumTest(Enum): class _EnumTest(Enum):
@ -1069,3 +1078,221 @@ def test_soft_default() -> None:
todict = dataclass_to_dict(orig) todict = dataclass_to_dict(orig)
assert todict == {'ival': 2} assert todict == {'ival': 2}
assert dataclass_from_dict(_TestClassE8, todict) == orig assert dataclass_from_dict(_TestClassE8, todict) == orig
class MTTestTypeID(Enum):
"""IDs for our multi-type class."""
CLASS_1 = 'm1'
CLASS_2 = 'm2'
class MTTestBase(IOMultiType[MTTestTypeID]):
"""Our multi-type class.
These top level multi-type classes are special parent classes
that know about all of their child classes and how to serialize
& deserialize them using explicit type ids. We can then use the
parent class in annotations and dataclassio will do the right thing.
Useful for stuff like Message classes where we may want to store a
bunch of different types of them into one place.
"""
@override
@classmethod
def get_type(cls, type_id: MTTestTypeID) -> type[MTTestBase]:
"""Return the subclass for each of our type-ids."""
# This uses assert_never() to ensure we cover all cases in the
# enum. Though this is less efficient than looking up by dict
# would be. If we had lots of values we could also support lazy
# loading by importing classes only when their value is being
# requested.
val: type[MTTestBase]
if type_id is MTTestTypeID.CLASS_1:
val = MTTestClass1
elif type_id is MTTestTypeID.CLASS_2:
val = MTTestClass2
else:
assert_never(type_id)
return val
@override
@classmethod
def get_type_id(cls) -> MTTestTypeID:
"""Provide the type-id for this subclass."""
# If we wanted, we could just maintain a static mapping
# of types-to-ids here, but there are benefits to letting
# each child class speak for itself. Namely that we can
# do lazy-loading and don't need to have all types present
# here.
# So we'll let all our child classes override this.
raise NotImplementedError()
@ioprepped
@dataclass(frozen=True) # Frozen so we can test in set()
class MTTestClass1(MTTestBase):
"""A test child-class for use with our multi-type class."""
ival: int
@override
@classmethod
def get_type_id(cls) -> MTTestTypeID:
return MTTestTypeID.CLASS_1
@ioprepped
@dataclass(frozen=True) # Frozen so we can test in set()
class MTTestClass2(MTTestBase):
"""Another test child-class for use with our multi-type class."""
sval: str
@override
@classmethod
def get_type_id(cls) -> MTTestTypeID:
return MTTestTypeID.CLASS_2
def test_multi_type() -> None:
"""Test IOMultiType stuff."""
# pylint: disable=too-many-locals
# pylint: disable=too-many-statements
# Test converting single instances back and forth.
val1: MTTestBase = MTTestClass1(ival=123)
tpname = MTTestBase.ID_STORAGE_NAME
outdict = dataclass_to_dict(val1)
assert outdict == {'ival': 123, tpname: 'm1'}
val2: MTTestBase = MTTestClass2(sval='whee')
outdict2 = dataclass_to_dict(val2)
assert outdict2 == {'sval': 'whee', tpname: 'm2'}
# Make sure types and values work for both concrete types and the
# multi-type.
assert_type(dataclass_from_dict(MTTestClass1, outdict), MTTestClass1)
assert_type(dataclass_from_dict(MTTestBase, outdict), MTTestBase)
assert dataclass_from_dict(MTTestClass1, outdict) == val1
assert dataclass_from_dict(MTTestClass2, outdict2) == val2
assert dataclass_from_dict(MTTestBase, outdict) == val1
assert dataclass_from_dict(MTTestBase, outdict2) == val2
# Trying to load as a multi-type should fail if there is no type
# value present.
outdictmod = copy.deepcopy(outdict)
del outdictmod[tpname]
with pytest.raises(ValueError):
dataclass_from_dict(MTTestBase, outdictmod)
# However it should work when loading an exact type. This can be
# necessary to gracefully upgrade old data to multi-type form.
dataclass_from_dict(MTTestClass1, outdictmod)
# Now test our multi-type embedded in other classes. We should be
# able to throw a mix of things in there and have them deserialize
# back the types we started with.
# Individual values:
@ioprepped
@dataclass
class _TestContainerClass1:
obj_a: MTTestBase
obj_b: MTTestBase
container1 = _TestContainerClass1(
obj_a=MTTestClass1(234), obj_b=MTTestClass2('987')
)
outdict = dataclass_to_dict(container1)
container1b = dataclass_from_dict(_TestContainerClass1, outdict)
assert container1 == container1b
# Lists:
@ioprepped
@dataclass
class _TestContainerClass2:
objs: list[MTTestBase]
container2 = _TestContainerClass2(
objs=[MTTestClass1(111), MTTestClass2('bbb')]
)
outdict = dataclass_to_dict(container2)
container2b = dataclass_from_dict(_TestContainerClass2, outdict)
assert container2 == container2b
# Dict values:
@ioprepped
@dataclass
class _TestContainerClass3:
objs: dict[int, MTTestBase]
container3 = _TestContainerClass3(
objs={1: MTTestClass1(456), 2: MTTestClass2('gronk')}
)
outdict = dataclass_to_dict(container3)
container3b = dataclass_from_dict(_TestContainerClass3, outdict)
assert container3 == container3b
# Tuples:
@ioprepped
@dataclass
class _TestContainerClass4:
objs: tuple[MTTestBase, MTTestBase]
container4 = _TestContainerClass4(
objs=(MTTestClass1(932), MTTestClass2('potato'))
)
outdict = dataclass_to_dict(container4)
container4b = dataclass_from_dict(_TestContainerClass4, outdict)
assert container4 == container4b
# Sets (note: dataclasses must be frozen for this to work):
@ioprepped
@dataclass
class _TestContainerClass5:
objs: set[MTTestBase]
container5 = _TestContainerClass5(
objs={MTTestClass1(424), MTTestClass2('goo')}
)
outdict = dataclass_to_dict(container5)
container5b = dataclass_from_dict(_TestContainerClass5, outdict)
assert container5 == container5b
# Optionals.
@ioprepped
@dataclass
class _TestContainerClass6:
obj: MTTestBase | None
container6 = _TestContainerClass6(obj=None)
outdict = dataclass_to_dict(container6)
container6b = dataclass_from_dict(_TestContainerClass6, outdict)
assert container6 == container6b
container6 = _TestContainerClass6(obj=MTTestClass2('fwr'))
outdict = dataclass_to_dict(container6)
container6b = dataclass_from_dict(_TestContainerClass6, outdict)
assert container6 == container6b
@ioprepped
@dataclass
class _TestContainerClass7:
obj: Annotated[
MTTestBase | None,
IOAttrs('o', soft_default=None),
]
container7 = _TestContainerClass7(obj=None)
outdict = dataclass_to_dict(container7)
container7b = dataclass_from_dict(_TestContainerClass7, {})
assert container7 == container7b

View File

@ -149,16 +149,13 @@ class _BoundTestMessageSenderSync(BoundMessageSender):
"""Protocol-specific bound sender.""" """Protocol-specific bound sender."""
@overload @overload
def send(self, message: _TMsg1) -> _TResp1: def send(self, message: _TMsg1) -> _TResp1: ...
...
@overload @overload
def send(self, message: _TMsg2) -> _TResp1 | _TResp2: def send(self, message: _TMsg2) -> _TResp1 | _TResp2: ...
...
@overload @overload
def send(self, message: _TMsg3) -> None: def send(self, message: _TMsg3) -> None: ...
...
def send(self, message: Message) -> Response | None: def send(self, message: Message) -> Response | None:
"""Send a message synchronously.""" """Send a message synchronously."""
@ -188,16 +185,13 @@ class _BoundTestMessageSenderAsync(BoundMessageSender):
"""Protocol-specific bound sender.""" """Protocol-specific bound sender."""
@overload @overload
async def send_async(self, message: _TMsg1) -> _TResp1: async def send_async(self, message: _TMsg1) -> _TResp1: ...
...
@overload @overload
async def send_async(self, message: _TMsg2) -> _TResp1 | _TResp2: async def send_async(self, message: _TMsg2) -> _TResp1 | _TResp2: ...
...
@overload @overload
async def send_async(self, message: _TMsg3) -> None: async def send_async(self, message: _TMsg3) -> None: ...
...
def send_async(self, message: Message) -> Awaitable[Response | None]: def send_async(self, message: Message) -> Awaitable[Response | None]:
"""Send a message asynchronously.""" """Send a message asynchronously."""
@ -227,40 +221,32 @@ class _BoundTestMessageSenderBBoth(BoundMessageSender):
"""Protocol-specific bound sender.""" """Protocol-specific bound sender."""
@overload @overload
def send(self, message: _TMsg1) -> _TResp1: def send(self, message: _TMsg1) -> _TResp1: ...
...
@overload @overload
def send(self, message: _TMsg2) -> _TResp1 | _TResp2: def send(self, message: _TMsg2) -> _TResp1 | _TResp2: ...
...
@overload @overload
def send(self, message: _TMsg3) -> None: def send(self, message: _TMsg3) -> None: ...
...
@overload @overload
def send(self, message: _TMsg4) -> None: def send(self, message: _TMsg4) -> None: ...
...
def send(self, message: Message) -> Response | None: def send(self, message: Message) -> Response | None:
"""Send a message synchronously.""" """Send a message synchronously."""
return self._sender.send(self._obj, message) return self._sender.send(self._obj, message)
@overload @overload
async def send_async(self, message: _TMsg1) -> _TResp1: async def send_async(self, message: _TMsg1) -> _TResp1: ...
...
@overload @overload
async def send_async(self, message: _TMsg2) -> _TResp1 | _TResp2: async def send_async(self, message: _TMsg2) -> _TResp1 | _TResp2: ...
...
@overload @overload
async def send_async(self, message: _TMsg3) -> None: async def send_async(self, message: _TMsg3) -> None: ...
...
@overload @overload
async def send_async(self, message: _TMsg4) -> None: async def send_async(self, message: _TMsg4) -> None: ...
...
def send_async(self, message: Message) -> Awaitable[Response | None]: def send_async(self, message: Message) -> Awaitable[Response | None]:
"""Send a message asynchronously.""" """Send a message asynchronously."""
@ -338,22 +324,19 @@ class _TestSyncMessageReceiver(MessageReceiver):
def handler( def handler(
self, self,
call: Callable[[Any, _TMsg1], _TResp1], call: Callable[[Any, _TMsg1], _TResp1],
) -> Callable[[Any, _TMsg1], _TResp1]: ) -> Callable[[Any, _TMsg1], _TResp1]: ...
...
@overload @overload
def handler( def handler(
self, self,
call: Callable[[Any, _TMsg2], _TResp1 | _TResp2], call: Callable[[Any, _TMsg2], _TResp1 | _TResp2],
) -> Callable[[Any, _TMsg2], _TResp1 | _TResp2]: ) -> Callable[[Any, _TMsg2], _TResp1 | _TResp2]: ...
...
@overload @overload
def handler( def handler(
self, self,
call: Callable[[Any, _TMsg3], None], call: Callable[[Any, _TMsg3], None],
) -> Callable[[Any, _TMsg3], None]: ) -> Callable[[Any, _TMsg3], None]: ...
...
def handler(self, call: Callable) -> Callable: def handler(self, call: Callable) -> Callable:
"""Decorator to register message handlers.""" """Decorator to register message handlers."""
@ -399,22 +382,19 @@ class _TestAsyncMessageReceiver(MessageReceiver):
def handler( def handler(
self, self,
call: Callable[[Any, _TMsg1], Awaitable[_TResp1]], call: Callable[[Any, _TMsg1], Awaitable[_TResp1]],
) -> Callable[[Any, _TMsg1], Awaitable[_TResp1]]: ) -> Callable[[Any, _TMsg1], Awaitable[_TResp1]]: ...
...
@overload @overload
def handler( def handler(
self, self,
call: Callable[[Any, _TMsg2], Awaitable[_TResp1 | _TResp2]], call: Callable[[Any, _TMsg2], Awaitable[_TResp1 | _TResp2]],
) -> Callable[[Any, _TMsg2], Awaitable[_TResp1 | _TResp2]]: ) -> Callable[[Any, _TMsg2], Awaitable[_TResp1 | _TResp2]]: ...
...
@overload @overload
def handler( def handler(
self, self,
call: Callable[[Any, _TMsg3], Awaitable[None]], call: Callable[[Any, _TMsg3], Awaitable[None]],
) -> Callable[[Any, _TMsg3], Awaitable[None]]: ) -> Callable[[Any, _TMsg3], Awaitable[None]]: ...
...
def handler(self, call: Callable) -> Callable: def handler(self, call: Callable) -> Callable:
"""Decorator to register message handlers.""" """Decorator to register message handlers."""

View File

@ -75,9 +75,9 @@ class ResponseData:
delay_seconds: Annotated[float, IOAttrs('d', store_default=False)] = 0.0 delay_seconds: Annotated[float, IOAttrs('d', store_default=False)] = 0.0
login: Annotated[str | None, IOAttrs('l', store_default=False)] = None login: Annotated[str | None, IOAttrs('l', store_default=False)] = None
logout: Annotated[bool, IOAttrs('lo', store_default=False)] = False logout: Annotated[bool, IOAttrs('lo', store_default=False)] = False
dir_manifest: Annotated[ dir_manifest: Annotated[str | None, IOAttrs('man', store_default=False)] = (
str | None, IOAttrs('man', store_default=False) None
] = None )
uploads: Annotated[ uploads: Annotated[
tuple[list[str], str, dict] | None, IOAttrs('u', store_default=False) tuple[list[str], str, dict] | None, IOAttrs('u', store_default=False)
] = None ] = None
@ -97,9 +97,9 @@ class ResponseData:
input_prompt: Annotated[ input_prompt: Annotated[
tuple[str, bool] | None, IOAttrs('inp', store_default=False) tuple[str, bool] | None, IOAttrs('inp', store_default=False)
] = None ] = None
end_message: Annotated[ end_message: Annotated[str | None, IOAttrs('em', store_default=False)] = (
str | None, IOAttrs('em', store_default=False) None
] = None )
end_message_end: Annotated[str, IOAttrs('eme', store_default=False)] = '\n' end_message_end: Annotated[str, IOAttrs('eme', store_default=False)] = '\n'
end_command: Annotated[ end_command: Annotated[
tuple[str, dict] | None, IOAttrs('ec', store_default=False) tuple[str, dict] | None, IOAttrs('ec', store_default=False)

View File

@ -63,9 +63,9 @@ class PrivateHostingConfig:
randomize: bool = False randomize: bool = False
tutorial: bool = False tutorial: bool = False
custom_team_names: tuple[str, str] | None = None custom_team_names: tuple[str, str] | None = None
custom_team_colors: tuple[ custom_team_colors: (
tuple[float, float, float], tuple[float, float, float] tuple[tuple[float, float, float], tuple[float, float, float]] | None
] | None = None ) = None
playlist: list[dict[str, Any]] | None = None playlist: list[dict[str, Any]] | None = None
exit_minutes: float = 120.0 exit_minutes: float = 120.0
exit_minutes_unclean: float = 180.0 exit_minutes_unclean: float = 180.0

View File

@ -134,9 +134,9 @@ class ServerConfig:
team_names: tuple[str, str] | None = None team_names: tuple[str, str] | None = None
# Team colors (teams mode only). # Team colors (teams mode only).
team_colors: tuple[ team_colors: (
tuple[float, float, float], tuple[float, float, float] tuple[tuple[float, float, float], tuple[float, float, float]] | None
] | None = None ) = None
# Whether to enable the queue where players can line up before entering # Whether to enable the queue where players can line up before entering
# your server. Disabling this can be used as a workaround to deal with # your server. Disabling this can be used as a workaround to deal with

View File

@ -18,10 +18,10 @@ if TYPE_CHECKING:
@ioprepped @ioprepped
@dataclass @dataclass
class DirectoryManifestFile: class DirectoryManifestFile:
"""Describes metadata and hashes for a file in a manifest.""" """Describes a file in a manifest."""
filehash: Annotated[str, IOAttrs('h')] hash_sha256: Annotated[str, IOAttrs('h')]
filesize: Annotated[int, IOAttrs('s')] size: Annotated[int, IOAttrs('s')]
@ioprepped @ioprepped
@ -67,7 +67,7 @@ class DirectoryManifest:
return ( return (
filepath, filepath,
DirectoryManifestFile( DirectoryManifestFile(
filehash=sha.hexdigest(), filesize=filesize hash_sha256=sha.hexdigest(), size=filesize
), ),
) )

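A hedged, self-contained sketch of how an entry with the renamed fields might be computed; ManifestFileEntry is a local stand-in for the DirectoryManifestFile dataclass above, and the helper name and chunk size are illustrative:

import hashlib
from dataclasses import dataclass
from pathlib import Path

@dataclass
class ManifestFileEntry:
    # Local stand-in mirroring DirectoryManifestFile's renamed fields.
    hash_sha256: str
    size: int

def make_manifest_entry(filepath: Path) -> ManifestFileEntry:
    # Hash the file with SHA-256 and record its byte size, matching the
    # rename above (filehash -> hash_sha256, filesize -> size).
    sha = hashlib.sha256()
    size = 0
    with filepath.open('rb') as infile:
        while chunk := infile.read(1024 * 1024):
            sha.update(chunk)
            size += len(chunk)
    return ManifestFileEntry(hash_sha256=sha.hexdigest(), size=size)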
View File

@ -52,7 +52,7 @@ PY_REQUIREMENTS = [
PyRequirement(pipname='requests', minversion=[2, 31, 0]), PyRequirement(pipname='requests', minversion=[2, 31, 0]),
PyRequirement(pipname='pdoc', minversion=[14, 4, 0]), PyRequirement(pipname='pdoc', minversion=[14, 4, 0]),
PyRequirement(pipname='PyYAML', minversion=[6, 0, 1]), PyRequirement(pipname='PyYAML', minversion=[6, 0, 1]),
PyRequirement(pipname='black', minversion=[23, 12, 1]), PyRequirement(pipname='black', minversion=[24, 1, 1]),
PyRequirement(pipname='typing_extensions', minversion=[4, 9, 0]), PyRequirement(pipname='typing_extensions', minversion=[4, 9, 0]),
PyRequirement(pipname='types-filelock', minversion=[3, 2, 7]), PyRequirement(pipname='types-filelock', minversion=[3, 2, 7]),
PyRequirement(pipname='types-requests', minversion=[2, 31, 0, 20240106]), PyRequirement(pipname='types-requests', minversion=[2, 31, 0, 20240106]),
@ -614,7 +614,7 @@ def _get_server_config_template_yaml(projroot: str) -> str:
# Ignore indented lines (our few multi-line special cases). # Ignore indented lines (our few multi-line special cases).
continue continue
if line.startswith(']'): if line.startswith(']') or line.startswith(')'):
# Ignore closing lines (our few multi-line special cases). # Ignore closing lines (our few multi-line special cases).
continue continue
@ -643,7 +643,7 @@ def _get_server_config_template_yaml(projroot: str) -> str:
before_equal_sign = before_equal_sign.strip() before_equal_sign = before_equal_sign.strip()
vval_raw = vval_raw.strip() vval_raw = vval_raw.strip()
vname = before_equal_sign.split()[0] vname = before_equal_sign.split()[0]
assert vname.endswith(':') assert vname.endswith(':'), f"'{vname}' does not end with ':'"
vname = vname[:-1] vname = vname[:-1]
vval: Any vval: Any
if vval_raw == 'field(default_factory=list)': if vval_raw == 'field(default_factory=list)':

View File

@ -83,57 +83,46 @@ if TYPE_CHECKING:
class _CallNoArgs(Generic[OutT]): class _CallNoArgs(Generic[OutT]):
"""Single argument variant of call wrapper.""" """Single argument variant of call wrapper."""
def __init__(self, _call: Callable[[], OutT]): def __init__(self, _call: Callable[[], OutT]): ...
...
def __call__(self) -> OutT: def __call__(self) -> OutT: ...
...
class _Call1Arg(Generic[In1T, OutT]): class _Call1Arg(Generic[In1T, OutT]):
"""Single argument variant of call wrapper.""" """Single argument variant of call wrapper."""
def __init__(self, _call: Callable[[In1T], OutT]): def __init__(self, _call: Callable[[In1T], OutT]): ...
...
def __call__(self, _arg1: In1T) -> OutT: def __call__(self, _arg1: In1T) -> OutT: ...
...
class _Call2Args(Generic[In1T, In2T, OutT]): class _Call2Args(Generic[In1T, In2T, OutT]):
"""Two argument variant of call wrapper""" """Two argument variant of call wrapper"""
def __init__(self, _call: Callable[[In1T, In2T], OutT]): def __init__(self, _call: Callable[[In1T, In2T], OutT]): ...
...
def __call__(self, _arg1: In1T, _arg2: In2T) -> OutT: def __call__(self, _arg1: In1T, _arg2: In2T) -> OutT: ...
...
class _Call3Args(Generic[In1T, In2T, In3T, OutT]): class _Call3Args(Generic[In1T, In2T, In3T, OutT]):
"""Three argument variant of call wrapper""" """Three argument variant of call wrapper"""
def __init__(self, _call: Callable[[In1T, In2T, In3T], OutT]): def __init__(self, _call: Callable[[In1T, In2T, In3T], OutT]): ...
...
def __call__(self, _arg1: In1T, _arg2: In2T, _arg3: In3T) -> OutT: def __call__(self, _arg1: In1T, _arg2: In2T, _arg3: In3T) -> OutT: ...
...
class _Call4Args(Generic[In1T, In2T, In3T, In4T, OutT]): class _Call4Args(Generic[In1T, In2T, In3T, In4T, OutT]):
"""Four argument variant of call wrapper""" """Four argument variant of call wrapper"""
def __init__(self, _call: Callable[[In1T, In2T, In3T, In4T], OutT]): def __init__(self, _call: Callable[[In1T, In2T, In3T, In4T], OutT]): ...
...
def __call__( def __call__(
self, _arg1: In1T, _arg2: In2T, _arg3: In3T, _arg4: In4T self, _arg1: In1T, _arg2: In2T, _arg3: In3T, _arg4: In4T
) -> OutT: ) -> OutT: ...
...
class _Call5Args(Generic[In1T, In2T, In3T, In4T, In5T, OutT]): class _Call5Args(Generic[In1T, In2T, In3T, In4T, In5T, OutT]):
"""Five argument variant of call wrapper""" """Five argument variant of call wrapper"""
def __init__( def __init__(
self, _call: Callable[[In1T, In2T, In3T, In4T, In5T], OutT] self, _call: Callable[[In1T, In2T, In3T, In4T, In5T], OutT]
): ): ...
...
def __call__( def __call__(
self, self,
@ -142,16 +131,14 @@ if TYPE_CHECKING:
_arg3: In3T, _arg3: In3T,
_arg4: In4T, _arg4: In4T,
_arg5: In5T, _arg5: In5T,
) -> OutT: ) -> OutT: ...
...
class _Call6Args(Generic[In1T, In2T, In3T, In4T, In5T, In6T, OutT]): class _Call6Args(Generic[In1T, In2T, In3T, In4T, In5T, In6T, OutT]):
"""Six argument variant of call wrapper""" """Six argument variant of call wrapper"""
def __init__( def __init__(
self, _call: Callable[[In1T, In2T, In3T, In4T, In5T, In6T], OutT] self, _call: Callable[[In1T, In2T, In3T, In4T, In5T, In6T], OutT]
): ): ...
...
def __call__( def __call__(
self, self,
@ -161,8 +148,7 @@ if TYPE_CHECKING:
_arg4: In4T, _arg4: In4T,
_arg5: In5T, _arg5: In5T,
_arg6: In6T, _arg6: In6T,
) -> OutT: ) -> OutT: ...
...
class _Call7Args(Generic[In1T, In2T, In3T, In4T, In5T, In6T, In7T, OutT]): class _Call7Args(Generic[In1T, In2T, In3T, In4T, In5T, In6T, In7T, OutT]):
"""Seven argument variant of call wrapper""" """Seven argument variant of call wrapper"""
@ -170,8 +156,7 @@ if TYPE_CHECKING:
def __init__( def __init__(
self, self,
_call: Callable[[In1T, In2T, In3T, In4T, In5T, In6T, In7T], OutT], _call: Callable[[In1T, In2T, In3T, In4T, In5T, In6T, In7T], OutT],
): ): ...
...
def __call__( def __call__(
self, self,
@ -182,50 +167,43 @@ if TYPE_CHECKING:
_arg5: In5T, _arg5: In5T,
_arg6: In6T, _arg6: In6T,
_arg7: In7T, _arg7: In7T,
) -> OutT: ) -> OutT: ...
...
# No arg call; no args bundled. # No arg call; no args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@overload @overload
def Call(call: Callable[[], OutT]) -> _CallNoArgs[OutT]: def Call(call: Callable[[], OutT]) -> _CallNoArgs[OutT]: ...
...
# 1 arg call; 1 arg bundled. # 1 arg call; 1 arg bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@overload @overload
def Call(call: Callable[[In1T], OutT], arg1: In1T) -> _CallNoArgs[OutT]: def Call(call: Callable[[In1T], OutT], arg1: In1T) -> _CallNoArgs[OutT]: ...
...
# 1 arg call; no args bundled. # 1 arg call; no args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@overload @overload
def Call(call: Callable[[In1T], OutT]) -> _Call1Arg[In1T, OutT]: def Call(call: Callable[[In1T], OutT]) -> _Call1Arg[In1T, OutT]: ...
...
# 2 arg call; 2 args bundled. # 2 arg call; 2 args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@overload @overload
def Call( def Call(
call: Callable[[In1T, In2T], OutT], arg1: In1T, arg2: In2T call: Callable[[In1T, In2T], OutT], arg1: In1T, arg2: In2T
) -> _CallNoArgs[OutT]: ) -> _CallNoArgs[OutT]: ...
...
# 2 arg call; 1 arg bundled. # 2 arg call; 1 arg bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@overload @overload
def Call( def Call(
call: Callable[[In1T, In2T], OutT], arg1: In1T call: Callable[[In1T, In2T], OutT], arg1: In1T
) -> _Call1Arg[In2T, OutT]: ) -> _Call1Arg[In2T, OutT]: ...
...
# 2 arg call; no args bundled. # 2 arg call; no args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@overload @overload
def Call( def Call(
call: Callable[[In1T, In2T], OutT] call: Callable[[In1T, In2T], OutT]
) -> _Call2Args[In1T, In2T, OutT]: ) -> _Call2Args[In1T, In2T, OutT]: ...
...
# 3 arg call; 3 args bundled. # 3 arg call; 3 args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@ -235,32 +213,28 @@ if TYPE_CHECKING:
arg1: In1T, arg1: In1T,
arg2: In2T, arg2: In2T,
arg3: In3T, arg3: In3T,
) -> _CallNoArgs[OutT]: ) -> _CallNoArgs[OutT]: ...
...
# 3 arg call; 2 args bundled. # 3 arg call; 2 args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@overload @overload
def Call( def Call(
call: Callable[[In1T, In2T, In3T], OutT], arg1: In1T, arg2: In2T call: Callable[[In1T, In2T, In3T], OutT], arg1: In1T, arg2: In2T
) -> _Call1Arg[In3T, OutT]: ) -> _Call1Arg[In3T, OutT]: ...
...
# 3 arg call; 1 arg bundled. # 3 arg call; 1 arg bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@overload @overload
def Call( def Call(
call: Callable[[In1T, In2T, In3T], OutT], arg1: In1T call: Callable[[In1T, In2T, In3T], OutT], arg1: In1T
) -> _Call2Args[In2T, In3T, OutT]: ) -> _Call2Args[In2T, In3T, OutT]: ...
...
# 3 arg call; no args bundled. # 3 arg call; no args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@overload @overload
def Call( def Call(
call: Callable[[In1T, In2T, In3T], OutT] call: Callable[[In1T, In2T, In3T], OutT]
) -> _Call3Args[In1T, In2T, In3T, OutT]: ) -> _Call3Args[In1T, In2T, In3T, OutT]: ...
...
# 4 arg call; 4 args bundled. # 4 arg call; 4 args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@ -271,8 +245,7 @@ if TYPE_CHECKING:
arg2: In2T, arg2: In2T,
arg3: In3T, arg3: In3T,
arg4: In4T, arg4: In4T,
) -> _CallNoArgs[OutT]: ) -> _CallNoArgs[OutT]: ...
...
# 4 arg call; 3 args bundled. # 4 arg call; 3 args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@ -282,8 +255,7 @@ if TYPE_CHECKING:
arg1: In1T, arg1: In1T,
arg2: In2T, arg2: In2T,
arg3: In3T, arg3: In3T,
) -> _Call1Arg[In4T, OutT]: ) -> _Call1Arg[In4T, OutT]: ...
...
# 4 arg call; 2 args bundled. # 4 arg call; 2 args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@ -292,8 +264,7 @@ if TYPE_CHECKING:
call: Callable[[In1T, In2T, In3T, In4T], OutT], call: Callable[[In1T, In2T, In3T, In4T], OutT],
arg1: In1T, arg1: In1T,
arg2: In2T, arg2: In2T,
) -> _Call2Args[In3T, In4T, OutT]: ) -> _Call2Args[In3T, In4T, OutT]: ...
...
# 4 arg call; 1 arg bundled. # 4 arg call; 1 arg bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@ -301,16 +272,14 @@ if TYPE_CHECKING:
def Call( def Call(
call: Callable[[In1T, In2T, In3T, In4T], OutT], call: Callable[[In1T, In2T, In3T, In4T], OutT],
arg1: In1T, arg1: In1T,
) -> _Call3Args[In2T, In3T, In4T, OutT]: ) -> _Call3Args[In2T, In3T, In4T, OutT]: ...
...
# 4 arg call; no args bundled. # 4 arg call; no args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@overload @overload
def Call( def Call(
call: Callable[[In1T, In2T, In3T, In4T], OutT], call: Callable[[In1T, In2T, In3T, In4T], OutT],
) -> _Call4Args[In1T, In2T, In3T, In4T, OutT]: ) -> _Call4Args[In1T, In2T, In3T, In4T, OutT]: ...
...
# 5 arg call; 5 args bundled. # 5 arg call; 5 args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@ -322,8 +291,7 @@ if TYPE_CHECKING:
arg3: In3T, arg3: In3T,
arg4: In4T, arg4: In4T,
arg5: In5T, arg5: In5T,
) -> _CallNoArgs[OutT]: ) -> _CallNoArgs[OutT]: ...
...
# 6 arg call; 6 args bundled. # 6 arg call; 6 args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@ -336,8 +304,7 @@ if TYPE_CHECKING:
arg4: In4T, arg4: In4T,
arg5: In5T, arg5: In5T,
arg6: In6T, arg6: In6T,
) -> _CallNoArgs[OutT]: ) -> _CallNoArgs[OutT]: ...
...
# 7 arg call; 7 args bundled. # 7 arg call; 7 args bundled.
# noinspection PyPep8Naming # noinspection PyPep8Naming
@ -351,12 +318,10 @@ if TYPE_CHECKING:
arg5: In5T, arg5: In5T,
arg6: In6T, arg6: In6T,
arg7: In7T, arg7: In7T,
) -> _CallNoArgs[OutT]: ) -> _CallNoArgs[OutT]: ...
...
# noinspection PyPep8Naming # noinspection PyPep8Naming
def Call(*_args: Any, **_keywds: Any) -> Any: def Call(*_args: Any, **_keywds: Any) -> Any: ...
...
# (Type-safe Partial) # (Type-safe Partial)
# A convenient wrapper around functools.partial which adds type-safety # A convenient wrapper around functools.partial which adds type-safety

View File

@ -11,7 +11,13 @@ data formats in a nondestructive manner.
from __future__ import annotations from __future__ import annotations
from efro.util import set_canonical_module_names from efro.util import set_canonical_module_names
from efro.dataclassio._base import Codec, IOAttrs, IOExtendedData from efro.dataclassio._base import (
Codec,
IOAttrs,
IOExtendedData,
IOMultiType,
EXTRA_ATTRS_ATTR,
)
from efro.dataclassio._prep import ( from efro.dataclassio._prep import (
ioprep, ioprep,
ioprepped, ioprepped,
@ -29,20 +35,22 @@ from efro.dataclassio._api import (
) )
__all__ = [ __all__ = [
'JsonStyle',
'Codec', 'Codec',
'DataclassFieldLookup',
'EXTRA_ATTRS_ATTR',
'IOAttrs', 'IOAttrs',
'IOExtendedData', 'IOExtendedData',
'ioprep', 'IOMultiType',
'ioprepped', 'JsonStyle',
'will_ioprep',
'is_ioprepped_dataclass',
'DataclassFieldLookup',
'dataclass_to_dict',
'dataclass_to_json',
'dataclass_from_dict', 'dataclass_from_dict',
'dataclass_from_json', 'dataclass_from_json',
'dataclass_to_dict',
'dataclass_to_json',
'dataclass_validate', 'dataclass_validate',
'ioprep',
'ioprepped',
'is_ioprepped_dataclass',
'will_ioprep',
] ]
# Have these things present themselves cleanly as 'thismodule.SomeClass' # Have these things present themselves cleanly as 'thismodule.SomeClass'

View File

@ -27,7 +27,7 @@ class JsonStyle(Enum):
"""Different style types for json.""" """Different style types for json."""
# Single line, no spaces, no sorting. Not deterministic. # Single line, no spaces, no sorting. Not deterministic.
# Use this for most storage purposes. # Use this where speed is more important than determinism.
FAST = 'fast' FAST = 'fast'
# Single line, no spaces, sorted keys. Deterministic. # Single line, no spaces, sorted keys. Deterministic.
@ -40,7 +40,9 @@ class JsonStyle(Enum):
def dataclass_to_dict( def dataclass_to_dict(
obj: Any, codec: Codec = Codec.JSON, coerce_to_float: bool = True obj: Any,
codec: Codec = Codec.JSON,
coerce_to_float: bool = True,
) -> dict: ) -> dict:
"""Given a dataclass object, return a json-friendly dict. """Given a dataclass object, return a json-friendly dict.
@ -101,32 +103,36 @@ def dataclass_from_dict(
The dict must be formatted to match the specified codec (generally The dict must be formatted to match the specified codec (generally
json-friendly object types). This means that sequence values such as json-friendly object types). This means that sequence values such as
tuples or sets should be passed as lists, enums should be passed as their tuples or sets should be passed as lists, enums should be passed as
associated values, nested dataclasses should be passed as dicts, etc. their associated values, nested dataclasses should be passed as dicts,
etc.
All values are checked to ensure their types/values are valid. All values are checked to ensure their types/values are valid.
Data for attributes of type Any will be checked to ensure they match Data for attributes of type Any will be checked to ensure they match
types supported directly by json. This does not include types such types supported directly by json. This does not include types such
as tuples which are implicitly translated by Python's json module as tuples which are implicitly translated by Python's json module
(as this would break the ability to do a lossless round-trip with data). (as this would break the ability to do a lossless round-trip with
data).
If coerce_to_float is True, int values passed for float typed fields If coerce_to_float is True, int values passed for float typed fields
will be converted to float values. Otherwise, a TypeError is raised. will be converted to float values. Otherwise, a TypeError is raised.
If allow_unknown_attrs is False, AttributeErrors will be raised for If `allow_unknown_attrs` is False, AttributeErrors will be raised for
attributes present in the dict but not on the data class. Otherwise, they attributes present in the dict but not on the data class. Otherwise,
will be preserved as part of the instance and included if it is they will be preserved as part of the instance and included if it is
exported back to a dict, unless discard_unknown_attrs is True, in which exported back to a dict, unless `discard_unknown_attrs` is True, in
case they will simply be discarded. which case they will simply be discarded.
""" """
return _Inputter( val = _Inputter(
cls, cls,
codec=codec, codec=codec,
coerce_to_float=coerce_to_float, coerce_to_float=coerce_to_float,
allow_unknown_attrs=allow_unknown_attrs, allow_unknown_attrs=allow_unknown_attrs,
discard_unknown_attrs=discard_unknown_attrs, discard_unknown_attrs=discard_unknown_attrs,
).run(values) ).run(values)
assert isinstance(val, cls)
return val
def dataclass_from_json( def dataclass_from_json(

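A short usage sketch of the round-trip the docstring above describes; _Point and its values are illustrative, and coerce_to_float is passed explicitly rather than relying on a default:

from dataclasses import dataclass

from efro.dataclassio import ioprepped, dataclass_to_dict, dataclass_from_dict

@ioprepped
@dataclass
class _Point:
    x: float
    y: float

# Field names double as dict keys when no IOAttrs storage names are given.
out = dataclass_to_dict(_Point(x=1.0, y=2.5))
assert out == {'x': 1.0, 'y': 2.5}
assert dataclass_from_dict(_Point, out) == _Point(x=1.0, y=2.5)

# With coerce_to_float=True, ints supplied for float fields are converted;
# per the docstring, a TypeError is raised if it is False.
assert dataclass_from_dict(
    _Point, {'x': 1, 'y': 2}, coerce_to_float=True
) == _Point(x=1.0, y=2.0)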
View File

@ -8,39 +8,23 @@ import dataclasses
import typing import typing
import datetime import datetime
from enum import Enum from enum import Enum
from typing import TYPE_CHECKING, get_args from typing import TYPE_CHECKING, get_args, TypeVar, Generic
# noinspection PyProtectedMember # noinspection PyProtectedMember
from typing import _AnnotatedAlias # type: ignore from typing import _AnnotatedAlias # type: ignore
if TYPE_CHECKING: if TYPE_CHECKING:
from typing import Any, Callable from typing import Any, Callable, Literal, ClassVar, Self
# Types which we can pass through as-is. # Types which we can pass through as-is.
SIMPLE_TYPES = {int, bool, str, float, type(None)} SIMPLE_TYPES = {int, bool, str, float, type(None)}
# Attr name for dict of extra attributes included on dataclass instances. # Attr name for dict of extra attributes included on dataclass
# Note that this is only added if extra attributes are present. # instances. Note that this is only added if extra attributes are
# present.
EXTRA_ATTRS_ATTR = '_DCIOEXATTRS' EXTRA_ATTRS_ATTR = '_DCIOEXATTRS'
def _raise_type_error(
fieldpath: str, valuetype: type, expected: tuple[type, ...]
) -> None:
"""Raise an error when a field value's type does not match expected."""
assert isinstance(expected, tuple)
assert all(isinstance(e, type) for e in expected)
if len(expected) == 1:
expected_str = expected[0].__name__
else:
expected_str = ' | '.join(t.__name__ for t in expected)
raise TypeError(
f'Invalid value type for "{fieldpath}";'
f' expected "{expected_str}", got'
f' "{valuetype.__name__}".'
)
class Codec(Enum): class Codec(Enum):
"""Specifies expected data format exported to or imported from.""" """Specifies expected data format exported to or imported from."""
@ -78,32 +62,46 @@ class IOExtendedData:
""" """
def _is_valid_for_codec(obj: Any, codec: Codec) -> bool: EnumT = TypeVar('EnumT', bound=Enum)
"""Return whether a value consists solely of json-supported types.
Note that this does not include things like tuples which are
implicitly translated to lists by python's json module. class IOMultiType(Generic[EnumT]):
"""A base class for types that can map to multiple dataclass types.
This enables usage of high level base classes (for example
a 'Message' type) in annotations, with dataclassio automatically
serializing & deserializing dataclass subclasses based on their
type ('MessagePing', 'MessageChat', etc.)
Standard usage involves creating a class which inherits from this
one which acts as a 'registry', and then creating dataclass classes
inheriting from that registry class. Dataclassio will then do the
right thing when that registry class is used in type annotations.
See tests/test_efro/test_dataclassio.py for examples.
""" """
if obj is None:
return True
objtype = type(obj) # Dataclasses inheriting from an IOMultiType will store a type-id
if objtype in (int, float, str, bool): # with this key in their serialized data. This value can be
return True # overridden in IOMultiType subclasses as desired.
if objtype is dict: ID_STORAGE_NAME = '_dciotype'
# JSON 'objects' supports only string dict keys, but all value types.
return all(
isinstance(k, str) and _is_valid_for_codec(v, codec)
for k, v in obj.items()
)
if objtype is list:
return all(_is_valid_for_codec(elem, codec) for elem in obj)
# A few things are valid in firestore but not json. @classmethod
if issubclass(objtype, datetime.datetime) or objtype is bytes: def get_type(cls, type_id: EnumT) -> type[Self]:
return codec is Codec.FIRESTORE """Return a specific subclass given a type-id."""
raise NotImplementedError()
return False @classmethod
def get_type_id(cls) -> EnumT:
"""Return the type-id for this subclass."""
raise NotImplementedError()
@classmethod
def get_type_id_type(cls) -> type[EnumT]:
"""Return the Enum type this class uses as its type-id."""
out: type[EnumT] = cls.__orig_bases__[0].__args__[0] # type: ignore
assert issubclass(out, Enum)
return out
class IOAttrs: class IOAttrs:
@ -192,7 +190,7 @@ class IOAttrs:
"""Ensure the IOAttrs instance is ok to use with the provided field.""" """Ensure the IOAttrs instance is ok to use with the provided field."""
# Turning off store_default requires the field to have either # Turning off store_default requires the field to have either
# a default or a a default_factory or for us to have soft equivalents. # a default or a default_factory or for us to have soft equivalents.
if not self.store_default: if not self.store_default:
field_default_factory: Any = field.default_factory field_default_factory: Any = field.default_factory
@ -241,6 +239,52 @@ class IOAttrs:
) )
def _raise_type_error(
fieldpath: str, valuetype: type, expected: tuple[type, ...]
) -> None:
"""Raise an error when a field value's type does not match expected."""
assert isinstance(expected, tuple)
assert all(isinstance(e, type) for e in expected)
if len(expected) == 1:
expected_str = expected[0].__name__
else:
expected_str = ' | '.join(t.__name__ for t in expected)
raise TypeError(
f'Invalid value type for "{fieldpath}";'
f' expected "{expected_str}", got'
f' "{valuetype.__name__}".'
)
def _is_valid_for_codec(obj: Any, codec: Codec) -> bool:
"""Return whether a value consists solely of json-supported types.
Note that this does not include things like tuples which are
implicitly translated to lists by python's json module.
"""
if obj is None:
return True
objtype = type(obj)
if objtype in (int, float, str, bool):
return True
if objtype is dict:
# JSON 'objects' support only string dict keys, but all value
# types.
return all(
isinstance(k, str) and _is_valid_for_codec(v, codec)
for k, v in obj.items()
)
if objtype is list:
return all(_is_valid_for_codec(elem, codec) for elem in obj)
# A few things are valid in firestore but not json.
if issubclass(objtype, datetime.datetime) or objtype is bytes:
return codec is Codec.FIRESTORE
return False
def _get_origin(anntype: Any) -> Any: def _get_origin(anntype: Any) -> Any:
"""Given a type annotation, return its origin or itself if there is none. """Given a type annotation, return its origin or itself if there is none.
@ -255,9 +299,9 @@ def _get_origin(anntype: Any) -> Any:
def _parse_annotated(anntype: Any) -> tuple[Any, IOAttrs | None]: def _parse_annotated(anntype: Any) -> tuple[Any, IOAttrs | None]:
"""Parse Annotated() constructs, returning annotated type & IOAttrs.""" """Parse Annotated() constructs, returning annotated type & IOAttrs."""
# If we get an Annotated[foo, bar, eep] we take # If we get an Annotated[foo, bar, eep] we take foo as the actual
# foo as the actual type, and we look for IOAttrs instances in # type, and we look for IOAttrs instances in bar/eep to affect our
# bar/eep to affect our behavior. # behavior.
ioattrs: IOAttrs | None = None ioattrs: IOAttrs | None = None
if isinstance(anntype, _AnnotatedAlias): if isinstance(anntype, _AnnotatedAlias):
annargs = get_args(anntype) annargs = get_args(anntype)
@ -270,8 +314,8 @@ def _parse_annotated(anntype: Any) -> tuple[Any, IOAttrs | None]:
) )
ioattrs = annarg ioattrs = annarg
# I occasionally just throw a 'x' down when I mean IOAttrs('x'); # I occasionally just throw a 'x' down when I mean
# catch these mistakes. # IOAttrs('x'); catch these mistakes.
elif isinstance(annarg, (str, int, float, bool)): elif isinstance(annarg, (str, int, float, bool)):
raise RuntimeError( raise RuntimeError(
f'Raw {type(annarg)} found in Annotated[] entry:' f'Raw {type(annarg)} found in Annotated[] entry:'
@ -279,3 +323,21 @@ def _parse_annotated(anntype: Any) -> tuple[Any, IOAttrs | None]:
) )
anntype = annargs[0] anntype = annargs[0]
return anntype, ioattrs return anntype, ioattrs
def _get_multitype_type(
cls: type[IOMultiType], fieldpath: str, val: Any
) -> type[Any]:
if not isinstance(val, dict):
raise ValueError(
f"Found a {type(val)} at '{fieldpath}'; expected a dict."
)
storename = cls.ID_STORAGE_NAME
id_val = val.get(storename)
if id_val is None:
raise ValueError(
f"Expected a '{storename}'" f" value for object at '{fieldpath}'."
)
id_enum_type = cls.get_type_id_type()
id_enum = id_enum_type(id_val)
return cls.get_type(id_enum)
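As a hedged illustration of what this helper does (reusing the hypothetical Message sketch from earlier): the serialized dict carries the type-id under ID_STORAGE_NAME, and the helper maps it back through the registry enum to a concrete class.

# Hypothetical serialized form; '_dciotype' is IOMultiType.ID_STORAGE_NAME
# and 'ping' is MessageType.PING.value in the earlier sketch.
data = {'_dciotype': 'ping', 'latency': 0.05}

id_enum = Message.get_type_id_type()(data['_dciotype'])  # MessageType.PING
concrete = Message.get_type(id_enum)                      # MessagePing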

View File

@ -13,7 +13,7 @@ import dataclasses
import typing import typing
import types import types
import datetime import datetime
from typing import TYPE_CHECKING, Generic, TypeVar from typing import TYPE_CHECKING
from efro.util import enum_by_value, check_utc from efro.util import enum_by_value, check_utc
from efro.dataclassio._base import ( from efro.dataclassio._base import (
@ -25,6 +25,8 @@ from efro.dataclassio._base import (
SIMPLE_TYPES, SIMPLE_TYPES,
_raise_type_error, _raise_type_error,
IOExtendedData, IOExtendedData,
_get_multitype_type,
IOMultiType,
) )
from efro.dataclassio._prep import PrepSession from efro.dataclassio._prep import PrepSession
@ -34,13 +36,11 @@ if TYPE_CHECKING:
from efro.dataclassio._base import IOAttrs from efro.dataclassio._base import IOAttrs
from efro.dataclassio._outputter import _Outputter from efro.dataclassio._outputter import _Outputter
T = TypeVar('T')
class _Inputter:
class _Inputter(Generic[T]):
def __init__( def __init__(
self, self,
cls: type[T], cls: type[Any],
codec: Codec, codec: Codec,
coerce_to_float: bool, coerce_to_float: bool,
allow_unknown_attrs: bool = True, allow_unknown_attrs: bool = True,
@ -59,27 +59,45 @@ class _Inputter(Generic[T]):
' when allow_unknown_attrs is False.' ' when allow_unknown_attrs is False.'
) )
def run(self, values: dict) -> T: def run(self, values: dict) -> Any:
"""Do the thing.""" """Do the thing."""
# For special extended data types, call their 'will_output' callback. outcls: type[Any]
tcls = self._cls
if issubclass(tcls, IOExtendedData): # If we're dealing with a multi-type subclass which is NOT a
# dataclass, we must rely on its stored type to figure out
# what type of dataclass we're going to. If we are a dataclass
# then we already know what type we're creating, so we can
# survive without the stored id; that is often necessary when
# reading old data that doesn't have a type id attr yet.
if issubclass(self._cls, IOMultiType) and not dataclasses.is_dataclass(
self._cls
):
type_id_val = values.get(self._cls.ID_STORAGE_NAME)
if type_id_val is None:
raise ValueError(
f'No type id value present for multi-type object:'
f' {values}.'
)
type_id_enum = self._cls.get_type_id_type()
enum_val = type_id_enum(type_id_val)
outcls = self._cls.get_type(enum_val)
else:
outcls = self._cls
# FIXME - should probably move this into _dataclass_from_input
# so it can work on nested values.
if issubclass(outcls, IOExtendedData):
is_ext = True is_ext = True
tcls.will_input(values) outcls.will_input(values)
else: else:
is_ext = False is_ext = False
out = self._dataclass_from_input(self._cls, '', values) out = self._dataclass_from_input(outcls, '', values)
assert isinstance(out, self._cls) assert isinstance(out, outcls)
if is_ext: if is_ext:
# mypy complains that we're no longer returning a T out.did_input()
# if we operate on out directly.
out2 = out
assert isinstance(out2, IOExtendedData)
out2.did_input()
return out return out
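A hedged sketch of the top-level behavior the comments above describe, reusing the hypothetical Message classes from earlier; dataclass_from_dict is assumed to be the package's public entry point for this path.

from efro.dataclassio import dataclass_from_dict  # assumed public API

# Reading with the registry class (not itself a dataclass) forces the
# stored type-id lookup to choose the concrete output class.
msg = dataclass_from_dict(Message, {'_dciotype': 'chat', 'text': 'hi'})
assert isinstance(msg, MessageChat)

# Reading with a concrete subclass still works even when older data has
# no '_dciotype' key, since the target type is already known.
ping = dataclass_from_dict(MessagePing, {'latency': 0.05})
assert ping.latency == 0.05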
@ -111,8 +129,8 @@ class _Inputter(Generic[T]):
# noinspection PyPep8 # noinspection PyPep8
if origin is typing.Union or origin is types.UnionType: if origin is typing.Union or origin is types.UnionType:
# Currently, the only unions we support are None/Value # Currently, the only unions we support are None/Value
# (translated from Optional), which we verified on prep. # (translated from Optional), which we verified on prep. So
# So let's treat this as a simple optional case. # let's treat this as a simple optional case.
if value is None: if value is None:
return None return None
childanntypes_l = [ childanntypes_l = [
@ -123,13 +141,15 @@ class _Inputter(Generic[T]):
cls, fieldpath, childanntypes_l[0], value, ioattrs cls, fieldpath, childanntypes_l[0], value, ioattrs
) )
# Everything below this point assumes the annotation type resolves # Everything below this point assumes the annotation type
# to a concrete type. (This should have been verified at prep time). # resolves to a concrete type. (This should have been verified
# at prep time).
assert isinstance(origin, type) assert isinstance(origin, type)
if origin in SIMPLE_TYPES: if origin in SIMPLE_TYPES:
if type(value) is not origin: if type(value) is not origin:
# Special case: if they want to coerce ints to floats, do so. # Special case: if they want to coerce ints to floats,
# do so.
if ( if (
self._coerce_to_float self._coerce_to_float
and origin is float and origin is float
@ -157,6 +177,16 @@ class _Inputter(Generic[T]):
if dataclasses.is_dataclass(origin): if dataclasses.is_dataclass(origin):
return self._dataclass_from_input(origin, fieldpath, value) return self._dataclass_from_input(origin, fieldpath, value)
# ONLY consider something as a multi-type when it's not a
# dataclass (all dataclasses inheriting from the multi-type
# should just be processed as dataclasses).
if issubclass(origin, IOMultiType):
return self._dataclass_from_input(
_get_multitype_type(anntype, fieldpath, value),
fieldpath,
value,
)
if issubclass(origin, Enum): if issubclass(origin, Enum):
return enum_by_value(origin, value) return enum_by_value(origin, value)
@ -228,10 +258,23 @@ class _Inputter(Generic[T]):
f.name: _parse_annotated(prep.annotations[f.name]) for f in fields f.name: _parse_annotated(prep.annotations[f.name]) for f in fields
} }
# Special case: if this is a multi-type class it probably has a
# type attr. Ignore that while parsing since we already have a
# definite type and it will just pollute extra-attrs otherwise.
if issubclass(cls, IOMultiType):
type_id_store_name = cls.ID_STORAGE_NAME
else:
type_id_store_name = None
# Go through all data in the input, converting it to either dataclass # Go through all data in the input, converting it to either dataclass
# args or extra data. # args or extra data.
args: dict[str, Any] = {} args: dict[str, Any] = {}
for rawkey, value in values.items(): for rawkey, value in values.items():
# Ignore _dciotype or whatnot.
if type_id_store_name is not None and rawkey == type_id_store_name:
continue
key = prep.storage_names_to_attr_names.get(rawkey, rawkey) key = prep.storage_names_to_attr_names.get(rawkey, rawkey)
field = fields_by_name.get(key) field = fields_by_name.get(key)
@ -473,6 +516,19 @@ class _Inputter(Generic[T]):
# We contain elements of some specified type. # We contain elements of some specified type.
assert len(childanntypes) == 1 assert len(childanntypes) == 1
childanntype = childanntypes[0] childanntype = childanntypes[0]
# If our annotation type inherits from IOMultiType, use type-id
# values to determine which type to load for each element.
if issubclass(childanntype, IOMultiType):
return seqtype(
self._dataclass_from_input(
_get_multitype_type(childanntype, fieldpath, i),
fieldpath,
i,
)
for i in value
)
return seqtype( return seqtype(
self._value_from_input(cls, fieldpath, childanntype, i, ioattrs) self._value_from_input(cls, fieldpath, childanntype, i, ioattrs)
for i in value for i in value
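To show what this per-element lookup enables, here is a hedged sketch of a container dataclass whose fields are annotated with the hypothetical Message registry class from earlier (Mailbox is an illustrative name, not part of this commit).

from dataclasses import dataclass, field

from efro.dataclassio import ioprepped  # existing prep decorator


@ioprepped
@dataclass
class Mailbox:
    # Single multi-type field: the concrete Message subclass is chosen
    # per-value from its stored type-id.
    latest: Message
    # Sequence of multi-type values; each element resolves its own
    # concrete class the same way.
    history: list[Message] = field(default_factory=list)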

View File

@ -25,6 +25,7 @@ from efro.dataclassio._base import (
SIMPLE_TYPES, SIMPLE_TYPES,
_raise_type_error, _raise_type_error,
IOExtendedData, IOExtendedData,
IOMultiType,
) )
from efro.dataclassio._prep import PrepSession from efro.dataclassio._prep import PrepSession
@ -49,6 +50,8 @@ class _Outputter:
assert dataclasses.is_dataclass(self._obj) assert dataclasses.is_dataclass(self._obj)
# For special extended data types, call their 'will_output' callback. # For special extended data types, call their 'will_output' callback.
# FIXME - should probably move this into _process_dataclass so it
# can work on nested values.
if isinstance(self._obj, IOExtendedData): if isinstance(self._obj, IOExtendedData):
self._obj.will_output() self._obj.will_output()
@ -69,6 +72,7 @@ class _Outputter:
def _process_dataclass(self, cls: type, obj: Any, fieldpath: str) -> Any: def _process_dataclass(self, cls: type, obj: Any, fieldpath: str) -> Any:
# pylint: disable=too-many-locals # pylint: disable=too-many-locals
# pylint: disable=too-many-branches # pylint: disable=too-many-branches
# pylint: disable=too-many-statements
prep = PrepSession(explicit=False).prep_dataclass( prep = PrepSession(explicit=False).prep_dataclass(
type(obj), recursion_level=0 type(obj), recursion_level=0
) )
@ -139,6 +143,25 @@ class _Outputter:
if self._create: if self._create:
assert out is not None assert out is not None
out.update(extra_attrs) out.update(extra_attrs)
# If this obj inherits from multi-type, store its type id.
if isinstance(obj, IOMultiType):
type_id = obj.get_type_id()
# Sanity checks; make sure looking up this id gets us this
# type.
assert isinstance(type_id.value, str)
if obj.get_type(type_id) is not type(obj):
raise RuntimeError(
f'dataclassio: object of type {type(obj)}'
f' gives type-id {type_id} but that id gives type'
f' {obj.get_type(type_id)}. Something is out of sync.'
)
assert obj.get_type(type_id) is type(obj)
if self._create:
assert out is not None
out[obj.ID_STORAGE_NAME] = type_id.value
return out return out
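On the output side, the block above writes the type-id next to the regular fields. A hedged sketch with the hypothetical classes from earlier; dataclass_to_dict is assumed to be the public counterpart of the inputter path.

from efro.dataclassio import dataclass_to_dict  # assumed public API

out = dataclass_to_dict(MessageChat(text='hi'))
# Expected shape (key order aside): {'text': 'hi', '_dciotype': 'chat'}
assert out['_dciotype'] == 'chat'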
def _process_value( def _process_value(
@ -231,6 +254,7 @@ class _Outputter:
f'Expected a list for {fieldpath};' f'Expected a list for {fieldpath};'
f' found a {type(value)}' f' found a {type(value)}'
) )
childanntypes = typing.get_args(anntype) childanntypes = typing.get_args(anntype)
# 'Any' type children; make sure they are valid values for # 'Any' type children; make sure they are valid values for
@ -246,8 +270,37 @@ class _Outputter:
# Hmm; should we do a copy here? # Hmm; should we do a copy here?
return value if self._create else None return value if self._create else None
# We contain elements of some specified type. # We contain elements of some single specified type.
assert len(childanntypes) == 1 assert len(childanntypes) == 1
childanntype = childanntypes[0]
# If that type is a multi-type, we determine our type per-object.
if issubclass(childanntype, IOMultiType):
# In the multi-type case, we use each object's own type
# to do its conversion, but let's at least make sure each
# of those types inherits from the annotated multi-type
# class.
for x in value:
if not isinstance(x, childanntype):
raise ValueError(
f"Found a {type(x)} value under '{fieldpath}'."
f' Everything must inherit from'
f' {childanntype}.'
)
if self._create:
out: list[Any] = []
for x in value:
# We know these are dataclasses so no need to do
# the generic _process_value.
out.append(self._process_dataclass(cls, x, fieldpath))
return out
for x in value:
# We know these are dataclasses so no need to do
# the generic _process_value.
self._process_dataclass(cls, x, fieldpath)
# Normal non-multitype case; everything's got the same type.
if self._create: if self._create:
return [ return [
self._process_value( self._process_value(
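Putting the list handling above together with the earlier Mailbox sketch, a hedged round-trip might look like this (all names hypothetical):

box = Mailbox(
    latest=MessagePing(latency=0.01),
    history=[MessagePing(latency=0.02), MessageChat(text='hi')],
)
data = dataclass_to_dict(box)
# data['latest'] and every element of data['history'] carry their own
# '_dciotype' entries, so the mixed list restores to the right classes.
restored = dataclass_from_dict(Mailbox, data)
assert isinstance(restored.history[1], MessageChat)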
@ -307,6 +360,21 @@ class _Outputter:
) )
return self._process_dataclass(cls, value, fieldpath) return self._process_dataclass(cls, value, fieldpath)
# ONLY consider something as a multi-type when it's not a
# dataclass (all dataclasses inheriting from the multi-type should
# just be processed as dataclasses).
if issubclass(origin, IOMultiType):
# In the multi-type case, we use each object's own type to
# do its conversion, but let's at least make sure each of
# those types inherits from the annotated multi-type class.
if not isinstance(value, origin):
raise ValueError(
f"Found a {type(value)} value at '{fieldpath}'."
f' It is expected to inherit from {origin}.'
)
return self._process_dataclass(cls, value, fieldpath)
if issubclass(origin, Enum): if issubclass(origin, Enum):
if not isinstance(value, origin): if not isinstance(value, origin):
raise TypeError( raise TypeError(

View File

@ -17,7 +17,12 @@ import datetime
from typing import TYPE_CHECKING, TypeVar, get_type_hints from typing import TYPE_CHECKING, TypeVar, get_type_hints
# noinspection PyProtectedMember # noinspection PyProtectedMember
from efro.dataclassio._base import _parse_annotated, _get_origin, SIMPLE_TYPES from efro.dataclassio._base import (
_parse_annotated,
_get_origin,
SIMPLE_TYPES,
IOMultiType,
)
if TYPE_CHECKING: if TYPE_CHECKING:
from typing import Any from typing import Any
@ -260,6 +265,13 @@ class PrepSession:
origin = _get_origin(anntype) origin = _get_origin(anntype)
# If we inherit from IOMultiType, we use its type map to
# determine which type we're going to instead of the annotation.
# And we can't really check those types because they are
# lazy-loaded. So I guess we're done here.
if issubclass(origin, IOMultiType):
return
# noinspection PyPep8 # noinspection PyPep8
if origin is typing.Union or origin is types.UnionType: if origin is typing.Union or origin is types.UnionType:
self.prep_union( self.prep_union(

View File

@ -278,9 +278,7 @@ def _desc(obj: Any) -> str:
tpss = ( tpss = (
f', contains [{tpsj}, ...]' f', contains [{tpsj}, ...]'
if len(obj) > 3 if len(obj) > 3
else f', contains [{tpsj}]' else f', contains [{tpsj}]' if tps else ''
if tps
else ''
) )
extra = f' (len {len(obj)}{tpss})' extra = f' (len {len(obj)}{tpss})'
elif isinstance(obj, dict): elif isinstance(obj, dict):
@ -299,9 +297,7 @@ def _desc(obj: Any) -> str:
pairss = ( pairss = (
f', contains {{{pairsj}, ...}}' f', contains {{{pairsj}, ...}}'
if len(obj) > 3 if len(obj) > 3
else f', contains {{{pairsj}}}' else f', contains {{{pairsj}}}' if pairs else ''
if pairs
else ''
) )
extra = f' (len {len(obj)}{pairss})' extra = f' (len {len(obj)}{pairss})'
if extra is None: if extra is None:

View File

@ -92,9 +92,9 @@ class LogEntry:
# incorporated into custom log processing. To populate this, our # incorporated into custom log processing. To populate this, our
# LogHandler class looks for a 'labels' dict passed in the optional # LogHandler class looks for a 'labels' dict passed in the optional
# 'extra' dict arg to standard Python log calls. # 'extra' dict arg to standard Python log calls.
labels: Annotated[ labels: Annotated[dict[str, str], IOAttrs('la', store_default=False)] = (
dict[str, str], IOAttrs('la', store_default=False) field(default_factory=dict)
] = field(default_factory=dict) )
@ioprepped @ioprepped
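The 'labels' mechanism mentioned in the comment above rides on the standard logging 'extra' argument; a minimal hedged example (logger name and label values are illustrative):

import logging

# Per the comment above, LogHandler looks for a 'labels' dict in the
# standard 'extra' argument and carries it through to the LogEntry.
logging.getLogger('myapp').warning(
    'disk almost full', extra={'labels': {'host': 'web1'}}
)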
@ -483,11 +483,11 @@ class LogHandler(logging.Handler):
# after a short bit if we never get a newline. # after a short bit if we never get a newline.
ship_task = self._file_chunk_ship_task[name] ship_task = self._file_chunk_ship_task[name]
if ship_task is None: if ship_task is None:
self._file_chunk_ship_task[ self._file_chunk_ship_task[name] = (
name self._event_loop.create_task(
] = self._event_loop.create_task( self._ship_chunks_task(name),
self._ship_chunks_task(name), name='log ship file chunks',
name='log ship file chunks', )
) )
except Exception: except Exception:

View File

@ -499,8 +499,7 @@ class MessageProtocol:
f' @overload\n' f' @overload\n'
f' {pfx}def send{sfx}(self,' f' {pfx}def send{sfx}(self,'
f' message: {msgtypevar})' f' message: {msgtypevar})'
f' -> {rtypevar}:\n' f' -> {rtypevar}: ...\n'
f' ...\n'
) )
rtypevar = 'Response | None' rtypevar = 'Response | None'
if async_pass: if async_pass:
@ -607,8 +606,7 @@ class MessageProtocol:
f' call: Callable[[Any, {msgtypevar}], ' f' call: Callable[[Any, {msgtypevar}], '
f'{rtypevar}],\n' f'{rtypevar}],\n'
f' )' f' )'
f' -> Callable[[Any, {msgtypevar}], {rtypevar}]:\n' f' -> Callable[[Any, {msgtypevar}], {rtypevar}]: ...\n'
f' ...\n'
) )
out += ( out += (
'\n' '\n'

View File

@ -55,12 +55,13 @@ class MessageReceiver:
def __init__(self, protocol: MessageProtocol) -> None: def __init__(self, protocol: MessageProtocol) -> None:
self.protocol = protocol self.protocol = protocol
self._handlers: dict[type[Message], Callable] = {} self._handlers: dict[type[Message], Callable] = {}
self._decode_filter_call: Callable[ self._decode_filter_call: (
[Any, dict, Message], None Callable[[Any, dict, Message], None] | None
] | None = None ) = None
self._encode_filter_call: Callable[ self._encode_filter_call: (
[Any, Message | None, Response | SysResponse, dict], None Callable[[Any, Message | None, Response | SysResponse, dict], None]
] | None = None | None
) = None
# noinspection PyProtectedMember # noinspection PyProtectedMember
def register_handler( def register_handler(

View File

@ -41,18 +41,18 @@ class MessageSender:
def __init__(self, protocol: MessageProtocol) -> None: def __init__(self, protocol: MessageProtocol) -> None:
self.protocol = protocol self.protocol = protocol
self._send_raw_message_call: Callable[[Any, str], str] | None = None self._send_raw_message_call: Callable[[Any, str], str] | None = None
self._send_async_raw_message_call: Callable[ self._send_async_raw_message_call: (
[Any, str], Awaitable[str] Callable[[Any, str], Awaitable[str]] | None
] | None = None ) = None
self._send_async_raw_message_ex_call: Callable[ self._send_async_raw_message_ex_call: (
[Any, str, Message], Awaitable[str] Callable[[Any, str, Message], Awaitable[str]] | None
] | None = None ) = None
self._encode_filter_call: Callable[ self._encode_filter_call: (
[Any, Message, dict], None Callable[[Any, Message, dict], None] | None
] | None = None ) = None
self._decode_filter_call: Callable[ self._decode_filter_call: (
[Any, Message, dict, Response | SysResponse], None Callable[[Any, Message, dict, Response | SysResponse], None] | None
] | None = None ) = None
self._peer_desc_call: Callable[[Any], str] | None = None self._peer_desc_call: Callable[[Any], str] | None = None
def send_method( def send_method(

View File

@ -317,8 +317,6 @@ _envval = os.environ.get('EFRO_TERMCOLORS')
color_enabled: bool = ( color_enabled: bool = (
True True
if _envval == '1' if _envval == '1'
else False else False if _envval == '0' else _default_color_enabled()
if _envval == '0'
else _default_color_enabled()
) )
Clr: type[ClrBase] = ClrAlways if color_enabled else ClrNever Clr: type[ClrBase] = ClrAlways if color_enabled else ClrNever

View File

@ -459,8 +459,7 @@ if TYPE_CHECKING:
class ValueDispatcherMethod(Generic[ValT, RetT]): class ValueDispatcherMethod(Generic[ValT, RetT]):
"""Used by the valuedispatchmethod decorator.""" """Used by the valuedispatchmethod decorator."""
def __call__(self, value: ValT) -> RetT: def __call__(self, value: ValT) -> RetT: ...
...
def register( def register(
self, value: ValT self, value: ValT

View File

@ -105,13 +105,11 @@ def extract_flag(args: list[str], name: str) -> bool:
@overload @overload
def extract_arg( def extract_arg(
args: list[str], name: str, required: Literal[False] = False args: list[str], name: str, required: Literal[False] = False
) -> str | None: ) -> str | None: ...
...
@overload @overload
def extract_arg(args: list[str], name: str, required: Literal[True]) -> str: def extract_arg(args: list[str], name: str, required: Literal[True]) -> str: ...
...
def extract_arg( def extract_arg(

View File

@ -120,9 +120,11 @@ def clientprint(
assert _g_thread_local_storage is not None assert _g_thread_local_storage is not None
print( print(
*args, *args,
file=_g_thread_local_storage.stderr file=(
if stderr _g_thread_local_storage.stderr
else _g_thread_local_storage.stdout, if stderr
else _g_thread_local_storage.stdout
),
end=end, end=end,
) )
else: else:

View File

@ -39,7 +39,6 @@ def _valid_filename(fname: str) -> bool:
'requirements.txt', 'requirements.txt',
'pylintrc', 'pylintrc',
'clang-format', 'clang-format',
'pycheckers',
'style.yapf', 'style.yapf',
'test_task_bin', 'test_task_bin',
'.editorconfig', '.editorconfig',

View File

@ -47,7 +47,6 @@ def install_tool_config(projroot: Path, src: Path, dst: Path) -> None:
comment = ';;' comment = ';;'
elif dst.name in [ elif dst.name in [
'.mypy.ini', '.mypy.ini',
'.pycheckers',
'.pylintrc', '.pylintrc',
'.style.yapf', '.style.yapf',
'.clang-format', '.clang-format',