mirror of
https://github.com/RYDE-WORK/ballistica.git
synced 2026-02-04 22:43:17 +08:00
Heavily revamped efro.dataclasses functionality for improved communication with master-servers
This commit is contained in:
parent
d79683a5b3
commit
a931a856b2
@ -3932,26 +3932,26 @@
|
|||||||
"assets/build/windows/Win32/ucrtbased.dll": "https://files.ballistica.net/cache/ba1/b5/85/f8b6d0558ddb87267f34254b1450",
|
"assets/build/windows/Win32/ucrtbased.dll": "https://files.ballistica.net/cache/ba1/b5/85/f8b6d0558ddb87267f34254b1450",
|
||||||
"assets/build/windows/Win32/vc_redist.x86.exe": "https://files.ballistica.net/cache/ba1/1c/e1/4a1a2eddda2f4aebd5f8b64ab08e",
|
"assets/build/windows/Win32/vc_redist.x86.exe": "https://files.ballistica.net/cache/ba1/1c/e1/4a1a2eddda2f4aebd5f8b64ab08e",
|
||||||
"assets/build/windows/Win32/vcruntime140d.dll": "https://files.ballistica.net/cache/ba1/50/8d/bc2600ac9491f1b14d659709451f",
|
"assets/build/windows/Win32/vcruntime140d.dll": "https://files.ballistica.net/cache/ba1/50/8d/bc2600ac9491f1b14d659709451f",
|
||||||
"build/prefab/full/linux_arm64/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/ce/05/ca46e7d32db9365adf5b8bfba176",
|
"build/prefab/full/linux_arm64/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/7e/96/6114ce142d4d7835caa063c7a4d8",
|
||||||
"build/prefab/full/linux_arm64/release/ballisticacore": "https://files.ballistica.net/cache/ba1/ef/94/d59852c21cc8fbd77a5f3fcd3874",
|
"build/prefab/full/linux_arm64/release/ballisticacore": "https://files.ballistica.net/cache/ba1/13/94/7c0904f5dc691958ff15de93dcbf",
|
||||||
"build/prefab/full/linux_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/c4/4b/30e4b3a62d6ddc31200bf68bed96",
|
"build/prefab/full/linux_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/8a/09/369d1fda1da9ec28c0c7e8809dd2",
|
||||||
"build/prefab/full/linux_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/6b/d9/ea08e2b748b7abb7ab9b1022fb6d",
|
"build/prefab/full/linux_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/d6/5c/2c9a4df3a585ef82a1746e59e65f",
|
||||||
"build/prefab/full/linux_x86_64/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/d7/21/97fbbbae1b0c1187f0aa024dc5cf",
|
"build/prefab/full/linux_x86_64/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/5b/e6/db835eaffda8f695f39d10fc5f14",
|
||||||
"build/prefab/full/linux_x86_64/release/ballisticacore": "https://files.ballistica.net/cache/ba1/7a/a2/df6f4a0821e58c08cbf5ffc09fcc",
|
"build/prefab/full/linux_x86_64/release/ballisticacore": "https://files.ballistica.net/cache/ba1/11/c5/e0bf2d2b8f49ca5f37251c92b27d",
|
||||||
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/2e/de/b8faaef1cdf126911bd1af5fe04c",
|
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/e0/14/b977cd5aeebf9911060c7a5e89b4",
|
||||||
"build/prefab/full/linux_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/4b/ed/f39044c89a5ce40a8999536f640c",
|
"build/prefab/full/linux_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/e9/fc/7bd6a5e9415a29eb5c05f5ef9330",
|
||||||
"build/prefab/full/mac_arm64/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/ad/58/28e31902f10e078b04537370a46b",
|
"build/prefab/full/mac_arm64/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/9c/aa/48c51a02a45debc7dba4e08497e4",
|
||||||
"build/prefab/full/mac_arm64/release/ballisticacore": "https://files.ballistica.net/cache/ba1/c5/bd/7944d5d24cdf46207013c36374f6",
|
"build/prefab/full/mac_arm64/release/ballisticacore": "https://files.ballistica.net/cache/ba1/83/db/c41d3815f8218bc07de270a89976",
|
||||||
"build/prefab/full/mac_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/11/39/4f979e4ac2c3bf9a484771cb31e9",
|
"build/prefab/full/mac_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/2c/a4/427e8fdf20a81d46b0fdb1de8f76",
|
||||||
"build/prefab/full/mac_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/86/3f/ac7523ec734c09b637f45adc73b3",
|
"build/prefab/full/mac_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/5b/9a/97ecfd01f46e5d60110fce5154e5",
|
||||||
"build/prefab/full/mac_x86_64/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/2b/d4/8faf40e2e036f69ac96408a14a5a",
|
"build/prefab/full/mac_x86_64/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/a7/9b/22091a33113871fe9178d4745a2a",
|
||||||
"build/prefab/full/mac_x86_64/release/ballisticacore": "https://files.ballistica.net/cache/ba1/8d/71/9ffd5ced8c3834c73ca8b6a3f04d",
|
"build/prefab/full/mac_x86_64/release/ballisticacore": "https://files.ballistica.net/cache/ba1/e1/33/4a76b4caf23ccc7e052f8dbc9319",
|
||||||
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/12/20/1145c90452a44bf985cf2feca3dc",
|
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/f6/d0/7b87d34b13f5cb7950a0ffa45fe9",
|
||||||
"build/prefab/full/mac_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/86/ad/810a83e0b053550f730d02061845",
|
"build/prefab/full/mac_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/8d/10/bc0420b13a490468a9b0c17c7bd6",
|
||||||
"build/prefab/full/windows_x86/debug/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/9d/a4/418733dbbd677aa0cd28cd5101c9",
|
"build/prefab/full/windows_x86/debug/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/34/5f/31c444bae9e4addaf62868e6d544",
|
||||||
"build/prefab/full/windows_x86/release/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/c8/f3/ad02c475133e4a82620bd840a8c3",
|
"build/prefab/full/windows_x86/release/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/76/61/ac6b5aacb3b4ba410a5730bef602",
|
||||||
"build/prefab/full/windows_x86_server/debug/dist/ballisticacore_headless.exe": "https://files.ballistica.net/cache/ba1/41/c9/dc5db16b82429639ad51b4bd2248",
|
"build/prefab/full/windows_x86_server/debug/dist/ballisticacore_headless.exe": "https://files.ballistica.net/cache/ba1/37/a0/396efd9512c58e15776258a6dbaa",
|
||||||
"build/prefab/full/windows_x86_server/release/dist/ballisticacore_headless.exe": "https://files.ballistica.net/cache/ba1/ef/ff/2e12d7939319a700a5561ddeec18",
|
"build/prefab/full/windows_x86_server/release/dist/ballisticacore_headless.exe": "https://files.ballistica.net/cache/ba1/c1/29/61eaddf05361c973917645544193",
|
||||||
"build/prefab/lib/linux_arm64/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/05/1d/03a2b2487e67010afff2a855fa2e",
|
"build/prefab/lib/linux_arm64/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/05/1d/03a2b2487e67010afff2a855fa2e",
|
||||||
"build/prefab/lib/linux_arm64/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/7c/8e/4be856155b5485c6a2ed8a605253",
|
"build/prefab/lib/linux_arm64/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/7c/8e/4be856155b5485c6a2ed8a605253",
|
||||||
"build/prefab/lib/linux_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/a6/7d/a267f67eb9c9eecc0c6c5d4381c4",
|
"build/prefab/lib/linux_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/a6/7d/a267f67eb9c9eecc0c6c5d4381c4",
|
||||||
@ -3960,12 +3960,12 @@
|
|||||||
"build/prefab/lib/linux_x86_64/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/98/24/86dfc03d0985b358527f1abbaca5",
|
"build/prefab/lib/linux_x86_64/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/98/24/86dfc03d0985b358527f1abbaca5",
|
||||||
"build/prefab/lib/linux_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/20/82/4544f70ada88097fba6c34c23b77",
|
"build/prefab/lib/linux_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/20/82/4544f70ada88097fba6c34c23b77",
|
||||||
"build/prefab/lib/linux_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/be/05/4767c22c4e4821de606a12a9b2a6",
|
"build/prefab/lib/linux_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/be/05/4767c22c4e4821de606a12a9b2a6",
|
||||||
"build/prefab/lib/mac_arm64/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/cc/ce/fd07fcfde4f1a64a776e68c372e6",
|
"build/prefab/lib/mac_arm64/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/d7/e1/1dae62eb42fd7b66325278b84dba",
|
||||||
"build/prefab/lib/mac_arm64/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/0b/c0/3b1abad1b5944134705eb123785a",
|
"build/prefab/lib/mac_arm64/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/22/e2/769d0b3e06b73edc6ad712f9a821",
|
||||||
"build/prefab/lib/mac_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/d6/b5/8f0b3137a8f9006571cf819f39e3",
|
"build/prefab/lib/mac_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/b5/61/43b9fe96846c0ff72598a6f9ab9c",
|
||||||
"build/prefab/lib/mac_arm64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/38/f1/57af13726ad18b1c71c321d157e8",
|
"build/prefab/lib/mac_arm64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/a4/9d/f30a305efea862cc1b483efc79d3",
|
||||||
"build/prefab/lib/mac_x86_64/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/f5/35/e6a7a0a5b6810a77a531bdb78e7a",
|
"build/prefab/lib/mac_x86_64/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/ee/bf/ebc6b503082cb03575d2927c0fc6",
|
||||||
"build/prefab/lib/mac_x86_64/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/94/80/0bb53ea8d7b3243fb308c9c39328",
|
"build/prefab/lib/mac_x86_64/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/85/d6/6664a0ce02bc4e513ed86ac1d61b",
|
||||||
"build/prefab/lib/mac_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/9d/c8/cc354640a95c15030d8aa0a6af9a",
|
"build/prefab/lib/mac_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/38/67/b85a805950491a0bb322be8497b5",
|
||||||
"build/prefab/lib/mac_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/e1/92/0de698b07113f55e90a45b19ad83"
|
"build/prefab/lib/mac_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/2e/9f/c537ed38c978b7fe030d3f06f421"
|
||||||
}
|
}
|
||||||
22
.idea/dictionaries/ericf.xml
generated
22
.idea/dictionaries/ericf.xml
generated
@ -67,11 +67,13 @@
|
|||||||
<w>androidstudiocode</w>
|
<w>androidstudiocode</w>
|
||||||
<w>anim</w>
|
<w>anim</w>
|
||||||
<w>animcurve</w>
|
<w>animcurve</w>
|
||||||
|
<w>anntype</w>
|
||||||
<w>anota</w>
|
<w>anota</w>
|
||||||
<w>anroid</w>
|
<w>anroid</w>
|
||||||
<w>ansiwrap</w>
|
<w>ansiwrap</w>
|
||||||
<w>antigravity</w>
|
<w>antigravity</w>
|
||||||
<w>anyofallof</w>
|
<w>anyofallof</w>
|
||||||
|
<w>anyval</w>
|
||||||
<w>aosp</w>
|
<w>aosp</w>
|
||||||
<w>apichanges</w>
|
<w>apichanges</w>
|
||||||
<w>apis</w>
|
<w>apis</w>
|
||||||
@ -338,7 +340,11 @@
|
|||||||
<w>checkroundover</w>
|
<w>checkroundover</w>
|
||||||
<w>checksums</w>
|
<w>checksums</w>
|
||||||
<w>checktype</w>
|
<w>checktype</w>
|
||||||
|
<w>childanntype</w>
|
||||||
|
<w>childanntypes</w>
|
||||||
<w>childnode</w>
|
<w>childnode</w>
|
||||||
|
<w>childtype</w>
|
||||||
|
<w>childtypes</w>
|
||||||
<w>chinesetraditional</w>
|
<w>chinesetraditional</w>
|
||||||
<w>chipfork</w>
|
<w>chipfork</w>
|
||||||
<w>chosenone</w>
|
<w>chosenone</w>
|
||||||
@ -478,6 +484,7 @@
|
|||||||
<w>darwiin</w>
|
<w>darwiin</w>
|
||||||
<w>darwiinremote</w>
|
<w>darwiinremote</w>
|
||||||
<w>datab</w>
|
<w>datab</w>
|
||||||
|
<w>dataclassio</w>
|
||||||
<w>dataclassutils</w>
|
<w>dataclassutils</w>
|
||||||
<w>datamodule</w>
|
<w>datamodule</w>
|
||||||
<w>dataname</w>
|
<w>dataname</w>
|
||||||
@ -491,6 +498,8 @@
|
|||||||
<w>dbgsfx</w>
|
<w>dbgsfx</w>
|
||||||
<w>dbgstr</w>
|
<w>dbgstr</w>
|
||||||
<w>dbpath</w>
|
<w>dbpath</w>
|
||||||
|
<w>dcioexattrs</w>
|
||||||
|
<w>dcioprep</w>
|
||||||
<w>dcls</w>
|
<w>dcls</w>
|
||||||
<w>dcmake</w>
|
<w>dcmake</w>
|
||||||
<w>deathmatch</w>
|
<w>deathmatch</w>
|
||||||
@ -515,6 +524,7 @@
|
|||||||
<w>dgram</w>
|
<w>dgram</w>
|
||||||
<w>dhave</w>
|
<w>dhave</w>
|
||||||
<w>dheadless</w>
|
<w>dheadless</w>
|
||||||
|
<w>dictval</w>
|
||||||
<w>diemessages</w>
|
<w>diemessages</w>
|
||||||
<w>difflib</w>
|
<w>difflib</w>
|
||||||
<w>dilateerode</w>
|
<w>dilateerode</w>
|
||||||
@ -596,6 +606,7 @@
|
|||||||
<w>dummytoken</w>
|
<w>dummytoken</w>
|
||||||
<w>dummyval</w>
|
<w>dummyval</w>
|
||||||
<w>dups</w>
|
<w>dups</w>
|
||||||
|
<w>dval</w>
|
||||||
<w>dxml</w>
|
<w>dxml</w>
|
||||||
<w>dynload</w>
|
<w>dynload</w>
|
||||||
<w>eachother</w>
|
<w>eachother</w>
|
||||||
@ -647,6 +658,7 @@
|
|||||||
<w>entrytypeselect</w>
|
<w>entrytypeselect</w>
|
||||||
<w>enumtype</w>
|
<w>enumtype</w>
|
||||||
<w>enumval</w>
|
<w>enumval</w>
|
||||||
|
<w>enumvaltype</w>
|
||||||
<w>enumvalue</w>
|
<w>enumvalue</w>
|
||||||
<w>enval</w>
|
<w>enval</w>
|
||||||
<w>envcfg</w>
|
<w>envcfg</w>
|
||||||
@ -919,6 +931,7 @@
|
|||||||
<w>gitignores</w>
|
<w>gitignores</w>
|
||||||
<w>gitlog</w>
|
<w>gitlog</w>
|
||||||
<w>gitlogcore</w>
|
<w>gitlogcore</w>
|
||||||
|
<w>globalns</w>
|
||||||
<w>globalsnode</w>
|
<w>globalsnode</w>
|
||||||
<w>gmake</w>
|
<w>gmake</w>
|
||||||
<w>gname</w>
|
<w>gname</w>
|
||||||
@ -1109,7 +1122,9 @@
|
|||||||
<w>keepalives</w>
|
<w>keepalives</w>
|
||||||
<w>keepaway</w>
|
<w>keepaway</w>
|
||||||
<w>keeprefs</w>
|
<w>keeprefs</w>
|
||||||
|
<w>keyanntype</w>
|
||||||
<w>keyfilt</w>
|
<w>keyfilt</w>
|
||||||
|
<w>keyint</w>
|
||||||
<w>keylayout</w>
|
<w>keylayout</w>
|
||||||
<w>keypresses</w>
|
<w>keypresses</w>
|
||||||
<w>keystr</w>
|
<w>keystr</w>
|
||||||
@ -1223,6 +1238,7 @@
|
|||||||
<w>loadpackage</w>
|
<w>loadpackage</w>
|
||||||
<w>localconfig</w>
|
<w>localconfig</w>
|
||||||
<w>localdir</w>
|
<w>localdir</w>
|
||||||
|
<w>localns</w>
|
||||||
<w>localuser</w>
|
<w>localuser</w>
|
||||||
<w>locationgroup</w>
|
<w>locationgroup</w>
|
||||||
<w>locationgroups</w>
|
<w>locationgroups</w>
|
||||||
@ -1521,6 +1537,7 @@
|
|||||||
<w>outname</w>
|
<w>outname</w>
|
||||||
<w>outpath</w>
|
<w>outpath</w>
|
||||||
<w>outputter</w>
|
<w>outputter</w>
|
||||||
|
<w>outval</w>
|
||||||
<w>outvalue</w>
|
<w>outvalue</w>
|
||||||
<w>ouya</w>
|
<w>ouya</w>
|
||||||
<w>overloadsigs</w>
|
<w>overloadsigs</w>
|
||||||
@ -1556,6 +1573,7 @@
|
|||||||
<w>pchild</w>
|
<w>pchild</w>
|
||||||
<w>pcommand</w>
|
<w>pcommand</w>
|
||||||
<w>pcstr</w>
|
<w>pcstr</w>
|
||||||
|
<w>pdataclass</w>
|
||||||
<w>pedit</w>
|
<w>pedit</w>
|
||||||
<w>peditui</w>
|
<w>peditui</w>
|
||||||
<w>pentry</w>
|
<w>pentry</w>
|
||||||
@ -1651,6 +1669,7 @@
|
|||||||
<w>premult</w>
|
<w>premult</w>
|
||||||
<w>premultiplied</w>
|
<w>premultiplied</w>
|
||||||
<w>premultiply</w>
|
<w>premultiply</w>
|
||||||
|
<w>prepdata</w>
|
||||||
<w>preprocessing</w>
|
<w>preprocessing</w>
|
||||||
<w>prereq</w>
|
<w>prereq</w>
|
||||||
<w>prereqs</w>
|
<w>prereqs</w>
|
||||||
@ -2343,9 +2362,12 @@
|
|||||||
<w>userfunctions</w>
|
<w>userfunctions</w>
|
||||||
<w>utcnow</w>
|
<w>utcnow</w>
|
||||||
<w>utimensat</w>
|
<w>utimensat</w>
|
||||||
|
<w>uval</w>
|
||||||
|
<w>valanntype</w>
|
||||||
<w>validpgpkeys</w>
|
<w>validpgpkeys</w>
|
||||||
<w>valnew</w>
|
<w>valnew</w>
|
||||||
<w>vals</w>
|
<w>vals</w>
|
||||||
|
<w>valtype</w>
|
||||||
<w>valuedispatch</w>
|
<w>valuedispatch</w>
|
||||||
<w>valuedispatchmethod</w>
|
<w>valuedispatchmethod</w>
|
||||||
<w>valueerror</w>
|
<w>valueerror</w>
|
||||||
|
|||||||
2
Makefile
2
Makefile
@ -663,7 +663,7 @@ test-assetmanager:
|
|||||||
# Individual test with extra output enabled.
|
# Individual test with extra output enabled.
|
||||||
test-dataclasses:
|
test-dataclasses:
|
||||||
@tools/pcommand pytest -o log_cli=true -o log_cli_level=debug -s -vv \
|
@tools/pcommand pytest -o log_cli=true -o log_cli_level=debug -s -vv \
|
||||||
tests/test_efro/test_dataclasses.py
|
tests/test_efro/test_dataclassio.py
|
||||||
|
|
||||||
# Individual test with extra output enabled.
|
# Individual test with extra output enabled.
|
||||||
test-entity:
|
test-entity:
|
||||||
|
|||||||
@ -494,13 +494,13 @@
|
|||||||
"ba_data/python/efro/__init__.py",
|
"ba_data/python/efro/__init__.py",
|
||||||
"ba_data/python/efro/__pycache__/__init__.cpython-38.opt-1.pyc",
|
"ba_data/python/efro/__pycache__/__init__.cpython-38.opt-1.pyc",
|
||||||
"ba_data/python/efro/__pycache__/call.cpython-38.opt-1.pyc",
|
"ba_data/python/efro/__pycache__/call.cpython-38.opt-1.pyc",
|
||||||
"ba_data/python/efro/__pycache__/dataclasses.cpython-38.opt-1.pyc",
|
"ba_data/python/efro/__pycache__/dataclassio.cpython-38.opt-1.pyc",
|
||||||
"ba_data/python/efro/__pycache__/error.cpython-38.opt-1.pyc",
|
"ba_data/python/efro/__pycache__/error.cpython-38.opt-1.pyc",
|
||||||
"ba_data/python/efro/__pycache__/json.cpython-38.opt-1.pyc",
|
"ba_data/python/efro/__pycache__/json.cpython-38.opt-1.pyc",
|
||||||
"ba_data/python/efro/__pycache__/terminal.cpython-38.opt-1.pyc",
|
"ba_data/python/efro/__pycache__/terminal.cpython-38.opt-1.pyc",
|
||||||
"ba_data/python/efro/__pycache__/util.cpython-38.opt-1.pyc",
|
"ba_data/python/efro/__pycache__/util.cpython-38.opt-1.pyc",
|
||||||
"ba_data/python/efro/call.py",
|
"ba_data/python/efro/call.py",
|
||||||
"ba_data/python/efro/dataclasses.py",
|
"ba_data/python/efro/dataclassio.py",
|
||||||
"ba_data/python/efro/entity/__init__.py",
|
"ba_data/python/efro/entity/__init__.py",
|
||||||
"ba_data/python/efro/entity/__pycache__/__init__.cpython-38.opt-1.pyc",
|
"ba_data/python/efro/entity/__pycache__/__init__.cpython-38.opt-1.pyc",
|
||||||
"ba_data/python/efro/entity/__pycache__/_base.cpython-38.opt-1.pyc",
|
"ba_data/python/efro/entity/__pycache__/_base.cpython-38.opt-1.pyc",
|
||||||
|
|||||||
@ -640,7 +640,7 @@ SCRIPT_TARGETS_PY_PUBLIC_TOOLS = \
|
|||||||
build/ba_data/python/bacommon/servermanager.py \
|
build/ba_data/python/bacommon/servermanager.py \
|
||||||
build/ba_data/python/efro/__init__.py \
|
build/ba_data/python/efro/__init__.py \
|
||||||
build/ba_data/python/efro/call.py \
|
build/ba_data/python/efro/call.py \
|
||||||
build/ba_data/python/efro/dataclasses.py \
|
build/ba_data/python/efro/dataclassio.py \
|
||||||
build/ba_data/python/efro/entity/__init__.py \
|
build/ba_data/python/efro/entity/__init__.py \
|
||||||
build/ba_data/python/efro/entity/_base.py \
|
build/ba_data/python/efro/entity/_base.py \
|
||||||
build/ba_data/python/efro/entity/_entity.py \
|
build/ba_data/python/efro/entity/_entity.py \
|
||||||
@ -661,7 +661,7 @@ SCRIPT_TARGETS_PYC_PUBLIC_TOOLS = \
|
|||||||
build/ba_data/python/bacommon/__pycache__/servermanager.cpython-38.opt-1.pyc \
|
build/ba_data/python/bacommon/__pycache__/servermanager.cpython-38.opt-1.pyc \
|
||||||
build/ba_data/python/efro/__pycache__/__init__.cpython-38.opt-1.pyc \
|
build/ba_data/python/efro/__pycache__/__init__.cpython-38.opt-1.pyc \
|
||||||
build/ba_data/python/efro/__pycache__/call.cpython-38.opt-1.pyc \
|
build/ba_data/python/efro/__pycache__/call.cpython-38.opt-1.pyc \
|
||||||
build/ba_data/python/efro/__pycache__/dataclasses.cpython-38.opt-1.pyc \
|
build/ba_data/python/efro/__pycache__/dataclassio.cpython-38.opt-1.pyc \
|
||||||
build/ba_data/python/efro/entity/__pycache__/__init__.cpython-38.opt-1.pyc \
|
build/ba_data/python/efro/entity/__pycache__/__init__.cpython-38.opt-1.pyc \
|
||||||
build/ba_data/python/efro/entity/__pycache__/_base.cpython-38.opt-1.pyc \
|
build/ba_data/python/efro/entity/__pycache__/_base.cpython-38.opt-1.pyc \
|
||||||
build/ba_data/python/efro/entity/__pycache__/_entity.cpython-38.opt-1.pyc \
|
build/ba_data/python/efro/entity/__pycache__/_entity.cpython-38.opt-1.pyc \
|
||||||
|
|||||||
@ -13,7 +13,9 @@ from typing import TYPE_CHECKING, cast
|
|||||||
|
|
||||||
import ba
|
import ba
|
||||||
import _ba
|
import _ba
|
||||||
from efro.dataclasses import dataclass_from_dict
|
from efro.dataclassio import dataclass_from_dict
|
||||||
|
from bacommon.net import (PrivateHostingState, PrivateHostingConfig,
|
||||||
|
PrivatePartyConnectResult)
|
||||||
from bastd.ui.gather import GatherTab
|
from bastd.ui.gather import GatherTab
|
||||||
from bastd.ui import getcurrency
|
from bastd.ui import getcurrency
|
||||||
|
|
||||||
@ -37,37 +39,6 @@ class State:
|
|||||||
sub_tab: SubTabType = SubTabType.JOIN
|
sub_tab: SubTabType = SubTabType.JOIN
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ConnectResult:
|
|
||||||
"""Info about a server we get back when connecting."""
|
|
||||||
error: Optional[str] = None
|
|
||||||
addr: Optional[str] = None
|
|
||||||
port: Optional[int] = None
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class HostingState:
|
|
||||||
"""Our combined state of whether we're hosting, whether we can, etc."""
|
|
||||||
unavailable_error: Optional[str] = None
|
|
||||||
party_code: Optional[str] = None
|
|
||||||
able_to_host: bool = False
|
|
||||||
tickets_to_host_now: int = 0
|
|
||||||
minutes_until_free_host: Optional[float] = None
|
|
||||||
free_host_minutes_remaining: Optional[float] = None
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class HostingConfig:
|
|
||||||
"""Config we provide when hosting."""
|
|
||||||
session_type: str = 'ffa'
|
|
||||||
playlist_name: str = 'Unknown'
|
|
||||||
randomize: bool = False
|
|
||||||
tutorial: bool = False
|
|
||||||
custom_team_names: Optional[List[str]] = None
|
|
||||||
custom_team_colors: Optional[List[List[float]]] = None
|
|
||||||
playlist: Optional[List[Dict[str, Any]]] = None
|
|
||||||
|
|
||||||
|
|
||||||
class PrivateGatherTab(GatherTab):
|
class PrivateGatherTab(GatherTab):
|
||||||
"""The private tab in the gather UI"""
|
"""The private tab in the gather UI"""
|
||||||
|
|
||||||
@ -75,7 +46,7 @@ class PrivateGatherTab(GatherTab):
|
|||||||
super().__init__(window)
|
super().__init__(window)
|
||||||
self._container: Optional[ba.Widget] = None
|
self._container: Optional[ba.Widget] = None
|
||||||
self._state: State = State()
|
self._state: State = State()
|
||||||
self._hostingstate = HostingState()
|
self._hostingstate = PrivateHostingState()
|
||||||
self._join_sub_tab_text: Optional[ba.Widget] = None
|
self._join_sub_tab_text: Optional[ba.Widget] = None
|
||||||
self._host_sub_tab_text: Optional[ba.Widget] = None
|
self._host_sub_tab_text: Optional[ba.Widget] = None
|
||||||
self._update_timer: Optional[ba.Timer] = None
|
self._update_timer: Optional[ba.Timer] = None
|
||||||
@ -99,7 +70,7 @@ class PrivateGatherTab(GatherTab):
|
|||||||
self._hostingconfig = self._build_hosting_config()
|
self._hostingconfig = self._build_hosting_config()
|
||||||
except Exception:
|
except Exception:
|
||||||
ba.print_exception('Error building hosting config')
|
ba.print_exception('Error building hosting config')
|
||||||
self._hostingconfig = HostingConfig()
|
self._hostingconfig = PrivateHostingConfig()
|
||||||
|
|
||||||
def on_activate(
|
def on_activate(
|
||||||
self,
|
self,
|
||||||
@ -178,10 +149,10 @@ class PrivateGatherTab(GatherTab):
|
|||||||
|
|
||||||
return self._container
|
return self._container
|
||||||
|
|
||||||
def _build_hosting_config(self) -> HostingConfig:
|
def _build_hosting_config(self) -> PrivateHostingConfig:
|
||||||
from bastd.ui.playlist import PlaylistTypeVars
|
from bastd.ui.playlist import PlaylistTypeVars
|
||||||
from ba.internal import filter_playlist
|
from ba.internal import filter_playlist
|
||||||
hcfg = HostingConfig()
|
hcfg = PrivateHostingConfig()
|
||||||
cfg = ba.app.config
|
cfg = ba.app.config
|
||||||
sessiontypestr = cfg.get('Private Party Host Session Type', 'ffa')
|
sessiontypestr = cfg.get('Private Party Host Session Type', 'ffa')
|
||||||
if not isinstance(sessiontypestr, str):
|
if not isinstance(sessiontypestr, str):
|
||||||
@ -205,10 +176,13 @@ class PrivateGatherTab(GatherTab):
|
|||||||
if playlist_name == '__default__' else
|
if playlist_name == '__default__' else
|
||||||
playlist_name)
|
playlist_name)
|
||||||
|
|
||||||
if playlist_name == '__default__':
|
playlist: Optional[List[Dict[str, Any]]] = None
|
||||||
|
if playlist_name != '__default__':
|
||||||
|
playlist = (cfg.get(f'{pvars.config_name} Playlists',
|
||||||
|
{}).get(playlist_name))
|
||||||
|
if playlist is None:
|
||||||
playlist = pvars.get_default_list_call()
|
playlist = pvars.get_default_list_call()
|
||||||
else:
|
|
||||||
playlist = cfg[f'{pvars.config_name} Playlists'][playlist_name]
|
|
||||||
hcfg.playlist = filter_playlist(playlist, sessiontype)
|
hcfg.playlist = filter_playlist(playlist, sessiontype)
|
||||||
|
|
||||||
randomize = cfg.get(f'{pvars.config_name} Playlist Randomize')
|
randomize = cfg.get(f'{pvars.config_name} Playlist Randomize')
|
||||||
@ -297,13 +271,15 @@ class PrivateGatherTab(GatherTab):
|
|||||||
if not self._container:
|
if not self._container:
|
||||||
return
|
return
|
||||||
|
|
||||||
state: Optional[HostingState] = None
|
state: Optional[PrivateHostingState] = None
|
||||||
if result is not None:
|
if result is not None:
|
||||||
self._debug_server_comm('got private party state response')
|
self._debug_server_comm('got private party state response')
|
||||||
try:
|
try:
|
||||||
state = dataclass_from_dict(HostingState, result)
|
state = dataclass_from_dict(PrivateHostingState,
|
||||||
|
result,
|
||||||
|
discard_unknown_attrs=True)
|
||||||
except Exception:
|
except Exception:
|
||||||
ba.print_exception('Got invalid HostingState data')
|
ba.print_exception('Got invalid PrivateHostingState data')
|
||||||
else:
|
else:
|
||||||
self._debug_server_comm('private party state response errored')
|
self._debug_server_comm('private party state response errored')
|
||||||
|
|
||||||
@ -808,7 +784,8 @@ class PrivateGatherTab(GatherTab):
|
|||||||
_ba.add_transaction(
|
_ba.add_transaction(
|
||||||
{
|
{
|
||||||
'type': 'PRIVATE_PARTY_START',
|
'type': 'PRIVATE_PARTY_START',
|
||||||
'config': asdict(self._hostingconfig)
|
'config': asdict(self._hostingconfig),
|
||||||
|
'region_pings': ba.app.net.region_pings,
|
||||||
},
|
},
|
||||||
callback=ba.WeakCall(self._hosting_state_response))
|
callback=ba.WeakCall(self._hosting_state_response))
|
||||||
_ba.run_transactions()
|
_ba.run_transactions()
|
||||||
@ -844,7 +821,9 @@ class PrivateGatherTab(GatherTab):
|
|||||||
self._connect_press_time = None
|
self._connect_press_time = None
|
||||||
if result is None:
|
if result is None:
|
||||||
raise RuntimeError()
|
raise RuntimeError()
|
||||||
cresult = dataclass_from_dict(ConnectResult, result)
|
cresult = dataclass_from_dict(PrivatePartyConnectResult,
|
||||||
|
result,
|
||||||
|
discard_unknown_attrs=True)
|
||||||
if cresult.error is not None:
|
if cresult.error is not None:
|
||||||
self._debug_server_comm('got error connect response')
|
self._debug_server_comm('got error connect response')
|
||||||
ba.screenmessage(
|
ba.screenmessage(
|
||||||
|
|||||||
@ -23,7 +23,7 @@ sys.path += [
|
|||||||
]
|
]
|
||||||
|
|
||||||
from bacommon.servermanager import ServerConfig, StartServerModeCommand
|
from bacommon.servermanager import ServerConfig, StartServerModeCommand
|
||||||
from efro.dataclasses import dataclass_from_dict, dataclass_validate
|
from efro.dataclassio import dataclass_from_dict, dataclass_validate
|
||||||
from efro.error import CleanError
|
from efro.error import CleanError
|
||||||
from efro.terminal import Clr
|
from efro.terminal import Clr
|
||||||
|
|
||||||
|
|||||||
21
ballisticacore-cmake/.idea/dictionaries/ericf.xml
generated
21
ballisticacore-cmake/.idea/dictionaries/ericf.xml
generated
@ -41,8 +41,10 @@
|
|||||||
<w>anchorx</w>
|
<w>anchorx</w>
|
||||||
<w>animcurve</w>
|
<w>animcurve</w>
|
||||||
<w>aniso</w>
|
<w>aniso</w>
|
||||||
|
<w>anntype</w>
|
||||||
<w>ansiwrap</w>
|
<w>ansiwrap</w>
|
||||||
<w>anyofallof</w>
|
<w>anyofallof</w>
|
||||||
|
<w>anyval</w>
|
||||||
<w>aosp</w>
|
<w>aosp</w>
|
||||||
<w>apientry</w>
|
<w>apientry</w>
|
||||||
<w>appconfig</w>
|
<w>appconfig</w>
|
||||||
@ -158,6 +160,10 @@
|
|||||||
<w>chdir</w>
|
<w>chdir</w>
|
||||||
<w>checkboxwidget</w>
|
<w>checkboxwidget</w>
|
||||||
<w>checkchisel</w>
|
<w>checkchisel</w>
|
||||||
|
<w>childanntype</w>
|
||||||
|
<w>childanntypes</w>
|
||||||
|
<w>childtype</w>
|
||||||
|
<w>childtypes</w>
|
||||||
<w>chrono</w>
|
<w>chrono</w>
|
||||||
<w>chunksize</w>
|
<w>chunksize</w>
|
||||||
<w>cjief</w>
|
<w>cjief</w>
|
||||||
@ -208,6 +214,7 @@
|
|||||||
<w>cutef</w>
|
<w>cutef</w>
|
||||||
<w>cvar</w>
|
<w>cvar</w>
|
||||||
<w>data</w>
|
<w>data</w>
|
||||||
|
<w>dataclassio</w>
|
||||||
<w>datadata</w>
|
<w>datadata</w>
|
||||||
<w>dataout</w>
|
<w>dataout</w>
|
||||||
<w>datas</w>
|
<w>datas</w>
|
||||||
@ -215,6 +222,8 @@
|
|||||||
<w>datavec</w>
|
<w>datavec</w>
|
||||||
<w>dbgstr</w>
|
<w>dbgstr</w>
|
||||||
<w>dbias</w>
|
<w>dbias</w>
|
||||||
|
<w>dcioexattrs</w>
|
||||||
|
<w>dcioprep</w>
|
||||||
<w>dcol</w>
|
<w>dcol</w>
|
||||||
<w>ddcaps</w>
|
<w>ddcaps</w>
|
||||||
<w>ddpf</w>
|
<w>ddpf</w>
|
||||||
@ -237,6 +246,7 @@
|
|||||||
<w>dfba</w>
|
<w>dfba</w>
|
||||||
<w>dfff</w>
|
<w>dfff</w>
|
||||||
<w>dfmt</w>
|
<w>dfmt</w>
|
||||||
|
<w>dictval</w>
|
||||||
<w>diffbit</w>
|
<w>diffbit</w>
|
||||||
<w>dirslash</w>
|
<w>dirslash</w>
|
||||||
<w>dlfcn</w>
|
<w>dlfcn</w>
|
||||||
@ -262,6 +272,7 @@
|
|||||||
<w>dtest</w>
|
<w>dtest</w>
|
||||||
<w>dummyval</w>
|
<w>dummyval</w>
|
||||||
<w>dummyvalid</w>
|
<w>dummyvalid</w>
|
||||||
|
<w>dval</w>
|
||||||
<w>dxgi</w>
|
<w>dxgi</w>
|
||||||
<w>dynamicdata</w>
|
<w>dynamicdata</w>
|
||||||
<w>echidna</w>
|
<w>echidna</w>
|
||||||
@ -283,6 +294,7 @@
|
|||||||
<w>endline</w>
|
<w>endline</w>
|
||||||
<w>endtime</w>
|
<w>endtime</w>
|
||||||
<w>entrypoint</w>
|
<w>entrypoint</w>
|
||||||
|
<w>enumvaltype</w>
|
||||||
<w>enumvalue</w>
|
<w>enumvalue</w>
|
||||||
<w>enval</w>
|
<w>enval</w>
|
||||||
<w>envcfg</w>
|
<w>envcfg</w>
|
||||||
@ -408,6 +420,7 @@
|
|||||||
<w>gettotalrefcount</w>
|
<w>gettotalrefcount</w>
|
||||||
<w>gles</w>
|
<w>gles</w>
|
||||||
<w>glext</w>
|
<w>glext</w>
|
||||||
|
<w>globalns</w>
|
||||||
<w>googleplaytab</w>
|
<w>googleplaytab</w>
|
||||||
<w>gpgs</w>
|
<w>gpgs</w>
|
||||||
<w>gqualstr</w>
|
<w>gqualstr</w>
|
||||||
@ -491,8 +504,10 @@
|
|||||||
<w>jcjwf</w>
|
<w>jcjwf</w>
|
||||||
<w>jmessage</w>
|
<w>jmessage</w>
|
||||||
<w>keepalives</w>
|
<w>keepalives</w>
|
||||||
|
<w>keyanntype</w>
|
||||||
<w>keycode</w>
|
<w>keycode</w>
|
||||||
<w>keyfilt</w>
|
<w>keyfilt</w>
|
||||||
|
<w>keyint</w>
|
||||||
<w>keysyms</w>
|
<w>keysyms</w>
|
||||||
<w>keywds</w>
|
<w>keywds</w>
|
||||||
<w>khronos</w>
|
<w>khronos</w>
|
||||||
@ -521,6 +536,7 @@
|
|||||||
<w>linearstep</w>
|
<w>linearstep</w>
|
||||||
<w>listobj</w>
|
<w>listobj</w>
|
||||||
<w>llock</w>
|
<w>llock</w>
|
||||||
|
<w>localns</w>
|
||||||
<w>lockpath</w>
|
<w>lockpath</w>
|
||||||
<w>lockstr</w>
|
<w>lockstr</w>
|
||||||
<w>locktype</w>
|
<w>locktype</w>
|
||||||
@ -693,6 +709,7 @@
|
|||||||
<w>parameteriv</w>
|
<w>parameteriv</w>
|
||||||
<w>passcode</w>
|
<w>passcode</w>
|
||||||
<w>pausable</w>
|
<w>pausable</w>
|
||||||
|
<w>pdataclass</w>
|
||||||
<w>pdst</w>
|
<w>pdst</w>
|
||||||
<w>persp</w>
|
<w>persp</w>
|
||||||
<w>pflag</w>
|
<w>pflag</w>
|
||||||
@ -719,6 +736,7 @@
|
|||||||
<w>preloaded</w>
|
<w>preloaded</w>
|
||||||
<w>preloads</w>
|
<w>preloads</w>
|
||||||
<w>premult</w>
|
<w>premult</w>
|
||||||
|
<w>prepdata</w>
|
||||||
<w>prereq</w>
|
<w>prereq</w>
|
||||||
<w>prerun</w>
|
<w>prerun</w>
|
||||||
<w>printf</w>
|
<w>printf</w>
|
||||||
@ -1007,12 +1025,15 @@
|
|||||||
<w>userspace</w>
|
<w>userspace</w>
|
||||||
<w>uther</w>
|
<w>uther</w>
|
||||||
<w>uuids</w>
|
<w>uuids</w>
|
||||||
|
<w>uval</w>
|
||||||
<w>uxxxx</w>
|
<w>uxxxx</w>
|
||||||
<w>uxxxxxxxx</w>
|
<w>uxxxxxxxx</w>
|
||||||
|
<w>valanntype</w>
|
||||||
<w>valign</w>
|
<w>valign</w>
|
||||||
<w>valobj</w>
|
<w>valobj</w>
|
||||||
<w>vals</w>
|
<w>vals</w>
|
||||||
<w>valtab</w>
|
<w>valtab</w>
|
||||||
|
<w>valtype</w>
|
||||||
<w>valuedispatchmethod</w>
|
<w>valuedispatchmethod</w>
|
||||||
<w>valuedouble</w>
|
<w>valuedouble</w>
|
||||||
<w>valueint</w>
|
<w>valueint</w>
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
<!-- THIS FILE IS AUTO GENERATED; DO NOT EDIT BY HAND -->
|
<!-- THIS FILE IS AUTO GENERATED; DO NOT EDIT BY HAND -->
|
||||||
<h4><em>last updated on 2021-04-28 for Ballistica version 1.6.0 build 20350</em></h4>
|
<h4><em>last updated on 2021-04-30 for Ballistica version 1.6.0 build 20352</em></h4>
|
||||||
<p>This page documents the Python classes and functions in the 'ba' module,
|
<p>This page documents the Python classes and functions in the 'ba' module,
|
||||||
which are the ones most relevant to modding in Ballistica. If you come across something you feel should be included here or could be better explained, please <a href="mailto:support@froemling.net">let me know</a>. Happy modding!</p>
|
which are the ones most relevant to modding in Ballistica. If you come across something you feel should be included here or could be better explained, please <a href="mailto:support@froemling.net">let me know</a>. Happy modding!</p>
|
||||||
<hr>
|
<hr>
|
||||||
|
|||||||
@ -21,7 +21,7 @@
|
|||||||
namespace ballistica {
|
namespace ballistica {
|
||||||
|
|
||||||
// These are set automatically via script; don't change here.
|
// These are set automatically via script; don't change here.
|
||||||
const int kAppBuildNumber = 20351;
|
const int kAppBuildNumber = 20353;
|
||||||
const char* kAppVersion = "1.6.0";
|
const char* kAppVersion = "1.6.0";
|
||||||
|
|
||||||
// Our standalone globals.
|
// Our standalone globals.
|
||||||
|
|||||||
@ -5,16 +5,16 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import field, dataclass
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from efro.dataclasses import (dataclass_validate, dataclass_from_dict,
|
from efro.dataclassio import (dataclass_validate, dataclass_from_dict,
|
||||||
dataclass_to_dict)
|
dataclass_to_dict, prepped)
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from typing import Optional, List, Set
|
from typing import Optional, List, Set, Any, Dict, Sequence, Union
|
||||||
|
|
||||||
|
|
||||||
class _EnumTest(Enum):
|
class _EnumTest(Enum):
|
||||||
@ -22,10 +22,30 @@ class _EnumTest(Enum):
|
|||||||
TEST2 = 'test2'
|
TEST2 = 'test2'
|
||||||
|
|
||||||
|
|
||||||
|
class _GoodEnum(Enum):
|
||||||
|
VAL1 = 'val1'
|
||||||
|
VAL2 = 'val2'
|
||||||
|
|
||||||
|
|
||||||
|
class _GoodEnum2(Enum):
|
||||||
|
VAL1 = 1
|
||||||
|
VAL2 = 2
|
||||||
|
|
||||||
|
|
||||||
|
class _BadEnum1(Enum):
|
||||||
|
VAL1 = 1.23
|
||||||
|
|
||||||
|
|
||||||
|
class _BadEnum2(Enum):
|
||||||
|
VAL1 = 1
|
||||||
|
VAL2 = 'val2'
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class _NestedClass:
|
class _NestedClass:
|
||||||
ival: int = 0
|
ival: int = 0
|
||||||
sval: str = 'foo'
|
sval: str = 'foo'
|
||||||
|
dval: Dict[int, str] = field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
def test_assign() -> None:
|
def test_assign() -> None:
|
||||||
@ -33,6 +53,7 @@ def test_assign() -> None:
|
|||||||
|
|
||||||
# pylint: disable=too-many-statements
|
# pylint: disable=too-many-statements
|
||||||
|
|
||||||
|
@prepped
|
||||||
@dataclass
|
@dataclass
|
||||||
class _TestClass:
|
class _TestClass:
|
||||||
ival: int = 0
|
ival: int = 0
|
||||||
@ -52,6 +73,8 @@ def test_assign() -> None:
|
|||||||
lfval: List[float] = field(default_factory=list)
|
lfval: List[float] = field(default_factory=list)
|
||||||
lenval: List[_EnumTest] = field(default_factory=list)
|
lenval: List[_EnumTest] = field(default_factory=list)
|
||||||
ssval: Set[str] = field(default_factory=set)
|
ssval: Set[str] = field(default_factory=set)
|
||||||
|
anyval: Any = 1
|
||||||
|
dictval: Dict[int, str] = field(default_factory=dict)
|
||||||
|
|
||||||
class _TestClass2:
|
class _TestClass2:
|
||||||
pass
|
pass
|
||||||
@ -66,10 +89,6 @@ def test_assign() -> None:
|
|||||||
with pytest.raises(TypeError):
|
with pytest.raises(TypeError):
|
||||||
dataclass_from_dict(_TestClass, None) # type: ignore
|
dataclass_from_dict(_TestClass, None) # type: ignore
|
||||||
|
|
||||||
# Passing an attr not in the dataclass should fail.
|
|
||||||
with pytest.raises(AttributeError):
|
|
||||||
dataclass_from_dict(_TestClass, {'nonexistent': 'foo'})
|
|
||||||
|
|
||||||
# A dict containing *ALL* values should match what we
|
# A dict containing *ALL* values should match what we
|
||||||
# get when creating a dataclass and then converting back
|
# get when creating a dataclass and then converting back
|
||||||
# to a dict.
|
# to a dict.
|
||||||
@ -80,7 +99,10 @@ def test_assign() -> None:
|
|||||||
'fval': 2.0,
|
'fval': 2.0,
|
||||||
'nval': {
|
'nval': {
|
||||||
'ival': 1,
|
'ival': 1,
|
||||||
'sval': 'bar'
|
'sval': 'bar',
|
||||||
|
'dval': {
|
||||||
|
'1': 'foof'
|
||||||
|
},
|
||||||
},
|
},
|
||||||
'enval': 'test1',
|
'enval': 'test1',
|
||||||
'oival': 1,
|
'oival': 1,
|
||||||
@ -93,7 +115,19 @@ def test_assign() -> None:
|
|||||||
'lbval': [False],
|
'lbval': [False],
|
||||||
'lfval': [1.0],
|
'lfval': [1.0],
|
||||||
'lenval': ['test1', 'test2'],
|
'lenval': ['test1', 'test2'],
|
||||||
'ssval': ['foo']
|
'ssval': ['foo'],
|
||||||
|
'dval': {
|
||||||
|
'k': 123
|
||||||
|
},
|
||||||
|
'anyval': {
|
||||||
|
'foo': [1, 2, {
|
||||||
|
'bar': 'eep',
|
||||||
|
'rah': 1
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
'dictval': {
|
||||||
|
'1': 'foo'
|
||||||
|
}
|
||||||
}
|
}
|
||||||
dc1 = dataclass_from_dict(_TestClass, dict1)
|
dc1 = dataclass_from_dict(_TestClass, dict1)
|
||||||
assert dataclass_to_dict(dc1) == dict1
|
assert dataclass_to_dict(dc1) == dict1
|
||||||
@ -165,6 +199,20 @@ def test_assign() -> None:
|
|||||||
with pytest.raises(TypeError):
|
with pytest.raises(TypeError):
|
||||||
dataclass_from_dict(_TestClass, {'ssval': set()})
|
dataclass_from_dict(_TestClass, {'ssval': set()})
|
||||||
|
|
||||||
|
# Fields with type Any should accept all types which are directly
|
||||||
|
# supported by json, but not ones such as tuples or non-string dict keys
|
||||||
|
# which get implicitly translated by python's json module.
|
||||||
|
dataclass_from_dict(_TestClass, {'anyval': {}})
|
||||||
|
dataclass_from_dict(_TestClass, {'anyval': None})
|
||||||
|
dataclass_from_dict(_TestClass, {'anyval': []})
|
||||||
|
dataclass_from_dict(_TestClass, {'anyval': [True, {'foo': 'bar'}, None]})
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
dataclass_from_dict(_TestClass, {'anyval': {1: 'foo'}})
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
dataclass_from_dict(_TestClass, {'anyval': set()})
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
dataclass_from_dict(_TestClass, {'anyval': (1, 2, 3)})
|
||||||
|
|
||||||
# More subtle attr/type mismatches that should fail
|
# More subtle attr/type mismatches that should fail
|
||||||
# (we currently require EXACT type matches).
|
# (we currently require EXACT type matches).
|
||||||
with pytest.raises(TypeError):
|
with pytest.raises(TypeError):
|
||||||
@ -178,10 +226,22 @@ def test_assign() -> None:
|
|||||||
with pytest.raises(TypeError):
|
with pytest.raises(TypeError):
|
||||||
dataclass_from_dict(_TestClass, {'lfval': [1]}, coerce_to_float=False)
|
dataclass_from_dict(_TestClass, {'lfval': [1]}, coerce_to_float=False)
|
||||||
|
|
||||||
|
# Coerce-to-float should only work on ints; not bools or other types.
|
||||||
|
dataclass_from_dict(_TestClass, {'fval': 1}, coerce_to_float=True)
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
dataclass_from_dict(_TestClass, {'fval': 1}, coerce_to_float=False)
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
dataclass_from_dict(_TestClass, {'fval': True}, coerce_to_float=True)
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
dataclass_from_dict(_TestClass, {'fval': None}, coerce_to_float=True)
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
dataclass_from_dict(_TestClass, {'fval': []}, coerce_to_float=True)
|
||||||
|
|
||||||
|
|
||||||
def test_coerce() -> None:
|
def test_coerce() -> None:
|
||||||
"""Test value coercion."""
|
"""Test value coercion."""
|
||||||
|
|
||||||
|
@prepped
|
||||||
@dataclass
|
@dataclass
|
||||||
class _TestClass:
|
class _TestClass:
|
||||||
ival: int = 0
|
ival: int = 0
|
||||||
@ -216,9 +276,95 @@ def test_coerce() -> None:
|
|||||||
dataclass_from_dict(_TestClass, {'ival': 1.0}, coerce_to_float=False)
|
dataclass_from_dict(_TestClass, {'ival': 1.0}, coerce_to_float=False)
|
||||||
|
|
||||||
|
|
||||||
|
def test_prep() -> None:
|
||||||
|
"""Test the prepping process."""
|
||||||
|
|
||||||
|
# We currently don't support Sequence; can revisit if there is
|
||||||
|
# a strong use case.
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass:
|
||||||
|
ival: Sequence[int]
|
||||||
|
|
||||||
|
# We currently only support Unions with exactly 2 members; one of
|
||||||
|
# which is None. (Optional types get transformed into this by
|
||||||
|
# get_type_hints() so we need to support at least that).
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass2:
|
||||||
|
ival: Union[int, str]
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass3:
|
||||||
|
uval: Union[int, None]
|
||||||
|
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass4:
|
||||||
|
ival: Union[int, str]
|
||||||
|
|
||||||
|
# This will get simplified down to simply int by get_type_hints so is ok.
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass5:
|
||||||
|
ival: Union[int]
|
||||||
|
|
||||||
|
# This will get simplified down to a valid 2 member union so is ok
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass6:
|
||||||
|
ival: Union[int, None, int, None]
|
||||||
|
|
||||||
|
# Disallow dict entries with types other than str, int, or enums
|
||||||
|
# having those value types.
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass7:
|
||||||
|
dval: Dict[float, int]
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass8:
|
||||||
|
dval: Dict[str, int]
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass9:
|
||||||
|
dval: Dict[_GoodEnum, int]
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass10:
|
||||||
|
dval: Dict[_GoodEnum2, int]
|
||||||
|
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass11:
|
||||||
|
dval: Dict[_BadEnum1, int]
|
||||||
|
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass12:
|
||||||
|
dval: Dict[_BadEnum2, int]
|
||||||
|
|
||||||
|
|
||||||
def test_validate() -> None:
|
def test_validate() -> None:
|
||||||
"""Testing validation."""
|
"""Testing validation."""
|
||||||
|
|
||||||
|
@prepped
|
||||||
@dataclass
|
@dataclass
|
||||||
class _TestClass:
|
class _TestClass:
|
||||||
ival: int = 0
|
ival: int = 0
|
||||||
@ -248,3 +394,98 @@ def test_validate() -> None:
|
|||||||
tclass.ival = None # type: ignore
|
tclass.ival = None # type: ignore
|
||||||
with pytest.raises(TypeError):
|
with pytest.raises(TypeError):
|
||||||
dataclass_validate(tclass)
|
dataclass_validate(tclass)
|
||||||
|
|
||||||
|
|
||||||
|
def test_extra_data() -> None:
|
||||||
|
"""Test handling of data that doesn't map to dataclass attrs."""
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass:
|
||||||
|
ival: int = 0
|
||||||
|
sval: str = ''
|
||||||
|
|
||||||
|
# Passing an attr not in the dataclass should fail if we ask it to.
|
||||||
|
with pytest.raises(AttributeError):
|
||||||
|
dataclass_from_dict(_TestClass, {'nonexistent': 'foo'},
|
||||||
|
allow_unknown_attrs=False)
|
||||||
|
|
||||||
|
# But normally it should be preserved and present in re-export.
|
||||||
|
obj = dataclass_from_dict(_TestClass, {'nonexistent': 'foo'})
|
||||||
|
assert isinstance(obj, _TestClass)
|
||||||
|
out = dataclass_to_dict(obj)
|
||||||
|
assert out.get('nonexistent') == 'foo'
|
||||||
|
|
||||||
|
# But not if we ask it to discard unknowns.
|
||||||
|
obj = dataclass_from_dict(_TestClass, {'nonexistent': 'foo'},
|
||||||
|
discard_unknown_attrs=True)
|
||||||
|
assert isinstance(obj, _TestClass)
|
||||||
|
out = dataclass_to_dict(obj)
|
||||||
|
assert 'nonexistent' not in out
|
||||||
|
|
||||||
|
|
||||||
|
def test_dict() -> None:
|
||||||
|
"""Test various dict related bits."""
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass:
|
||||||
|
dval: dict
|
||||||
|
|
||||||
|
obj = _TestClass(dval={})
|
||||||
|
|
||||||
|
# 'Any' dicts should only support values directly compatible with json.
|
||||||
|
obj.dval['foo'] = 5
|
||||||
|
dataclass_to_dict(obj)
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
obj.dval[5] = 5
|
||||||
|
dataclass_to_dict(obj)
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
obj.dval['foo'] = _GoodEnum.VAL1
|
||||||
|
dataclass_to_dict(obj)
|
||||||
|
|
||||||
|
# Int dict-keys should actually be stored as strings internally
|
||||||
|
# (for json compatibility).
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass2:
|
||||||
|
dval: Dict[int, float]
|
||||||
|
|
||||||
|
obj2 = _TestClass2(dval={1: 2.34})
|
||||||
|
out = dataclass_to_dict(obj2)
|
||||||
|
assert '1' in out['dval']
|
||||||
|
assert 1 not in out['dval']
|
||||||
|
out['dval']['1'] = 2.35
|
||||||
|
obj2 = dataclass_from_dict(_TestClass2, out)
|
||||||
|
assert isinstance(obj2, _TestClass2)
|
||||||
|
assert obj2.dval[1] == 2.35
|
||||||
|
|
||||||
|
# Same with enum keys (we support enums with str and int values)
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass3:
|
||||||
|
dval: Dict[_GoodEnum, int]
|
||||||
|
|
||||||
|
obj3 = _TestClass3(dval={_GoodEnum.VAL1: 123})
|
||||||
|
out = dataclass_to_dict(obj3)
|
||||||
|
assert out['dval']['val1'] == 123
|
||||||
|
out['dval']['val1'] = 124
|
||||||
|
obj3 = dataclass_from_dict(_TestClass3, out)
|
||||||
|
assert obj3.dval[_GoodEnum.VAL1] == 124
|
||||||
|
|
||||||
|
@prepped
|
||||||
|
@dataclass
|
||||||
|
class _TestClass4:
|
||||||
|
dval: Dict[_GoodEnum2, int]
|
||||||
|
|
||||||
|
obj4 = _TestClass4(dval={_GoodEnum2.VAL1: 125})
|
||||||
|
out = dataclass_to_dict(obj4)
|
||||||
|
assert out['dval']['1'] == 125
|
||||||
|
out['dval']['1'] = 126
|
||||||
|
obj4 = dataclass_from_dict(_TestClass4, out)
|
||||||
|
assert obj4.dval[_GoodEnum2.VAL1] == 126
|
||||||
|
|
||||||
|
# The wrong enum type as a key should error.
|
||||||
|
obj4.dval = {_GoodEnum.VAL1: 999} # type: ignore
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
dataclass_to_dict(obj4)
|
||||||
@ -5,11 +5,12 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
from efro import entity
|
from efro import entity
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
pass
|
from typing import Optional, Any, List, Dict
|
||||||
|
|
||||||
|
|
||||||
class ServerNodeEntry(entity.CompoundValue):
|
class ServerNodeEntry(entity.CompoundValue):
|
||||||
@ -29,3 +30,35 @@ class ServerNodeQueryResponse(entity.Entity):
|
|||||||
servers = entity.CompoundListField('s',
|
servers = entity.CompoundListField('s',
|
||||||
ServerNodeEntry(),
|
ServerNodeEntry(),
|
||||||
store_default=False)
|
store_default=False)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class PrivateHostingState:
|
||||||
|
"""Combined state of whether we're hosting, whether we can, etc."""
|
||||||
|
unavailable_error: Optional[str] = None
|
||||||
|
party_code: Optional[str] = None
|
||||||
|
able_to_host: bool = False
|
||||||
|
tickets_to_host_now: int = 0
|
||||||
|
minutes_until_free_host: Optional[float] = None
|
||||||
|
free_host_minutes_remaining: Optional[float] = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class PrivateHostingConfig:
|
||||||
|
"""Config provided when hosting a private party."""
|
||||||
|
session_type: str = 'ffa'
|
||||||
|
playlist_name: str = 'Unknown'
|
||||||
|
randomize: bool = False
|
||||||
|
tutorial: bool = False
|
||||||
|
custom_team_names: Optional[List[str]] = None
|
||||||
|
custom_team_colors: Optional[List[List[float]]] = None
|
||||||
|
playlist: Optional[List[Dict[str, Any]]] = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class PrivatePartyConnectResult:
|
||||||
|
"""Info about a server we get back when connecting."""
|
||||||
|
error: Optional[str] = None
|
||||||
|
addr: Optional[str] = None
|
||||||
|
port: Optional[int] = None
|
||||||
|
password: Optional[str] = None
|
||||||
|
|||||||
@ -4,13 +4,16 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import field, dataclass
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from efro.dataclassio import prepped
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from typing import Optional, Tuple, List
|
from typing import Optional, Tuple, List
|
||||||
|
|
||||||
|
|
||||||
|
@prepped
|
||||||
@dataclass
|
@dataclass
|
||||||
class ServerConfig:
|
class ServerConfig:
|
||||||
"""Configuration for the server manager app (<appname>_server)."""
|
"""Configuration for the server manager app (<appname>_server)."""
|
||||||
|
|||||||
@ -1,295 +0,0 @@
|
|||||||
# Released under the MIT License. See LICENSE for details.
|
|
||||||
#
|
|
||||||
"""Custom functionality for dealing with dataclasses."""
|
|
||||||
# Note: We do lots of comparing of exact types here which is normally
|
|
||||||
# frowned upon (stuff like isinstance() is usually encouraged).
|
|
||||||
# pylint: disable=unidiomatic-typecheck
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import dataclasses
|
|
||||||
import inspect
|
|
||||||
from enum import Enum
|
|
||||||
from typing import TYPE_CHECKING, TypeVar, Generic
|
|
||||||
|
|
||||||
from efro.util import enum_by_value
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from typing import Any, Dict, Type, Tuple, Optional
|
|
||||||
|
|
||||||
T = TypeVar('T')
|
|
||||||
|
|
||||||
SIMPLE_NAMES_TO_TYPES: Dict[str, Type] = {
|
|
||||||
'int': int,
|
|
||||||
'bool': bool,
|
|
||||||
'str': str,
|
|
||||||
'float': float,
|
|
||||||
}
|
|
||||||
SIMPLE_TYPES_TO_NAMES = {tp: nm for nm, tp in SIMPLE_NAMES_TO_TYPES.items()}
|
|
||||||
|
|
||||||
|
|
||||||
def dataclass_to_dict(obj: Any, coerce_to_float: bool = True) -> dict:
|
|
||||||
"""Given a dataclass object, emit a json-friendly dict.
|
|
||||||
|
|
||||||
All values will be checked to ensure they match the types specified
|
|
||||||
on fields. Note that only a limited set of types is supported.
|
|
||||||
|
|
||||||
If coerce_to_float is True, integer values present on float typed fields
|
|
||||||
will be converted to floats in the dict output. If False, a TypeError
|
|
||||||
will be triggered.
|
|
||||||
"""
|
|
||||||
|
|
||||||
out = _Outputter(obj, create=True, coerce_to_float=coerce_to_float).run()
|
|
||||||
assert isinstance(out, dict)
|
|
||||||
return out
|
|
||||||
|
|
||||||
|
|
||||||
def dataclass_from_dict(cls: Type[T],
|
|
||||||
values: dict,
|
|
||||||
coerce_to_float: bool = True) -> T:
|
|
||||||
"""Given a dict, instantiates a dataclass of the given type.
|
|
||||||
|
|
||||||
The dict must be in the json-friendly format as emitted from
|
|
||||||
dataclass_to_dict. This means that sequence values such as tuples or
|
|
||||||
sets should be passed as lists, enums should be passed as their
|
|
||||||
associated values, and nested dataclasses should be passed as dicts.
|
|
||||||
|
|
||||||
If coerce_to_float is True, int values passed for float typed fields
|
|
||||||
will be converted to float values. Otherwise a TypeError is raised.
|
|
||||||
"""
|
|
||||||
return _Inputter(cls, coerce_to_float=coerce_to_float).run(values)
|
|
||||||
|
|
||||||
|
|
||||||
def dataclass_validate(obj: Any, coerce_to_float: bool = True) -> None:
|
|
||||||
"""Ensure that current values in a dataclass are the correct types."""
|
|
||||||
_Outputter(obj, create=False, coerce_to_float=coerce_to_float).run()
|
|
||||||
|
|
||||||
|
|
||||||
def _field_type_str(cls: Type, field: dataclasses.Field) -> str:
|
|
||||||
# We expect to be operating under 'from __future__ import annotations'
|
|
||||||
# so field types should always be strings for us; not actual types.
|
|
||||||
# (Can pull this check out once we get to Python 3.10)
|
|
||||||
typestr: str = field.type # type: ignore
|
|
||||||
|
|
||||||
if not isinstance(typestr, str):
|
|
||||||
raise RuntimeError(
|
|
||||||
f'Dataclass {cls.__name__} seems to have'
|
|
||||||
f' been created without "from __future__ import annotations";'
|
|
||||||
f' those dataclasses are unsupported here.')
|
|
||||||
return typestr
|
|
||||||
|
|
||||||
|
|
||||||
def _raise_type_error(fieldpath: str, valuetype: Type,
|
|
||||||
expected: Tuple[Type, ...]) -> None:
|
|
||||||
"""Raise an error when a field value's type does not match expected."""
|
|
||||||
assert isinstance(expected, tuple)
|
|
||||||
assert all(isinstance(e, type) for e in expected)
|
|
||||||
if len(expected) == 1:
|
|
||||||
expected_str = expected[0].__name__
|
|
||||||
else:
|
|
||||||
names = ', '.join(t.__name__ for t in expected)
|
|
||||||
expected_str = f'Union[{names}]'
|
|
||||||
raise TypeError(f'Invalid value type for "{fieldpath}";'
|
|
||||||
f' expected "{expected_str}", got'
|
|
||||||
f' "{valuetype.__name__}".')
|
|
||||||
|
|
||||||
|
|
||||||
class _Outputter:
|
|
||||||
|
|
||||||
def __init__(self, obj: Any, create: bool, coerce_to_float: bool) -> None:
|
|
||||||
self._obj = obj
|
|
||||||
self._create = create
|
|
||||||
self._coerce_to_float = coerce_to_float
|
|
||||||
|
|
||||||
def run(self) -> Any:
|
|
||||||
"""Do the thing."""
|
|
||||||
return self._dataclass_to_output(self._obj, '')
|
|
||||||
|
|
||||||
def _value_to_output(self, fieldpath: str, typestr: str,
|
|
||||||
value: Any) -> Any:
|
|
||||||
# pylint: disable=too-many-return-statements
|
|
||||||
# pylint: disable=too-many-branches
|
|
||||||
|
|
||||||
# For simple flat types, look for exact matches:
|
|
||||||
simpletype = SIMPLE_NAMES_TO_TYPES.get(typestr)
|
|
||||||
if simpletype is not None:
|
|
||||||
if type(value) is not simpletype:
|
|
||||||
# Special case: if they want to coerce ints to floats, do so.
|
|
||||||
if (self._coerce_to_float and simpletype is float
|
|
||||||
and type(value) is int):
|
|
||||||
return float(value) if self._create else None
|
|
||||||
_raise_type_error(fieldpath, type(value), (simpletype, ))
|
|
||||||
return value
|
|
||||||
|
|
||||||
if typestr.startswith('Optional[') and typestr.endswith(']'):
|
|
||||||
subtypestr = typestr[9:-1]
|
|
||||||
# Handle the 'None' case special and do the default otherwise.
|
|
||||||
if value is None:
|
|
||||||
return None
|
|
||||||
return self._value_to_output(fieldpath, subtypestr, value)
|
|
||||||
|
|
||||||
if typestr.startswith('List[') and typestr.endswith(']'):
|
|
||||||
subtypestr = typestr[5:-1]
|
|
||||||
if not isinstance(value, list):
|
|
||||||
raise TypeError(f'Expected a list for {fieldpath};'
|
|
||||||
f' found a {type(value)}')
|
|
||||||
if self._create:
|
|
||||||
return [
|
|
||||||
self._value_to_output(fieldpath, subtypestr, x)
|
|
||||||
for x in value
|
|
||||||
]
|
|
||||||
for x in value:
|
|
||||||
self._value_to_output(fieldpath, subtypestr, x)
|
|
||||||
return None
|
|
||||||
|
|
||||||
if typestr.startswith('Set[') and typestr.endswith(']'):
|
|
||||||
subtypestr = typestr[4:-1]
|
|
||||||
if not isinstance(value, set):
|
|
||||||
raise TypeError(f'Expected a set for {fieldpath};'
|
|
||||||
f' found a {type(value)}')
|
|
||||||
if self._create:
|
|
||||||
# Note: we output json-friendly values so this becomes a list.
|
|
||||||
return [
|
|
||||||
self._value_to_output(fieldpath, subtypestr, x)
|
|
||||||
for x in value
|
|
||||||
]
|
|
||||||
for x in value:
|
|
||||||
self._value_to_output(fieldpath, subtypestr, x)
|
|
||||||
return None
|
|
||||||
|
|
||||||
if dataclasses.is_dataclass(value):
|
|
||||||
return self._dataclass_to_output(value, fieldpath)
|
|
||||||
|
|
||||||
if isinstance(value, Enum):
|
|
||||||
enumvalue = value.value
|
|
||||||
if type(enumvalue) not in SIMPLE_TYPES_TO_NAMES:
|
|
||||||
raise TypeError(f'Invalid enum value type {type(enumvalue)}'
|
|
||||||
f' for "{fieldpath}".')
|
|
||||||
return enumvalue
|
|
||||||
|
|
||||||
raise TypeError(
|
|
||||||
f"Field '{fieldpath}' of type '{typestr}' is unsupported here.")
|
|
||||||
|
|
||||||
def _dataclass_to_output(self, obj: Any, fieldpath: str) -> Any:
|
|
||||||
if not dataclasses.is_dataclass(obj):
|
|
||||||
raise TypeError(f'Passed obj {obj} is not a dataclass.')
|
|
||||||
fields = dataclasses.fields(obj)
|
|
||||||
out: Optional[Dict[str, Any]] = {} if self._create else None
|
|
||||||
|
|
||||||
for field in fields:
|
|
||||||
fieldname = field.name
|
|
||||||
|
|
||||||
if fieldpath:
|
|
||||||
subfieldpath = f'{fieldpath}.{fieldname}'
|
|
||||||
else:
|
|
||||||
subfieldpath = fieldname
|
|
||||||
typestr = _field_type_str(type(obj), field)
|
|
||||||
value = getattr(obj, fieldname)
|
|
||||||
outvalue = self._value_to_output(subfieldpath, typestr, value)
|
|
||||||
if self._create:
|
|
||||||
assert out is not None
|
|
||||||
out[fieldname] = outvalue
|
|
||||||
|
|
||||||
return out
|
|
||||||
|
|
||||||
|
|
||||||
class _Inputter(Generic[T]):
|
|
||||||
|
|
||||||
def __init__(self, cls: Type[T], coerce_to_float: bool):
|
|
||||||
self._cls = cls
|
|
||||||
self._coerce_to_float = coerce_to_float
|
|
||||||
|
|
||||||
def run(self, values: dict) -> T:
|
|
||||||
"""Do the thing."""
|
|
||||||
return self._dataclass_from_input( # type: ignore
|
|
||||||
self._cls, '', values)
|
|
||||||
|
|
||||||
def _value_from_input(self, cls: Type, fieldpath: str, typestr: str,
|
|
||||||
value: Any) -> Any:
|
|
||||||
"""Convert an assigned value to what a dataclass field expects."""
|
|
||||||
# pylint: disable=too-many-return-statements
|
|
||||||
|
|
||||||
simpletype = SIMPLE_NAMES_TO_TYPES.get(typestr)
|
|
||||||
if simpletype is not None:
|
|
||||||
if type(value) is not simpletype:
|
|
||||||
# Special case: if they want to coerce ints to floats, do so.
|
|
||||||
if (self._coerce_to_float and simpletype is float
|
|
||||||
and type(value) is int):
|
|
||||||
return float(value)
|
|
||||||
_raise_type_error(fieldpath, type(value), (simpletype, ))
|
|
||||||
return value
|
|
||||||
if typestr.startswith('List[') and typestr.endswith(']'):
|
|
||||||
return self._sequence_from_input(cls, fieldpath, typestr, value,
|
|
||||||
'List', list)
|
|
||||||
if typestr.startswith('Set[') and typestr.endswith(']'):
|
|
||||||
return self._sequence_from_input(cls, fieldpath, typestr, value,
|
|
||||||
'Set', set)
|
|
||||||
if typestr.startswith('Optional[') and typestr.endswith(']'):
|
|
||||||
subtypestr = typestr[9:-1]
|
|
||||||
# Handle the 'None' case special and do the default
|
|
||||||
# thing otherwise.
|
|
||||||
if value is None:
|
|
||||||
return None
|
|
||||||
return self._value_from_input(cls, fieldpath, subtypestr, value)
|
|
||||||
|
|
||||||
# Ok, its not a builtin type. It might be an enum or nested dataclass.
|
|
||||||
cls2 = getattr(inspect.getmodule(cls), typestr, None)
|
|
||||||
if cls2 is None:
|
|
||||||
raise RuntimeError(f"Unable to resolve '{typestr}'"
|
|
||||||
f" used by class '{cls.__name__}';"
|
|
||||||
f' make sure all nested types are declared'
|
|
||||||
f' in the global namespace of the module where'
|
|
||||||
f" '{cls.__name__} is defined.")
|
|
||||||
|
|
||||||
if dataclasses.is_dataclass(cls2):
|
|
||||||
return self._dataclass_from_input(cls2, fieldpath, value)
|
|
||||||
|
|
||||||
if issubclass(cls2, Enum):
|
|
||||||
return enum_by_value(cls2, value)
|
|
||||||
|
|
||||||
raise TypeError(
|
|
||||||
f"Field '{fieldpath}' of type '{typestr}' is unsupported here.")
|
|
||||||
|
|
||||||
def _dataclass_from_input(self, cls: Type, fieldpath: str,
|
|
||||||
values: dict) -> Any:
|
|
||||||
"""Given a dict, instantiates a dataclass of the given type.
|
|
||||||
|
|
||||||
The dict must be in the json-friendly format as emitted from
|
|
||||||
dataclass_to_dict. This means that sequence values such as tuples or
|
|
||||||
sets should be passed as lists, enums should be passed as their
|
|
||||||
associated values, and nested dataclasses should be passed as dicts.
|
|
||||||
"""
|
|
||||||
if not dataclasses.is_dataclass(cls):
|
|
||||||
raise TypeError(f'Passed class {cls} is not a dataclass.')
|
|
||||||
if not isinstance(values, dict):
|
|
||||||
raise TypeError("Expected a dict for 'values' arg.")
|
|
||||||
|
|
||||||
# noinspection PyDataclass
|
|
||||||
fields = dataclasses.fields(cls)
|
|
||||||
fields_by_name = {f.name: f for f in fields}
|
|
||||||
args: Dict[str, Any] = {}
|
|
||||||
for key, value in values.items():
|
|
||||||
field = fields_by_name.get(key)
|
|
||||||
if field is None:
|
|
||||||
raise AttributeError(f"'{cls.__name__}' has no '{key}' field.")
|
|
||||||
|
|
||||||
typestr = _field_type_str(cls, field)
|
|
||||||
|
|
||||||
subfieldpath = (f'{fieldpath}.{field.name}'
|
|
||||||
if fieldpath else field.name)
|
|
||||||
args[key] = self._value_from_input(cls, subfieldpath, typestr,
|
|
||||||
value)
|
|
||||||
|
|
||||||
return cls(**args)
|
|
||||||
|
|
||||||
def _sequence_from_input(self, cls: Type, fieldpath: str, typestr: str,
|
|
||||||
value: Any, seqtypestr: str,
|
|
||||||
seqtype: Type) -> Any:
|
|
||||||
# Because we are json-centric, we expect a list for all sequences.
|
|
||||||
if type(value) is not list:
|
|
||||||
raise TypeError(f'Invalid input value for "{fieldpath}";'
|
|
||||||
f' expected a list, got a {type(value).__name__}')
|
|
||||||
subtypestr = typestr[len(seqtypestr) + 1:-1]
|
|
||||||
return seqtype(
|
|
||||||
self._value_from_input(cls, fieldpath, subtypestr, i)
|
|
||||||
for i in value)
|
|
||||||
874
tools/efro/dataclassio.py
Normal file
874
tools/efro/dataclassio.py
Normal file
@ -0,0 +1,874 @@
|
|||||||
|
# Released under the MIT License. See LICENSE for details.
|
||||||
|
#
|
||||||
|
"""Functionality for importing, exporting, and validating dataclasses.
|
||||||
|
|
||||||
|
This allows complex nested dataclasses to be flattened to json-compatible
|
||||||
|
data and restored from said data. It also gracefully handles and preserves
|
||||||
|
unrecognized attribute data, allowing older clients to interact with newer
|
||||||
|
data formats in a nondestructive manner.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Note: We do lots of comparing of exact types here which is normally
|
||||||
|
# frowned upon (stuff like isinstance() is usually encouraged).
|
||||||
|
# pylint: disable=unidiomatic-typecheck
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from enum import Enum
|
||||||
|
import dataclasses
|
||||||
|
import typing
|
||||||
|
from typing import TYPE_CHECKING, TypeVar, Generic, get_type_hints
|
||||||
|
|
||||||
|
from efro.util import enum_by_value
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from typing import Any, Dict, Type, Tuple, Optional
|
||||||
|
|
||||||
|
T = TypeVar('T')
|
||||||
|
|
||||||
|
# Types which we can pass through as-is.
|
||||||
|
SIMPLE_TYPES = {int, bool, str, float, type(None)}
|
||||||
|
|
||||||
|
# How deep we go when prepping nested types
|
||||||
|
# (basically for detecting recursive types)
|
||||||
|
MAX_RECURSION = 10
|
||||||
|
|
||||||
|
# Attr name for data we store on dataclass types as part of prep.
|
||||||
|
PREP_ATTR = '_DCIOPREP'
|
||||||
|
|
||||||
|
# Attr name for dict of extra attributes included on dataclass instances.
|
||||||
|
# Note that this is only added if extra attributes are present.
|
||||||
|
EXTRA_ATTRS_ATTR = '_DCIOEXATTRS'
|
||||||
|
|
||||||
|
|
||||||
|
def dataclass_to_dict(obj: Any, coerce_to_float: bool = True) -> dict:
    """Given a dataclass object, return a json-friendly dict.

    All values are checked against the types declared on the dataclass
    fields; only a limited set of types and data configurations is
    supported.

    Values typed as Any must already consist of types directly supported
    by json. Types that Python's json module would implicitly translate
    (such as tuples) are rejected, since accepting them would break
    lossless round-tripping of data.

    If coerce_to_float is True, integer values present on float typed
    fields are converted to floats in the output; if False, a TypeError
    is raised for them.
    """
    outputter = _Outputter(obj, create=True, coerce_to_float=coerce_to_float)
    result = outputter.run()
    assert isinstance(result, dict)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
def dataclass_from_dict(cls: Type[T],
                        values: dict,
                        coerce_to_float: bool = True,
                        allow_unknown_attrs: bool = True,
                        discard_unknown_attrs: bool = False) -> T:
    """Given a dict, return a dataclass of a given type.

    The dict must be in the json-friendly format as emitted from
    dataclass_to_dict: sequence values such as tuples or sets are passed
    as lists, enums as their underlying values, nested dataclasses as
    dicts, etc.

    All values are checked to ensure their types/values are valid.

    Data for attributes of type Any must consist of types directly
    supported by json; implicit translations done by Python's json
    module (such as tuple -> list) are not accepted, as they would break
    the ability to do a lossless round-trip with data.

    If coerce_to_float is True, int values passed for float typed fields
    will be converted to float values. Otherwise a TypeError is raised.

    If allow_unknown_attrs is False, AttributeErrors will be raised for
    attributes present in the dict but not on the data class. Otherwise
    they will be preserved as part of the instance and included if it is
    exported back to a dict, unless discard_unknown_attrs is True, in
    which case they will simply be discarded.
    """
    inputter = _Inputter(cls,
                         coerce_to_float=coerce_to_float,
                         allow_unknown_attrs=allow_unknown_attrs,
                         discard_unknown_attrs=discard_unknown_attrs)
    return inputter.run(values)
|
||||||
|
|
||||||
|
|
||||||
|
def dataclass_validate(obj: Any, coerce_to_float: bool = True) -> None:
    """Ensure that values in a dataclass instance are the correct types."""
    # Run a standard output pass with creation disabled; this performs
    # all type validation without building any result data.
    outputter = _Outputter(obj, create=False, coerce_to_float=coerce_to_float)
    outputter.run()
|
||||||
|
|
||||||
|
|
||||||
|
def dataclass_prep(cls: Type,
                   extra_types: Optional[Dict[str, Type]] = None) -> None:
    """Prep a dataclass type for use with this module's functionality.

    Prepping ensures that all types contained in a data class as well as
    the usage of said types are supported by this module and pre-builds
    necessary constructs needed for encoding/decoding/etc.

    Prepping will happen on-the-fly as needed, but a warning will be
    emitted in such cases, as it is better to explicitly prep all used types
    early in a process to ensure any invalid types or configuration are
    caught immediately.

    Prepping a dataclass involves evaluating its type annotations, which,
    as of PEP 563, are stored simply as strings. This evaluation is done
    in the module namespace containing the class, so all referenced types
    must be defined at that level. The exception is Typing types (Optional,
    Union, etc.) which are often defined under an 'if TYPE_CHECKING'
    conditional and thus not available at runtime, so are explicitly made
    available during annotation evaluation.

    Args:
        cls: The dataclass type to prep.
        extra_types: Optional extra names to make available while
            evaluating the class' annotations (for types not present in
            the class' module namespace at runtime).
    """
    # Note: annotation fixed to Optional[...] to match the declared
    # default of None (and PrepSession.__init__'s own signature).
    PrepSession(explicit=True,
                extra_types=extra_types).prep_dataclass(cls, recursion_level=0)
|
||||||
|
|
||||||
|
|
||||||
|
def prepped(cls: Type[T]) -> Type[T]:
    """Class decorator to easily prep a dataclass at definition time.

    Note that in some cases it may not be possible to prep a dataclass
    immediately (such as when its type annotations refer to forward-declared
    types). In these cases, dataclass_prep() should be explicitly called for
    the class once it is safe to do so.
    """
    # Prep in place and hand the class back unchanged so this works
    # directly as a decorator.
    dataclass_prep(cls)
    return cls
|
||||||
|
|
||||||
|
|
||||||
|
@dataclasses.dataclass
class PrepData:
    """Data we prepare and cache for a class during prep.

    This data is used as part of the encoding/decoding/validating process.
    """

    # Resolved annotation data with 'live' classes
    # (field name -> concrete type, as produced by get_type_hints()).
    annotations: Dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
|
class PrepSession:
    """Context for a prep.

    Tracks whether the prep was requested explicitly (vs on-the-fly) and
    any extra type names to make available during annotation evaluation.
    """

    def __init__(self, explicit: bool, extra_types: Optional[Dict[str, Type]]):
        # Explicit preps skip the 'implicit prep' warning below.
        self.explicit = explicit
        # Extra names merged into the locals used by get_type_hints().
        self.extra_types = extra_types

    def prep_dataclass(self, cls: Type, recursion_level: int) -> PrepData:
        """Run prep on a dataclass if necessary and return its prep data."""

        # Results are cached on the class itself; reuse if present.
        existing_data = getattr(cls, PREP_ATTR, None)
        if existing_data is not None:
            assert isinstance(existing_data, PrepData)
            return existing_data

        # If we run into classes containing themselves, we may have
        # to do something smarter to handle it.
        if recursion_level > MAX_RECURSION:
            raise RuntimeError('Max recursion exceeded.')

        # We should only be passed classes which are dataclasses.
        if not isinstance(cls, type) or not dataclasses.is_dataclass(cls):
            raise TypeError(f'Passed arg {cls} is not a dataclass type.')

        # Generate a warning on non-explicit preps; we prefer prep to
        # happen explicitly at runtime so errors can be detected early on.
        if not self.explicit:
            logging.warning(
                'efro.dataclassio: implicitly prepping dataclass: %s.'
                ' It is highly recommended to explicitly prep dataclasses'
                ' as soon as possible after definition (via'
                ' efro.dataclassio.dataclass_prep() or the'
                ' @efro.dataclassio.prepped decorator).', cls)

        # Typing constructs are commonly imported only under TYPE_CHECKING
        # and thus absent at runtime; supply them explicitly as locals.
        localns: Dict[str, Any] = {
            'Optional': typing.Optional,
            'Union': typing.Union,
            'List': typing.List,
            'Tuple': typing.Tuple,
            'Sequence': typing.Sequence,
            'Set': typing.Set,
            'Any': typing.Any,
            'Dict': typing.Dict,
        }
        if self.extra_types is not None:
            localns.update(self.extra_types)

        try:
            # Use default globalns which should be the class' module,
            # but provide our own locals to cover things like typing.*
            # which are generally not actually present at runtime for us.
            resolved_annotations = get_type_hints(cls, localns=localns)
        except Exception as exc:
            raise RuntimeError(
                f'Dataclass prep failed with error: {exc}.') from exc

        # Ok; we've resolved actual types for this dataclass.
        # now recurse through them, verifying that we support all contained
        # types and prepping any contained dataclass types.
        for attrname, attrtype in resolved_annotations.items():
            self.prep_type(cls,
                           attrname,
                           attrtype,
                           recursion_level=recursion_level + 1)

        # Success! Store our resolved stuff with the class and we're done.
        prepdata = PrepData(annotations=resolved_annotations)
        setattr(cls, PREP_ATTR, prepdata)
        return prepdata

    def prep_type(self, cls: Type, attrname: str, anntype: Any,
                  recursion_level: int) -> None:
        """Run prep on a single resolved annotation type.

        Verifies the type is supported by dataclassio and recursively
        preps any contained dataclass/enum types.
        """
        # pylint: disable=too-many-return-statements
        # pylint: disable=too-many-branches

        # If we run into classes containing themselves, we may have
        # to do something smarter to handle it.
        if recursion_level > MAX_RECURSION:
            raise RuntimeError('Max recursion exceeded.')

        origin = _get_origin(anntype)

        if origin is typing.Union:
            self.prep_union(cls,
                            attrname,
                            anntype,
                            recursion_level=recursion_level + 1)
            return

        # 'Any' is always acceptable; actual values get checked later
        # per-instance.
        if anntype is typing.Any:
            return

        # Everything below this point assumes the annotation type resolves
        # to a concrete type.
        if not isinstance(origin, type):
            raise TypeError(
                f'Unsupported type found for \'{attrname}\' on {cls}:'
                f' {anntype}')

        if origin in SIMPLE_TYPES:
            return

        # For sets and lists, check out their single contained type (if any).
        if origin in (list, set):
            childtypes = typing.get_args(anntype)
            if len(childtypes) == 0:
                # This is equivalent to Any; nothing else needs checking.
                return
            if len(childtypes) > 1:
                raise TypeError(
                    f'Unrecognized typing arg count {len(childtypes)}'
                    f" for {anntype} attr '{attrname}' on {cls}")
            self.prep_type(cls,
                           attrname,
                           childtypes[0],
                           recursion_level=recursion_level + 1)
            return

        if origin is dict:
            childtypes = typing.get_args(anntype)
            assert len(childtypes) in (0, 2)

            # For key types we support Any, str, int,
            # and Enums with uniform str/int values.
            if not childtypes or childtypes[0] is typing.Any:
                # 'Any' needs no further checks (just checked per-instance).
                pass
            elif childtypes[0] in (str, int):
                # str and int are all good as keys.
                pass
            elif issubclass(childtypes[0], Enum):
                # Allow our usual str or int enum types as keys.
                self.prep_enum(childtypes[0])
            else:
                raise TypeError(
                    f'Dict key type {childtypes[0]} for \'{attrname}\''
                    f' on {cls} is not supported by dataclassio.')

            # For value types we support any of our normal types.
            if not childtypes or _get_origin(childtypes[1]) is typing.Any:
                # 'Any' needs no further checks (just checked per-instance).
                pass
            else:
                self.prep_type(cls,
                               attrname,
                               childtypes[1],
                               recursion_level=recursion_level + 1)
            return

        if issubclass(origin, Enum):
            self.prep_enum(origin)
            return

        # Nested dataclasses get prepped recursively.
        if dataclasses.is_dataclass(origin):
            self.prep_dataclass(origin, recursion_level=recursion_level + 1)
            return

        raise TypeError(f"Attr '{attrname}' on {cls} contains type '{anntype}'"
                        f' which is not supported by dataclassio.')

    def prep_union(self, cls: Type, attrname: str, anntype: Any,
                   recursion_level: int) -> None:
        """Run prep on a Union type.

        Only Optional-style unions (exactly two members, one of which is
        None) are supported.
        """
        typeargs = typing.get_args(anntype)
        if (len(typeargs) != 2
                or len([c for c in typeargs if c is type(None)]) != 1):
            raise TypeError(f'Union {anntype} for attr \'{attrname}\' on'
                            f' {cls} is not supported by dataclassio;'
                            f' only 2 member Unions with one type being None'
                            f' are supported.')
        for childtype in typeargs:
            self.prep_type(cls,
                           attrname,
                           childtype,
                           recursion_level=recursion_level + 1)

    def prep_enum(self, enumtype: Type[Enum]) -> None:
        """Run prep on an enum type.

        Verifies that all values are str or int and of a single uniform
        type across the enum.
        """

        valtype: Any = None

        # We currently support enums with str or int values; fail if we
        # find any others.
        for enumval in enumtype:
            if not isinstance(enumval.value, (str, int)):
                raise TypeError(f'Enum value {enumval} has value type'
                                f' {type(enumval.value)}; only str and int is'
                                f' supported by dataclassio.')
            if valtype is None:
                valtype = type(enumval.value)
            else:
                if type(enumval.value) is not valtype:
                    raise TypeError(f'Enum type {enumtype} has multiple'
                                    f' value types; dataclassio requires'
                                    f' them to be uniform.')
|
||||||
|
|
||||||
|
|
||||||
|
def _is_valid_json(obj: Any) -> bool:
|
||||||
|
"""Return whether a value consists solely of json-supported types.
|
||||||
|
|
||||||
|
Note that this does not include things like tuples which are
|
||||||
|
implicitly translated to lists by python's json module.
|
||||||
|
"""
|
||||||
|
if obj is None:
|
||||||
|
return True
|
||||||
|
|
||||||
|
objtype = type(obj)
|
||||||
|
if objtype in (int, float, str, bool):
|
||||||
|
return True
|
||||||
|
if objtype is dict:
|
||||||
|
# JSON 'objects' supports only string dict keys, but all value types.
|
||||||
|
return all(
|
||||||
|
type(k) is str and _is_valid_json(v) for k, v in obj.items())
|
||||||
|
if objtype is list:
|
||||||
|
return all(_is_valid_json(elem) for elem in obj)
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _raise_type_error(fieldpath: str, valuetype: Type,
|
||||||
|
expected: Tuple[Type, ...]) -> None:
|
||||||
|
"""Raise an error when a field value's type does not match expected."""
|
||||||
|
assert isinstance(expected, tuple)
|
||||||
|
assert all(isinstance(e, type) for e in expected)
|
||||||
|
if len(expected) == 1:
|
||||||
|
expected_str = expected[0].__name__
|
||||||
|
else:
|
||||||
|
names = ', '.join(t.__name__ for t in expected)
|
||||||
|
expected_str = f'Union[{names}]'
|
||||||
|
raise TypeError(f'Invalid value type for "{fieldpath}";'
|
||||||
|
f' expected "{expected_str}", got'
|
||||||
|
f' "{valuetype.__name__}".')
|
||||||
|
|
||||||
|
|
||||||
|
def _get_origin(anntype: Any) -> Any:
|
||||||
|
"""Given a type annotation, return its origin or itself if there is none.
|
||||||
|
|
||||||
|
This differs from typing.get_origin in that it will never return None.
|
||||||
|
This lets us use the same code path for handling typing.List
|
||||||
|
that we do for handling list, which is good since they can be used
|
||||||
|
interchangeably in annotations.
|
||||||
|
"""
|
||||||
|
origin = typing.get_origin(anntype)
|
||||||
|
return anntype if origin is None else origin
|
||||||
|
|
||||||
|
|
||||||
|
class _Outputter:
    """Validates or exports data contained in a dataclass instance.

    With create=True this produces a json-friendly dict; with
    create=False it runs the same checks but builds no output
    (pure validation).
    """

    def __init__(self, obj: Any, create: bool, coerce_to_float: bool) -> None:
        # The dataclass instance to process.
        self._obj = obj
        # Whether to build output data (False = validate only).
        self._create = create
        # Whether int values on float fields are converted vs rejected.
        self._coerce_to_float = coerce_to_float

    def run(self) -> Any:
        """Do the thing."""
        return self._process_dataclass(type(self._obj), self._obj, '')

    def _process_dataclass(self, cls: Type, obj: Any, fieldpath: str) -> Any:
        """Validate (and optionally export) one dataclass instance."""
        # Implicit prep; emits a warning (see PrepSession) but proceeds.
        prep = PrepSession(explicit=False,
                           extra_types=None).prep_dataclass(type(obj),
                                                            recursion_level=0)
        fields = dataclasses.fields(obj)
        # out stays None in validate-only mode.
        out: Optional[Dict[str, Any]] = {} if self._create else None
        for field in fields:
            fieldname = field.name
            # fieldpath tracks dotted nesting purely for error messages.
            if fieldpath:
                subfieldpath = f'{fieldpath}.{fieldname}'
            else:
                subfieldpath = fieldname
            fieldtype = prep.annotations[fieldname]
            value = getattr(obj, fieldname)
            outvalue = self._process_value(cls, subfieldpath, fieldtype, value)
            if self._create:
                assert out is not None
                out[fieldname] = outvalue

        # If there's extra-attrs stored on us, check/include them.
        extra_attrs = getattr(obj, EXTRA_ATTRS_ATTR, None)
        if isinstance(extra_attrs, dict):
            if not _is_valid_json(extra_attrs):
                raise TypeError(
                    f'Extra attrs on {fieldpath} contains data type(s)'
                    f' not supported by json.')
            if self._create:
                assert out is not None
                out.update(extra_attrs)
        return out

    def _process_value(self, cls: Type, fieldpath: str, anntype: Any,
                       value: Any) -> Any:
        """Validate (and optionally convert) a single field value.

        Returns the json-friendly value when creating, else None.
        """
        # pylint: disable=too-many-return-statements
        # pylint: disable=too-many-branches

        origin = _get_origin(anntype)

        if origin is typing.Any:
            if not _is_valid_json(value):
                raise TypeError(f'Invalid value type for \'{fieldpath}\';'
                                f" 'Any' typed values must be types directly"
                                f' supported by json; got'
                                f" '{type(value).__name__}'.")
            return value if self._create else None

        if origin is typing.Union:
            # Currently the only unions we support are None/Value
            # (translated from Optional), which we verified on prep.
            # So let's treat this as a simple optional case.
            if value is None:
                return None
            childanntypes_l = [
                c for c in typing.get_args(anntype) if c is not type(None)
            ]
            assert len(childanntypes_l) == 1
            return self._process_value(cls, fieldpath, childanntypes_l[0],
                                       value)

        # Everything below this point assumes the annotation type resolves
        # to a concrete type. (This should have been verified at prep time).
        assert isinstance(origin, type)

        # For simple flat types, look for exact matches:
        if origin in SIMPLE_TYPES:
            if type(value) is not origin:
                # Special case: if they want to coerce ints to floats, do so.
                if (self._coerce_to_float and origin is float
                        and type(value) is int):
                    return float(value) if self._create else None
                _raise_type_error(fieldpath, type(value), (origin, ))
            return value if self._create else None

        if origin is list:
            if not isinstance(value, list):
                raise TypeError(f'Expected a list for {fieldpath};'
                                f' found a {type(value)}')
            childanntypes = typing.get_args(anntype)

            # 'Any' type children; make sure they are valid json values.
            if len(childanntypes) == 0 or childanntypes[0] is typing.Any:
                for i, child in enumerate(value):
                    if not _is_valid_json(child):
                        raise TypeError(
                            f'Item {i} of {fieldpath} contains'
                            f' data type(s) not supported by json.')
                # Hmm; should we do a copy here?
                return value if self._create else None

            # We contain elements of some specified type.
            assert len(childanntypes) == 1
            if self._create:
                return [
                    self._process_value(cls, fieldpath, childanntypes[0], x)
                    for x in value
                ]
            # Validate-only path: process children purely for their checks.
            for x in value:
                self._process_value(cls, fieldpath, childanntypes[0], x)
            return None

        if origin is set:
            if not isinstance(value, set):
                raise TypeError(f'Expected a set for {fieldpath};'
                                f' found a {type(value)}')
            childanntypes = typing.get_args(anntype)

            # 'Any' type children; make sure they are valid Any values.
            if len(childanntypes) == 0 or childanntypes[0] is typing.Any:
                for child in value:
                    if not _is_valid_json(child):
                        raise TypeError(
                            f'Set at {fieldpath} contains'
                            f' data type(s) not supported by json.')
                # Sets are exported as lists since json has no set type.
                return list(value) if self._create else None

            # We contain elements of some specified type.
            assert len(childanntypes) == 1
            if self._create:
                # Note: we output json-friendly values so this becomes
                # a list.
                return [
                    self._process_value(cls, fieldpath, childanntypes[0], x)
                    for x in value
                ]
            for x in value:
                self._process_value(cls, fieldpath, childanntypes[0], x)
            return None

        if origin is dict:
            return self._process_dict(cls, fieldpath, anntype, value)

        if dataclasses.is_dataclass(origin):
            if not isinstance(value, origin):
                raise TypeError(f'Expected a {origin} for {fieldpath};'
                                f' found a {type(value)}.')
            return self._process_dataclass(cls, value, fieldpath)

        if issubclass(origin, Enum):
            if not isinstance(value, origin):
                raise TypeError(f'Expected a {origin} for {fieldpath};'
                                f' found a {type(value)}.')
            # At prep-time we verified that these enums had valid value
            # types, so we can blindly return it here.
            return value.value if self._create else None

        raise TypeError(
            f"Field '{fieldpath}' of type '{anntype}' is unsupported here.")

    def _process_dict(self, cls: Type, fieldpath: str, anntype: Any,
                      value: dict) -> Any:
        """Validate (and optionally export) a dict field value."""
        # pylint: disable=too-many-branches
        if not isinstance(value, dict):
            raise TypeError(f'Expected a dict for {fieldpath};'
                            f' found a {type(value)}.')
        childtypes = typing.get_args(anntype)
        assert len(childtypes) in (0, 2)

        # We treat 'Any' dicts simply as json; we don't do any translating.
        if not childtypes or childtypes[0] is typing.Any:
            if not isinstance(value, dict) or not _is_valid_json(value):
                raise TypeError(
                    f'Invalid value for Dict[Any, Any]'
                    f' at \'{fieldpath}\' on {cls}; all keys and values'
                    f' must be json-compatible when dict type is Any.')
            return value if self._create else None

        # Ok; we've got a definite key type (which we verified as valid
        # during prep). Make sure all keys match it.
        out: Optional[Dict] = {} if self._create else None
        keyanntype, valanntype = childtypes

        # str keys we just export directly since that's supported by json.
        if keyanntype is str:
            for key, val in value.items():
                if not isinstance(key, str):
                    raise TypeError(f'Got invalid key type {type(key)} for'
                                    f' dict key at \'{fieldpath}\' on {cls};'
                                    f' expected {keyanntype}.')
                outval = self._process_value(cls, fieldpath, valanntype, val)
                if self._create:
                    assert out is not None
                    out[key] = outval

        # int keys are stored in json as str versions of themselves.
        elif keyanntype is int:
            for key, val in value.items():
                if not isinstance(key, int):
                    raise TypeError(f'Got invalid key type {type(key)} for'
                                    f' dict key at \'{fieldpath}\' on {cls};'
                                    f' expected an int.')
                outval = self._process_value(cls, fieldpath, valanntype, val)
                if self._create:
                    assert out is not None
                    out[str(key)] = outval

        # Enum keys are exported as the str form of their value.
        elif issubclass(keyanntype, Enum):
            for key, val in value.items():
                if not isinstance(key, keyanntype):
                    raise TypeError(f'Got invalid key type {type(key)} for'
                                    f' dict key at \'{fieldpath}\' on {cls};'
                                    f' expected a {keyanntype}.')
                outval = self._process_value(cls, fieldpath, valanntype, val)
                if self._create:
                    assert out is not None
                    out[str(key.value)] = outval
        else:
            raise RuntimeError(f'Unhandled dict out-key-type {keyanntype}')

        return out
|
||||||
|
|
||||||
|
|
||||||
|
class _Inputter(Generic[T]):
|
||||||
|
|
||||||
|
def __init__(self,
             cls: Type[T],
             coerce_to_float: bool,
             allow_unknown_attrs: bool = True,
             discard_unknown_attrs: bool = False):
    """Store input config; rejects contradictory option combinations."""
    # Discarding unknown attrs only makes sense when they are allowed.
    if not allow_unknown_attrs and discard_unknown_attrs:
        raise ValueError('discard_unknown_attrs cannot be True'
                         ' when allow_unknown_attrs is False.')
    self._cls = cls
    self._coerce_to_float = coerce_to_float
    self._allow_unknown_attrs = allow_unknown_attrs
    self._discard_unknown_attrs = discard_unknown_attrs
|
||||||
|
|
||||||
|
def run(self, values: dict) -> T:
    """Decode the given dict into an instance of our target class."""
    result = self._dataclass_from_input(self._cls, '', values)
    assert isinstance(result, self._cls)
    return result
|
||||||
|
|
||||||
|
def _value_from_input(self, cls: Type, fieldpath: str, anntype: Any,
                      value: Any) -> Any:
    """Convert an assigned value to what a dataclass field expects.

    Dispatches on the (prep-verified) annotation type; raises TypeError
    for values that don't match.
    """
    # pylint: disable=too-many-return-statements

    origin = _get_origin(anntype)

    if origin is typing.Any:
        # 'Any' data passes through untouched but must be raw-json valid.
        if not _is_valid_json(value):
            raise TypeError(f'Invalid value type for \'{fieldpath}\';'
                            f' \'Any\' typed values must be types directly'
                            f' supported by json; got'
                            f' \'{type(value).__name__}\'.')
        return value

    if origin is typing.Union:
        # Currently the only unions we support are None/Value
        # (translated from Optional), which we verified on prep.
        # So let's treat this as a simple optional case.
        if value is None:
            return None
        childanntypes_l = [
            c for c in typing.get_args(anntype) if c is not type(None)
        ]
        assert len(childanntypes_l) == 1
        return self._value_from_input(cls, fieldpath, childanntypes_l[0],
                                      value)

    # Everything below this point assumes the annotation type resolves
    # to a concrete type. (This should have been verified at prep time).
    assert isinstance(origin, type)

    if origin in SIMPLE_TYPES:
        if type(value) is not origin:
            # Special case: if they want to coerce ints to floats, do so.
            if (self._coerce_to_float and origin is float
                    and type(value) is int):
                return float(value)
            _raise_type_error(fieldpath, type(value), (origin, ))
        return value

    # Sequences (which arrive as json lists) get rebuilt as list/set.
    if origin in {list, set}:
        return self._sequence_from_input(cls, fieldpath, anntype, value,
                                         origin)

    if origin is dict:
        return self._dict_from_input(cls, fieldpath, anntype, value)

    # Nested dataclasses arrive as dicts and are decoded recursively.
    if dataclasses.is_dataclass(origin):
        return self._dataclass_from_input(origin, fieldpath, value)

    # Enums arrive as their underlying values.
    if issubclass(origin, Enum):
        return enum_by_value(origin, value)

    raise TypeError(
        f"Field '{fieldpath}' of type '{anntype}' is unsupported here.")
|
||||||
|
|
||||||
|
def _dataclass_from_input(self, cls: Type, fieldpath: str,
                          values: dict) -> Any:
    """Given a dict, instantiates a dataclass of the given type.

    The dict must be in the json-friendly format as emitted from
    dataclass_to_dict. This means that sequence values such as tuples or
    sets should be passed as lists, enums should be passed as their
    associated values, and nested dataclasses should be passed as dicts.
    """
    # pylint: disable=too-many-locals
    if not isinstance(values, dict):
        raise TypeError("Expected a dict for 'values' arg.")

    # Implicit prep; emits a warning (see PrepSession) but proceeds.
    prep = PrepSession(explicit=False,
                       extra_types=None).prep_dataclass(cls,
                                                        recursion_level=0)

    # Dict keys not matching any field; preserved for round-tripping
    # (unless discarded per our config).
    extra_attrs: Dict[str, Any] = {}

    # noinspection PyDataclass
    fields = dataclasses.fields(cls)
    fields_by_name = {f.name: f for f in fields}
    args: Dict[str, Any] = {}
    for key, value in values.items():
        field = fields_by_name.get(key)
        if field is None:
            if self._allow_unknown_attrs:
                if self._discard_unknown_attrs:
                    continue

                # Treat this like 'Any' data; ensure that it is valid
                # raw json.
                if not _is_valid_json(value):
                    raise TypeError(
                        f'Unknown attr {key}'
                        f' on {fieldpath} contains data type(s)'
                        f' not supported by json.')
                extra_attrs[key] = value
            else:
                raise AttributeError(
                    f"'{cls.__name__}' has no '{key}' field.")
        else:
            fieldname = field.name
            fieldtype = prep.annotations[fieldname]
            # Dotted path used purely for error messages.
            subfieldpath = (f'{fieldpath}.{fieldname}'
                            if fieldpath else fieldname)
            args[key] = self._value_from_input(cls, subfieldpath,
                                               fieldtype, value)
    out = cls(**args)
    # Stash unknown attrs on the instance only when some exist, so
    # exporting can re-include them (see EXTRA_ATTRS_ATTR).
    if extra_attrs:
        setattr(out, EXTRA_ATTRS_ATTR, extra_attrs)
    return out
|
||||||
|
|
||||||
|
def _dict_from_input(self, cls: Type, fieldpath: str, anntype: Any,
                     value: Any) -> Any:
    """Convert a json-style dict into the dict described by anntype.

    Args:
        cls: The dataclass type being populated (used in error messages).
        fieldpath: Dotted path to this field (used in error messages).
        anntype: The Dict[...] annotation for this field.
        value: The raw input value; must be a dict.

    Returns:
        A dict with keys converted back from their json string forms
        (str, int, or Enum keys) and values run through the standard
        input conversion.

    Raises:
        TypeError: If the input is not a dict or contains invalid
            key/value types.
        ValueError: If a key does not correspond to a valid Enum value.
    """
    # pylint: disable=too-many-branches

    if not isinstance(value, dict):
        raise TypeError(f'Expected a dict for \'{fieldpath}\' on {cls};'
                        f' got a {type(value)}.')

    # Prep guarantees Dict annotations are either bare or fully
    # parameterized with exactly a key type and a value type.
    childtypes = typing.get_args(anntype)
    assert len(childtypes) in (0, 2)

    out: Dict

    # We treat 'Any' dicts simply as json; we don't do any translating.
    if not childtypes or childtypes[0] is typing.Any:
        # Fix: dropped a redundant isinstance(value, dict) re-check here;
        # that was already enforced (with a raise) above.
        if not _is_valid_json(value):
            raise TypeError(f'Got invalid value for Dict[Any, Any]'
                            f' at \'{fieldpath}\' on {cls};'
                            f' all keys and values must be'
                            f' json-compatible.')
        out = value
    else:
        out = {}
        keyanntype, valanntype = childtypes

        # Ok; we've got definite key/value types (which we verified as
        # valid during prep). Run all keys/values through it.

        # str keys we just take directly since that's supported by json.
        if keyanntype is str:
            for key, val in value.items():
                if not isinstance(key, str):
                    raise TypeError(
                        f'Got invalid key type {type(key)} for'
                        f' dict key at \'{fieldpath}\' on {cls};'
                        f' expected a str.')
                out[key] = self._value_from_input(cls, fieldpath,
                                                  valanntype, val)

        # int keys are stored in json as str versions of themselves.
        elif keyanntype is int:
            for key, val in value.items():
                if not isinstance(key, str):
                    raise TypeError(
                        f'Got invalid key type {type(key)} for'
                        f' dict key at \'{fieldpath}\' on {cls};'
                        f' expected a str.')
                try:
                    keyint = int(key)
                except ValueError as exc:
                    raise TypeError(
                        f'Got invalid key value {key} for'
                        f' dict key at \'{fieldpath}\' on {cls};'
                        f' expected an int in string form.') from exc
                out[keyint] = self._value_from_input(
                    cls, fieldpath, valanntype, val)

        elif issubclass(keyanntype, Enum):
            # In prep we verified that all these enums' values have
            # the same type, so we can just look at the first to see if
            # this is a string enum or an int enum.
            enumvaltype = type(next(iter(keyanntype)).value)
            assert enumvaltype in (int, str)
            if enumvaltype is str:
                for key, val in value.items():
                    try:
                        enumval = enum_by_value(keyanntype, key)
                    except ValueError as exc:
                        raise ValueError(
                            f'Got invalid key value {key} for'
                            f' dict key at \'{fieldpath}\' on {cls};'
                            f' expected a value corresponding to'
                            f' a {keyanntype}.') from exc
                    out[enumval] = self._value_from_input(
                        cls, fieldpath, valanntype, val)
            else:
                # int-valued enum keys arrive as str forms of their ints.
                for key, val in value.items():
                    try:
                        enumval = enum_by_value(keyanntype, int(key))
                    except (ValueError, TypeError) as exc:
                        raise ValueError(
                            f'Got invalid key value {key} for'
                            f' dict key at \'{fieldpath}\' on {cls};'
                            f' expected {keyanntype} value (though'
                            f' in string form).') from exc
                    out[enumval] = self._value_from_input(
                        cls, fieldpath, valanntype, val)

        else:
            # Prep should have rejected any other key type already.
            raise RuntimeError(f'Unhandled dict in-key-type {keyanntype}')

    return out
|
||||||
|
def _sequence_from_input(self, cls: Type, fieldpath: str, anntype: Any,
                         value: Any, seqtype: Type) -> Any:
    """Convert a json-style list into the sequence type seqtype.

    The input must be a plain list (json has no tuple/set types);
    each member is either passed through (for untyped/'Any' sequences,
    after validating it is json-compatible) or run through the
    standard input conversion for the annotated member type.
    """

    # Json has no tuples or sets, so every sequence arrives as a list.
    if type(value) is not list:
        raise TypeError(f'Invalid input value for "{fieldpath}";'
                        f' expected a list, got a {type(value).__name__}')

    membertypes = typing.get_args(anntype)

    # Untyped ('Any') members: verify each entry is representable as
    # raw json and hand the sequence back otherwise untouched.
    if not membertypes or membertypes[0] is typing.Any:
        for index, entry in enumerate(value):
            if _is_valid_json(entry):
                continue
            raise TypeError(f'Item {index} of {fieldpath} contains'
                            f' data type(s) not supported by json.')
        if type(value) is seqtype:
            return value
        return seqtype(value)

    # Typed members: convert each entry and build the target sequence.
    assert len(membertypes) == 1
    membertype = membertypes[0]
    return seqtype(
        self._value_from_input(cls, fieldpath, membertype, entry)
        for entry in value)
|
||||||
Loading…
x
Reference in New Issue
Block a user