Mirror of https://github.com/RYDE-WORK/ballistica.git (synced 2026-01-24 07:53:30 +08:00)

Merge branch 'respawn_dec' of https://github.com/3alTemp/ballistica into respawn_dec

Commit 08f5979742

118	.efrocachemap	generated
@@ -421,42 +421,42 @@
|
||||
"build/assets/ba_data/audio/zoeOw.ogg": "74befe45a8417e95b6a2233c51992a26",
|
||||
"build/assets/ba_data/audio/zoePickup01.ogg": "48ab8cddfcde36a750856f3f81dd20c8",
|
||||
"build/assets/ba_data/audio/zoeScream01.ogg": "2b468aedfa8741090247f04eb9e6df55",
|
||||
"build/assets/ba_data/data/langdata.json": "750e45f2f19a94a44703e3d31b9a8e96",
|
||||
"build/assets/ba_data/data/langdata.json": "5273cf3bfe2d25d70395690bf3c21825",
|
||||
"build/assets/ba_data/data/languages/arabic.json": "0db32e21b6d5337ccca478381744aa88",
|
||||
"build/assets/ba_data/data/languages/belarussian.json": "a112dfca3e188387516788bd8229c5b0",
|
||||
"build/assets/ba_data/data/languages/belarussian.json": "09954e550d13d3d9cb5a635a1d32a151",
|
||||
"build/assets/ba_data/data/languages/chinese.json": "1360ffde06828b63ce4fe956c3c3cd1d",
|
||||
"build/assets/ba_data/data/languages/chinesetraditional.json": "319565f8a15667488f48dbce59278e39",
|
||||
"build/assets/ba_data/data/languages/croatian.json": "766532c67af5bd0144c2d63cab0516fa",
|
||||
"build/assets/ba_data/data/languages/croatian.json": "e671b9d0c012be1a30f9c15eb1b81860",
|
||||
"build/assets/ba_data/data/languages/czech.json": "7171420af6d662e3a47b64576850a384",
|
||||
"build/assets/ba_data/data/languages/danish.json": "3fd69080783d5c9dcc0af737f02b6f1e",
|
||||
"build/assets/ba_data/data/languages/danish.json": "8e57db30c5250df2abff14a822f83ea7",
|
||||
"build/assets/ba_data/data/languages/dutch.json": "b0900d572c9141897d53d6574c471343",
|
||||
"build/assets/ba_data/data/languages/english.json": "1c4037fea1066d39d6eced419f314f35",
|
||||
"build/assets/ba_data/data/languages/english.json": "28a1c17925aba4f4f908732e5e5cb266",
|
||||
"build/assets/ba_data/data/languages/esperanto.json": "0e397cfa5f3fb8cef5f4a64f21cda880",
|
||||
"build/assets/ba_data/data/languages/filipino.json": "43e838754fe013b8bac75f75aef78cb3",
|
||||
"build/assets/ba_data/data/languages/filipino.json": "fe3f1efcb47efaa23524300d21728933",
|
||||
"build/assets/ba_data/data/languages/french.json": "cc8ac601f5443dd539893728db983f5c",
|
||||
"build/assets/ba_data/data/languages/german.json": "450fa41ae264f29a5d1af22143d0d0ad",
|
||||
"build/assets/ba_data/data/languages/gibberish.json": "b461539243e8efe3137137b886256ba7",
|
||||
"build/assets/ba_data/data/languages/gibberish.json": "ab9571486f703b8d57eab61dbf1d54d8",
|
||||
"build/assets/ba_data/data/languages/greek.json": "287c0ec437b38772284ef9d3e4fb2fc3",
|
||||
"build/assets/ba_data/data/languages/hindi.json": "5b6c8e988ffa84a7e26d120b6cd8e1a4",
|
||||
"build/assets/ba_data/data/languages/hindi.json": "90f54663e15d85a163f1848a8e9d8d07",
|
||||
"build/assets/ba_data/data/languages/hungarian.json": "796a290a8c44a1e7635208c2ff5fdc6e",
|
||||
"build/assets/ba_data/data/languages/indonesian.json": "9103845242b572aa8ba48e24f81ddb68",
|
||||
"build/assets/ba_data/data/languages/italian.json": "f550810b6866ea9bcf1985b7228f8cff",
|
||||
"build/assets/ba_data/data/languages/korean.json": "4e3524327a0174250aff5e1ef4c0c597",
|
||||
"build/assets/ba_data/data/languages/malay.json": "832562ce997fc70704b9234c95fb2e38",
|
||||
"build/assets/ba_data/data/languages/persian.json": "9728d631cf7d9ad3b209ae1244bb59c0",
|
||||
"build/assets/ba_data/data/languages/polish.json": "3a90b2d9e2c59305580c96f8098fc839",
|
||||
"build/assets/ba_data/data/languages/persian.json": "1a4c74ad9089cd746ad6fda4186c2220",
|
||||
"build/assets/ba_data/data/languages/polish.json": "9d22c6643c097c4cb268d0d6b6319cd4",
|
||||
"build/assets/ba_data/data/languages/portuguese.json": "b52164747c6308fc9d054eb6c0ff3c54",
|
||||
"build/assets/ba_data/data/languages/romanian.json": "aeebdd54f65939c2facc6ac50c117826",
|
||||
"build/assets/ba_data/data/languages/romanian.json": "b3e46efd6f869dbd78014570e037c290",
|
||||
"build/assets/ba_data/data/languages/russian.json": "30d5f3d2415088e1fb6558fcd6ccfa98",
|
||||
"build/assets/ba_data/data/languages/serbian.json": "d7452dd72ac0e51680cb39b5ebaa1c69",
|
||||
"build/assets/ba_data/data/languages/slovak.json": "27962d53dc3f7dd4e877cd40faafeeef",
|
||||
"build/assets/ba_data/data/languages/slovak.json": "c00fb27cf982ffad5a4370ad3b16bd21",
|
||||
"build/assets/ba_data/data/languages/spanish.json": "e3e9ac8f96f52302a480c7e955aed71f",
|
||||
"build/assets/ba_data/data/languages/swedish.json": "5142a96597d17d8344be96a603da64ac",
|
||||
"build/assets/ba_data/data/languages/tamil.json": "b4de1a2851afe4869c82e9acd94cd89c",
|
||||
"build/assets/ba_data/data/languages/thai.json": "9c425b420f0488a7f883da98947657ad",
|
||||
"build/assets/ba_data/data/languages/tamil.json": "b9fcc523639f55e05c7f4e7914f3321a",
|
||||
"build/assets/ba_data/data/languages/thai.json": "1d665629361f302693dead39de8fa945",
|
||||
"build/assets/ba_data/data/languages/turkish.json": "2be25c89ca754341f27750e0d595f31e",
|
||||
"build/assets/ba_data/data/languages/ukrainian.json": "b54a38e93deebafa5706ba2d1f626892",
|
||||
"build/assets/ba_data/data/languages/venetian.json": "8e9714d98a85e428ce3543fc49188a46",
|
||||
"build/assets/ba_data/data/languages/venetian.json": "f896fc3df13a42f1bef8813ca80b1a09",
|
||||
"build/assets/ba_data/data/languages/vietnamese.json": "921cd1e50f60fe3e101f246e172750ba",
|
||||
"build/assets/ba_data/data/maps/big_g.json": "1dd301d490643088a435ce75df971054",
|
||||
"build/assets/ba_data/data/maps/bridgit.json": "6aea74805f4880cc11237c5734a24422",
|
||||
@@ -4060,50 +4060,50 @@
|
||||
"build/assets/windows/Win32/ucrtbased.dll": "2def5335207d41b21b9823f6805997f1",
|
||||
"build/assets/windows/Win32/vc_redist.x86.exe": "b08a55e2e77623fe657bea24f223a3ae",
|
||||
"build/assets/windows/Win32/vcruntime140d.dll": "865b2af4d1e26a1a8073c89acb06e599",
|
||||
"build/prefab/full/linux_arm64_gui/debug/ballisticakit": "26eea64d4509875c9a88da74f49e675c",
|
||||
"build/prefab/full/linux_arm64_gui/release/ballisticakit": "0a39319a89364641f3bb0598821b4288",
|
||||
"build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "84567063607be0227ef779027e12d19d",
|
||||
"build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "f4458855192dedd13a28d36dc3962890",
|
||||
"build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "4c0679b0157c2dd63519e5225d99359d",
|
||||
"build/prefab/full/linux_x86_64_gui/release/ballisticakit": "335a3f06dc6dd361d6122fd9143124ae",
|
||||
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "041a300c9fa99c82395e1ebc66e81fe3",
|
||||
"build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "181145bf30e752991860acd0e44f972c",
|
||||
"build/prefab/full/mac_arm64_gui/debug/ballisticakit": "8531542c35242bcbffc0309cef10b2b8",
|
||||
"build/prefab/full/mac_arm64_gui/release/ballisticakit": "48cdebbdea839f6b8fc8f5cb69d7f961",
|
||||
"build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "159003daac99048702c74120be565bad",
|
||||
"build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "51c9582a1efaae50e1c435c13c390855",
|
||||
"build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "d66c11ebe6d9035ea7e86b362f8505a1",
|
||||
"build/prefab/full/mac_x86_64_gui/release/ballisticakit": "1f8113ffba1d000120bf83ac268c603b",
|
||||
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "6f2a68c0370061a2913278d97b039ecc",
|
||||
"build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "471e7f81fac96b4db752c5cdaeed7168",
|
||||
"build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "94916e80a9d7bc7801db666beceea026",
|
||||
"build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "1bc098ae93dd18143fb64ae5cbc33c19",
|
||||
"build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "da99cef03f12a6ff2c0065f4616262f2",
|
||||
"build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "14b67157a3bf57b9de067089476f79d5",
|
||||
"build/prefab/lib/linux_arm64_gui/debug/libballisticaplus.a": "8709ad96140d71760c2f493ee8bd7c43",
|
||||
"build/prefab/lib/linux_arm64_gui/release/libballisticaplus.a": "ee829cd5488e9750570dc6f602d65589",
|
||||
"build/prefab/lib/linux_arm64_server/debug/libballisticaplus.a": "8709ad96140d71760c2f493ee8bd7c43",
|
||||
"build/prefab/lib/linux_arm64_server/release/libballisticaplus.a": "ee829cd5488e9750570dc6f602d65589",
|
||||
"build/prefab/lib/linux_x86_64_gui/debug/libballisticaplus.a": "35fe69d96c154b97b534711dae9d8d3a",
|
||||
"build/prefab/lib/linux_x86_64_gui/release/libballisticaplus.a": "2db876e543b3e93128ec421ea5cbb011",
|
||||
"build/prefab/lib/linux_x86_64_server/debug/libballisticaplus.a": "35fe69d96c154b97b534711dae9d8d3a",
|
||||
"build/prefab/lib/linux_x86_64_server/release/libballisticaplus.a": "2db876e543b3e93128ec421ea5cbb011",
|
||||
"build/prefab/lib/mac_arm64_gui/debug/libballisticaplus.a": "417ea0f30d203d5de0e235550fcd7ab8",
|
||||
"build/prefab/lib/mac_arm64_gui/release/libballisticaplus.a": "72d071e977c88454d0623c4a9fb34361",
|
||||
"build/prefab/lib/mac_arm64_server/debug/libballisticaplus.a": "417ea0f30d203d5de0e235550fcd7ab8",
|
||||
"build/prefab/lib/mac_arm64_server/release/libballisticaplus.a": "72d071e977c88454d0623c4a9fb34361",
|
||||
"build/prefab/lib/mac_x86_64_gui/debug/libballisticaplus.a": "de1b228d95c47a7c296a853778715326",
|
||||
"build/prefab/lib/mac_x86_64_gui/release/libballisticaplus.a": "79117cbfdf695298e1d9ae997d990c4d",
|
||||
"build/prefab/lib/mac_x86_64_server/debug/libballisticaplus.a": "984f0990a8e4cca29a382d70e51cc051",
|
||||
"build/prefab/lib/mac_x86_64_server/release/libballisticaplus.a": "79117cbfdf695298e1d9ae997d990c4d",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "97a0aee0716397c0394c620b0cdc8cfa",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "5edf5fd129429079b24368da6c792c44",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "e453446a36102733a1f0db636fafb704",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "dfb843bbc924daf7a2e2a2eb6b4811df",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "09bb45bcbfad7c0f63b9494ceca669cc",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "c8d10517d61dc5c4d7c94a5eccecab4a",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "4944d18bb54894b0488cbdaa7b2ef06f",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "d17c4758367051e734601018b081f786",
|
||||
"build/prefab/full/linux_arm64_gui/debug/ballisticakit": "a7161a4100172e2bb42b838a9851c353",
|
||||
"build/prefab/full/linux_arm64_gui/release/ballisticakit": "9a63e694db2ed7536374c58a45ce65d3",
|
||||
"build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "9668ef38ddc59fadf323cf460c8b692c",
|
||||
"build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "8f58837d238dba248ae2e23e20bc3f06",
|
||||
"build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "f8a6e20f3fffd198494adfba4e884588",
|
||||
"build/prefab/full/linux_x86_64_gui/release/ballisticakit": "255205c95d519a594041fc239e435883",
|
||||
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "36facf256b69a8c0037370e23c82470d",
|
||||
"build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "d78aef348baf274f476ce9e344b80122",
|
||||
"build/prefab/full/mac_arm64_gui/debug/ballisticakit": "f1b9732cc7e7728dcedc39a55d9afea2",
|
||||
"build/prefab/full/mac_arm64_gui/release/ballisticakit": "8cfc0e04c10a315cce91dae041dfc3ff",
|
||||
"build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "a9fec1930c851f8ed743b08669df2d75",
|
||||
"build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "a6fcaa9d7eb10412787e4416f3536bb9",
|
||||
"build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "a39230df064404a3b1dd18a644f2f6d6",
|
||||
"build/prefab/full/mac_x86_64_gui/release/ballisticakit": "04e971f62a000383a13eb021e30afa7b",
|
||||
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "a93fe4f0cfb3c2c9061df049068230ac",
|
||||
"build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "0f08a84bb09589991faaca9250171e3c",
|
||||
"build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "12c079e62d0125b8a24b16e418405ba9",
|
||||
"build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "eb0d76fd3be03082572b0d835df05252",
|
||||
"build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "e4268ef0b50e94747081ee83666d80ab",
|
||||
"build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "f4646fecfed11f5e2b2ee5c892b2940a",
|
||||
"build/prefab/lib/linux_arm64_gui/debug/libballisticaplus.a": "ee36a39fd0f524989cb68930c89c8868",
|
||||
"build/prefab/lib/linux_arm64_gui/release/libballisticaplus.a": "dbed9145e5db116d92aa47cb9e98da39",
|
||||
"build/prefab/lib/linux_arm64_server/debug/libballisticaplus.a": "ee36a39fd0f524989cb68930c89c8868",
|
||||
"build/prefab/lib/linux_arm64_server/release/libballisticaplus.a": "dbed9145e5db116d92aa47cb9e98da39",
|
||||
"build/prefab/lib/linux_x86_64_gui/debug/libballisticaplus.a": "dc078f11a4e93062adc7d210fd4f08fb",
|
||||
"build/prefab/lib/linux_x86_64_gui/release/libballisticaplus.a": "a74bea3380d0fb39f78ac7b7598c1a72",
|
||||
"build/prefab/lib/linux_x86_64_server/debug/libballisticaplus.a": "dc078f11a4e93062adc7d210fd4f08fb",
|
||||
"build/prefab/lib/linux_x86_64_server/release/libballisticaplus.a": "a74bea3380d0fb39f78ac7b7598c1a72",
|
||||
"build/prefab/lib/mac_arm64_gui/debug/libballisticaplus.a": "b397e020f33132c4dd2280cb1222cd14",
|
||||
"build/prefab/lib/mac_arm64_gui/release/libballisticaplus.a": "ff0cb4db976707d25bd401bce80a4882",
|
||||
"build/prefab/lib/mac_arm64_server/debug/libballisticaplus.a": "b397e020f33132c4dd2280cb1222cd14",
|
||||
"build/prefab/lib/mac_arm64_server/release/libballisticaplus.a": "ff0cb4db976707d25bd401bce80a4882",
|
||||
"build/prefab/lib/mac_x86_64_gui/debug/libballisticaplus.a": "c464accef921df1325459bdd10c59b84",
|
||||
"build/prefab/lib/mac_x86_64_gui/release/libballisticaplus.a": "0896e849885cef50bcf33ce863efa7d2",
|
||||
"build/prefab/lib/mac_x86_64_server/debug/libballisticaplus.a": "e53c808357cc0a2f0da7b870be147083",
|
||||
"build/prefab/lib/mac_x86_64_server/release/libballisticaplus.a": "0896e849885cef50bcf33ce863efa7d2",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "f53601899c23c90c2b7e65836c805d8e",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "f31b348a7612e5fa3a968f3cc81cefcd",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "b8339779a2571b169f9d63c11aa7dfa3",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "511bc23565e830778d5ff183a201579d",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "b02faf2aa2df1de233a0549295e6b0ed",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "2e07aaa6d445caf3b33d79dc40bd2475",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "00f50fb4a3a9bbecd1b1188b78abae4b",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "88fb67cb3f3752f0b0db1d583f90490d",
|
||||
"src/assets/ba_data/python/babase/_mgen/__init__.py": "f885fed7f2ed98ff2ba271f9dbe3391c",
|
||||
"src/assets/ba_data/python/babase/_mgen/enums.py": "b611c090513a21e2fe90e56582724e9d",
|
||||
"src/ballistica/base/mgen/pyembed/binding_base.inc": "72bfed2cce8ff19741989dec28302f3f",
|
||||
|
||||
15	.github/ISSUE_TEMPLATE/bug_report.md	vendored

@@ -9,7 +9,7 @@ assignees: ''

### Description
Describe the bug. Do not forget to fill the title.
Make sure you're running game without any modifications (unless you want to report an api bug).
Make sure you're running game without any modifications.

### Steps to reproduce
1. Launch BombSquad
@@ -18,16 +18,17 @@ Make sure you're running game without any modifications (unless you want to repo
4. Bug!

### Expected behavior
Describe what you think should happen.
Describe what you think should happen if it's not obvious.

### Machine
**Platform**: Windows 10 / Ubuntu 20.04 LTS / AOSP 8.1 / etc.
**BombSquad version**: [1.5.27](https://github.com/efroemling/ballistica/releases/tag/v1.5.27)
**Commit**: [2642488](https://github.com/efroemling/ballistica/commit/2642488a51b250752169738f5aeeccaafa2bc8de)
Select what do you want to use: release version or commit. Please use a hyperlink.
**Platform**: Windows 11 / Ubuntu 22.04 LTS / Android 12 / MyToasterOS 7.3 / ... \
**BombSquad version**: [1.7.32](https://github.com/efroemling/ballistica/tree/v1.7.32) \
**Commit**: https://github.com/efroemling/ballistica/tree/978f32f9f098bd0ff1dc64b496ec31cf493ded09

You may specify BombSquad version you're running or refer to the latest commit.

### Screenshots
Put some screenshots here if needed.

### Extra
Put some extra information here. For example, describe your assumptions about the cause of the bug.
You may put some extra information here. For example, describe your assumptions about the cause of the bug.
13	CHANGELOG.md

@@ -1,4 +1,4 @@
### 1.7.33 (build 21762, api 8, 2024-01-24)
### 1.7.33 (build 21770, api 8, 2024-03-01)
- Stress test input-devices are now a bit smarter; they won't press any buttons
  while UIs are up (this could cause lots of chaos if it happened).
- Added a 'Show Demos When Idle' option in advanced settings. If enabled, the
@@ -21,7 +21,16 @@
  catch problems where a base class changes or removes a method and child
  classes forget to adapt to the change.
- Respawn icons now have dotted steps showing decimal progress to assist
  players on calculating when they are gonna respawn. (Thanks Temp!)
  players on calculating when they are gonna respawn. (Thanks 3alTemp!)
- Custom spaz "curse_time" values now work properly. (Thanks Temp!)
- Implemented `efro.dataclassio.IOMultiType` which will make my life a lot
  easier.
- Punches no longer physically affect powerup boxes which should make it easier
  to grab the powerup (Thanks VinniTR!).
- The 'Manual' party tab now supports entering IPv6 addresses (Thanks
  brostos!).
- Fixes a bug where Meteor Shower could make the game-end bell sound twice
  (Thanks 3alTemp!).

### 1.7.32 (build 21741, api 8, 2023-12-20)
- Fixed a screen message that no one will ever see (Thanks vishal332008?...)

@@ -52,4 +52,7 @@
- Created the original "reject_recently_left_players" plugin

### Temp (3alTemp)
- Modder & Bug Fixer
- Modder & Bug Fixer

### brostos
- Added support for joining using ipv6 address
5	Makefile
@@ -49,7 +49,7 @@ endif
|
||||
# Prereq targets that should be safe to run anytime; even if project-files
|
||||
# are out of date.
|
||||
PREREQS_SAFE = .cache/checkenv $(PCOMMANDBATCHBIN) .dir-locals.el .mypy.ini \
|
||||
.pyrightconfig.json .pycheckers .pylintrc .style.yapf .clang-format \
|
||||
.pyrightconfig.json .pylintrc .style.yapf .clang-format \
|
||||
ballisticakit-cmake/.clang-format .editorconfig
|
||||
|
||||
# Prereq targets that may break if the project needs updating should go here.
|
||||
@@ -1216,9 +1216,6 @@ ENV_SRC = $(PCOMMAND) tools/batools/build.py
|
||||
.pyrightconfig.json: config/toolconfigsrc/pyrightconfig.yaml $(TOOL_CFG_SRC)
|
||||
@$(TOOL_CFG_INST) $< $@
|
||||
|
||||
.pycheckers: config/toolconfigsrc/pycheckers $(TOOL_CFG_SRC)
|
||||
@$(TOOL_CFG_INST) $< $@
|
||||
|
||||
# Set this to 1 to skip environment checks.
|
||||
SKIP_ENV_CHECKS ?= 0
|
||||
|
||||
|
||||
@@ -288,14 +288,12 @@ class DirectoryScan:
|
||||
) -> None:
|
||||
"""Scan provided path and add module entries to provided list."""
|
||||
try:
|
||||
# Special case: let's save some time and skip the whole 'babase'
|
||||
# package since we know it doesn't contain any meta tags.
|
||||
fullpath = Path(path, subpath)
|
||||
# Note: skipping hidden dirs (starting with '.').
|
||||
entries = [
|
||||
(path, Path(subpath, name))
|
||||
for name in os.listdir(fullpath)
|
||||
# Actually scratch that for now; trying to avoid special cases.
|
||||
# if name != 'babase'
|
||||
if not name.startswith('.')
|
||||
]
|
||||
except PermissionError:
|
||||
# Expected sometimes.
|
||||
|
||||
@@ -52,7 +52,7 @@ if TYPE_CHECKING:
|
||||
|
||||
# Build number and version of the ballistica binary we expect to be
|
||||
# using.
|
||||
TARGET_BALLISTICA_BUILD = 21762
|
||||
TARGET_BALLISTICA_BUILD = 21770
|
||||
TARGET_BALLISTICA_VERSION = '1.7.33'
|
||||
|
||||
|
||||
@@ -287,9 +287,9 @@ def _setup_certs(contains_python_dist: bool) -> None:
|
||||
import certifi
|
||||
|
||||
# Let both OpenSSL and requests (if present) know to use this.
|
||||
os.environ['SSL_CERT_FILE'] = os.environ[
|
||||
'REQUESTS_CA_BUNDLE'
|
||||
] = certifi.where()
|
||||
os.environ['SSL_CERT_FILE'] = os.environ['REQUESTS_CA_BUNDLE'] = (
|
||||
certifi.where()
|
||||
)
|
||||
|
||||
|
||||
def _setup_paths(
|
||||
|
||||
@@ -256,9 +256,7 @@ class Map(Actor):
|
||||
return (
|
||||
None
|
||||
if val is None
|
||||
else babase.vec3validate(val)
|
||||
if __debug__
|
||||
else val
|
||||
else babase.vec3validate(val) if __debug__ else val
|
||||
)
|
||||
|
||||
def get_def_points(self, name: str) -> list[Sequence[float]]:
|
||||
@@ -334,8 +332,7 @@ class Map(Actor):
|
||||
closest_player_dist = 9999.0
|
||||
for ppt in player_pts:
|
||||
dist = (ppt - testpt).length()
|
||||
if dist < closest_player_dist:
|
||||
closest_player_dist = dist
|
||||
closest_player_dist = min(dist, closest_player_dist)
|
||||
if closest_player_dist > farthestpt_dist:
|
||||
farthestpt_dist = closest_player_dist
|
||||
farthestpt = testpt
|
||||
|
||||
@@ -13,7 +13,7 @@ from typing_extensions import override
|
||||
import bascenev1 as bs
|
||||
|
||||
from bascenev1lib.actor.bomb import Bomb, Blast
|
||||
from bascenev1lib.actor.powerupbox import PowerupBoxFactory
|
||||
from bascenev1lib.actor.powerupbox import PowerupBoxFactory, PowerupBox
|
||||
from bascenev1lib.actor.spazfactory import SpazFactory
|
||||
from bascenev1lib.gameutils import SharedObjects
|
||||
|
||||
@@ -629,7 +629,8 @@ class Spaz(bs.Actor):
|
||||
1000.0 * (tval + self.curse_time)
|
||||
)
|
||||
self._curse_timer = bs.Timer(
|
||||
5.0, bs.WeakCall(self.handlemessage, CurseExplodeMessage())
|
||||
self.curse_time,
|
||||
bs.WeakCall(self.handlemessage, CurseExplodeMessage()),
|
||||
)
|
||||
|
||||
def equip_boxing_gloves(self) -> None:
|
||||
@@ -1227,6 +1228,10 @@ class Spaz(bs.Actor):
|
||||
return None
|
||||
node = bs.getcollision().opposingnode
|
||||
|
||||
# Don't want to physically affect powerups.
|
||||
if node.getdelegate(PowerupBox):
|
||||
return None
|
||||
|
||||
# Only allow one hit per node per punch.
|
||||
if node and (node not in self._punched_nodes):
|
||||
punch_momentum_angular = (
|
||||
|
||||
@@ -73,6 +73,7 @@ class MeteorShowerGame(bs.TeamGameActivity[Player, Team]):
|
||||
self._last_player_death_time: float | None = None
|
||||
self._meteor_time = 2.0
|
||||
self._timer: OnScreenTimer | None = None
|
||||
self._ended: bool = False
|
||||
|
||||
# Some base class overrides:
|
||||
self.default_music = (
|
||||
@@ -161,6 +162,10 @@ class MeteorShowerGame(bs.TeamGameActivity[Player, Team]):
|
||||
return None
|
||||
|
||||
def _check_end_game(self) -> None:
|
||||
# We don't want to end this activity more than once.
|
||||
if self._ended:
|
||||
return
|
||||
|
||||
living_team_count = 0
|
||||
for team in self.teams:
|
||||
for player in team.players:
|
||||
@@ -270,4 +275,5 @@ class MeteorShowerGame(bs.TeamGameActivity[Player, Team]):
|
||||
# Submit the score value in milliseconds.
|
||||
results.set_team_score(team, int(1000.0 * longest_life))
|
||||
|
||||
self._ended = True
|
||||
self.end(results=results)
|
||||
|
||||
@@ -90,9 +90,7 @@ class CoopBrowserWindow(bui.Window):
|
||||
self._height = (
|
||||
657
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 730
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 800
|
||||
else 730 if uiscale is bui.UIScale.MEDIUM else 800
|
||||
)
|
||||
app.ui_v1.set_main_menu_location('Coop Select')
|
||||
self._r = 'coopSelectWindow'
|
||||
@@ -104,6 +102,19 @@ class CoopBrowserWindow(bui.Window):
|
||||
'campaignDifficulty', 'easy'
|
||||
)
|
||||
|
||||
if (
|
||||
self._campaign_difficulty == 'hard'
|
||||
and not app.classic.accounts.have_pro_options()
|
||||
):
|
||||
plus.add_v1_account_transaction(
|
||||
{
|
||||
'type': 'SET_MISC_VAL',
|
||||
'name': 'campaignDifficulty',
|
||||
'value': 'easy',
|
||||
}
|
||||
)
|
||||
self._campaign_difficulty = 'easy'
|
||||
|
||||
super().__init__(
|
||||
root_widget=bui.containerwidget(
|
||||
size=(self._width, self._height + top_extra),
|
||||
@@ -112,17 +123,13 @@ class CoopBrowserWindow(bui.Window):
|
||||
stack_offset=(
|
||||
(0, -15)
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else (0, 0)
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else (0, 0)
|
||||
else (0, 0) if uiscale is bui.UIScale.MEDIUM else (0, 0)
|
||||
),
|
||||
transition=transition,
|
||||
scale=(
|
||||
1.2
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 0.8
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 0.75
|
||||
else 0.8 if uiscale is bui.UIScale.MEDIUM else 0.75
|
||||
),
|
||||
)
|
||||
)
|
||||
@@ -271,9 +278,11 @@ class CoopBrowserWindow(bui.Window):
|
||||
self._scrollwidget = bui.scrollwidget(
|
||||
parent=self._root_widget,
|
||||
highlight=False,
|
||||
position=(65 + x_inset, 120)
|
||||
if uiscale is bui.UIScale.SMALL and app.ui_v1.use_toolbars
|
||||
else (65 + x_inset, 70),
|
||||
position=(
|
||||
(65 + x_inset, 120)
|
||||
if uiscale is bui.UIScale.SMALL and app.ui_v1.use_toolbars
|
||||
else (65 + x_inset, 70)
|
||||
),
|
||||
size=(self._scroll_width, self._scroll_height),
|
||||
simple_culling_v=10.0,
|
||||
claims_left_right=True,
|
||||
@@ -421,12 +430,14 @@ class CoopBrowserWindow(bui.Window):
|
||||
if tbtn.time_remaining_value_text is not None:
|
||||
bui.textwidget(
|
||||
edit=tbtn.time_remaining_value_text,
|
||||
text=bui.timestring(tbtn.time_remaining, centi=False)
|
||||
if (
|
||||
tbtn.has_time_remaining
|
||||
and self._tourney_data_up_to_date
|
||||
)
|
||||
else '-',
|
||||
text=(
|
||||
bui.timestring(tbtn.time_remaining, centi=False)
|
||||
if (
|
||||
tbtn.has_time_remaining
|
||||
and self._tourney_data_up_to_date
|
||||
)
|
||||
else '-'
|
||||
),
|
||||
)
|
||||
|
||||
# Also adjust the ad icon visibility.
|
||||
@@ -447,9 +458,9 @@ class CoopBrowserWindow(bui.Window):
|
||||
try:
|
||||
bui.imagewidget(
|
||||
edit=self._hard_button_lock_image,
|
||||
opacity=0.0
|
||||
if bui.app.classic.accounts.have_pro_options()
|
||||
else 1.0,
|
||||
opacity=(
|
||||
0.0 if bui.app.classic.accounts.have_pro_options() else 1.0
|
||||
),
|
||||
)
|
||||
except Exception:
|
||||
logging.exception('Error updating campaign lock.')
|
||||
@@ -559,12 +570,16 @@ class CoopBrowserWindow(bui.Window):
|
||||
enable_sound=False,
|
||||
on_activate_call=bui.Call(self._set_campaign_difficulty, 'easy'),
|
||||
on_select_call=bui.Call(self.sel_change, 'campaign', 'easyButton'),
|
||||
color=sel_color
|
||||
if self._campaign_difficulty == 'easy'
|
||||
else un_sel_color,
|
||||
textcolor=sel_textcolor
|
||||
if self._campaign_difficulty == 'easy'
|
||||
else un_sel_textcolor,
|
||||
color=(
|
||||
sel_color
|
||||
if self._campaign_difficulty == 'easy'
|
||||
else un_sel_color
|
||||
),
|
||||
textcolor=(
|
||||
sel_textcolor
|
||||
if self._campaign_difficulty == 'easy'
|
||||
else un_sel_textcolor
|
||||
),
|
||||
)
|
||||
bui.widget(edit=self._easy_button, show_buffer_left=100)
|
||||
if self._selected_campaign_level == 'easyButton':
|
||||
@@ -585,12 +600,16 @@ class CoopBrowserWindow(bui.Window):
|
||||
enable_sound=False,
|
||||
on_activate_call=bui.Call(self._set_campaign_difficulty, 'hard'),
|
||||
on_select_call=bui.Call(self.sel_change, 'campaign', 'hardButton'),
|
||||
color=sel_color_hard
|
||||
if self._campaign_difficulty == 'hard'
|
||||
else un_sel_color,
|
||||
textcolor=sel_textcolor
|
||||
if self._campaign_difficulty == 'hard'
|
||||
else un_sel_textcolor,
|
||||
color=(
|
||||
sel_color_hard
|
||||
if self._campaign_difficulty == 'hard'
|
||||
else un_sel_color
|
||||
),
|
||||
textcolor=(
|
||||
sel_textcolor
|
||||
if self._campaign_difficulty == 'hard'
|
||||
else un_sel_textcolor
|
||||
),
|
||||
)
|
||||
self._hard_button_lock_image = bui.imagewidget(
|
||||
parent=parent_widget,
|
||||
@@ -960,35 +979,43 @@ class CoopBrowserWindow(bui.Window):
|
||||
for i, tbutton in enumerate(self._tournament_buttons):
|
||||
bui.widget(
|
||||
edit=tbutton.button,
|
||||
up_widget=self._tournament_info_button
|
||||
if i == 0
|
||||
else self._tournament_buttons[i - 1].button,
|
||||
down_widget=self._tournament_buttons[(i + 1)].button
|
||||
if i + 1 < len(self._tournament_buttons)
|
||||
else custom_h_scroll,
|
||||
up_widget=(
|
||||
self._tournament_info_button
|
||||
if i == 0
|
||||
else self._tournament_buttons[i - 1].button
|
||||
),
|
||||
down_widget=(
|
||||
self._tournament_buttons[(i + 1)].button
|
||||
if i + 1 < len(self._tournament_buttons)
|
||||
else custom_h_scroll
|
||||
),
|
||||
)
|
||||
bui.widget(
|
||||
edit=tbutton.more_scores_button,
|
||||
down_widget=self._tournament_buttons[
|
||||
(i + 1)
|
||||
].current_leader_name_text
|
||||
if i + 1 < len(self._tournament_buttons)
|
||||
else custom_h_scroll,
|
||||
down_widget=(
|
||||
self._tournament_buttons[(i + 1)].current_leader_name_text
|
||||
if i + 1 < len(self._tournament_buttons)
|
||||
else custom_h_scroll
|
||||
),
|
||||
)
|
||||
bui.widget(
|
||||
edit=tbutton.current_leader_name_text,
|
||||
up_widget=self._tournament_info_button
|
||||
if i == 0
|
||||
else self._tournament_buttons[i - 1].more_scores_button,
|
||||
up_widget=(
|
||||
self._tournament_info_button
|
||||
if i == 0
|
||||
else self._tournament_buttons[i - 1].more_scores_button
|
||||
),
|
||||
)
|
||||
|
||||
for btn in self._custom_buttons:
|
||||
try:
|
||||
bui.widget(
|
||||
edit=btn.get_button(),
|
||||
up_widget=tournament_h_scroll
|
||||
if self._tournament_buttons
|
||||
else self._tournament_info_button,
|
||||
up_widget=(
|
||||
tournament_h_scroll
|
||||
if self._tournament_buttons
|
||||
else self._tournament_info_button
|
||||
),
|
||||
)
|
||||
except Exception:
|
||||
logging.exception('Error wiring up custom buttons.')
|
||||
@@ -1042,8 +1069,9 @@ class CoopBrowserWindow(bui.Window):
|
||||
|
||||
def _switch_to_score(
|
||||
self,
|
||||
show_tab: StoreBrowserWindow.TabID
|
||||
| None = StoreBrowserWindow.TabID.EXTRAS,
|
||||
show_tab: (
|
||||
StoreBrowserWindow.TabID | None
|
||||
) = StoreBrowserWindow.TabID.EXTRAS,
|
||||
) -> None:
|
||||
# pylint: disable=cyclic-import
|
||||
from bauiv1lib.account import show_sign_in_prompt
|
||||
|
||||
@@ -48,7 +48,10 @@ class _HostLookupThread(Thread):
|
||||
try:
|
||||
import socket
|
||||
|
||||
result = socket.gethostbyname(self._name)
|
||||
result = [
|
||||
item[-1][0]
|
||||
for item in socket.getaddrinfo(self._name, self._port)
|
||||
][0]
|
||||
except Exception:
|
||||
result = None
|
||||
bui.pushcall(
|
||||
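The hunk above swaps the IPv4-only `socket.gethostbyname()` call for `socket.getaddrinfo()`, which resolves both IPv4 and IPv6 addresses and is what lets the 'Manual' tab accept IPv6 hosts. A minimal standalone sketch of that lookup pattern (the helper name and error handling here are illustrative only, not the project's code):

```python
import socket


def resolve_first_address(name: str, port: int) -> str | None:
    """Return the first address that `name` resolves to, IPv4 or IPv6.

    Illustrative sketch only; it mirrors the getaddrinfo pattern used
    in the hunk above but is not the project's actual helper.
    """
    try:
        # getaddrinfo yields (family, type, proto, canonname, sockaddr)
        # tuples; sockaddr[0] is the textual address for both AF_INET
        # and AF_INET6 results.
        infos = socket.getaddrinfo(name, port)
        return infos[0][-1][0]
    except OSError:
        # Resolution failed; mirror the diff's behavior of returning None.
        return None


print(resolve_first_address('localhost', 43210))  # e.g. '127.0.0.1' or '::1'
```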
@@ -212,15 +215,19 @@ class ManualGatherTab(GatherTab):
|
||||
inactive_color = (0.5, 0.4, 0.5)
|
||||
bui.textwidget(
|
||||
edit=self._join_by_address_text,
|
||||
color=active_color
|
||||
if value is SubTabType.JOIN_BY_ADDRESS
|
||||
else inactive_color,
|
||||
color=(
|
||||
active_color
|
||||
if value is SubTabType.JOIN_BY_ADDRESS
|
||||
else inactive_color
|
||||
),
|
||||
)
|
||||
bui.textwidget(
|
||||
edit=self._favorites_text,
|
||||
color=active_color
|
||||
if value is SubTabType.FAVORITES
|
||||
else inactive_color,
|
||||
color=(
|
||||
active_color
|
||||
if value is SubTabType.FAVORITES
|
||||
else inactive_color
|
||||
),
|
||||
)
|
||||
|
||||
# Clear anything existing in the old sub-tab.
|
||||
@@ -354,9 +361,7 @@ class ManualGatherTab(GatherTab):
|
||||
self._height = (
|
||||
578
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 670
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 800
|
||||
else 670 if uiscale is bui.UIScale.MEDIUM else 800
|
||||
)
|
||||
|
||||
self._scroll_width = self._width - 130 + 2 * x_inset
|
||||
@@ -375,16 +380,12 @@ class ManualGatherTab(GatherTab):
|
||||
b_height = (
|
||||
107
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 142
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 190
|
||||
else 142 if uiscale is bui.UIScale.MEDIUM else 190
|
||||
)
|
||||
b_space_extra = (
|
||||
0
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else -2
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else -5
|
||||
else -2 if uiscale is bui.UIScale.MEDIUM else -5
|
||||
)
|
||||
|
||||
btnv = (
|
||||
@@ -392,9 +393,7 @@ class ManualGatherTab(GatherTab):
|
||||
- (
|
||||
48
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 45
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 40
|
||||
else 45 if uiscale is bui.UIScale.MEDIUM else 40
|
||||
)
|
||||
- b_height
|
||||
)
|
||||
@@ -513,9 +512,7 @@ class ManualGatherTab(GatherTab):
|
||||
scale=(
|
||||
1.8
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 1.55
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 1.0
|
||||
else 1.55 if uiscale is bui.UIScale.MEDIUM else 1.0
|
||||
),
|
||||
size=(c_width, c_height),
|
||||
transition='in_scale',
|
||||
|
||||
@@ -52,9 +52,7 @@ class PlaylistCustomizeBrowserWindow(bui.Window):
|
||||
self._height = (
|
||||
380.0
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 420.0
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 500.0
|
||||
else 420.0 if uiscale is bui.UIScale.MEDIUM else 500.0
|
||||
)
|
||||
top_extra = 20.0 if uiscale is bui.UIScale.SMALL else 0.0
|
||||
|
||||
@@ -66,13 +64,11 @@ class PlaylistCustomizeBrowserWindow(bui.Window):
|
||||
scale=(
|
||||
2.05
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 1.5
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 1.0
|
||||
else 1.5 if uiscale is bui.UIScale.MEDIUM else 1.0
|
||||
),
|
||||
stack_offset=(
|
||||
(0, -10) if uiscale is bui.UIScale.SMALL else (0, 0)
|
||||
),
|
||||
stack_offset=(0, -10)
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else (0, 0),
|
||||
)
|
||||
)
|
||||
|
||||
@@ -118,9 +114,7 @@ class PlaylistCustomizeBrowserWindow(bui.Window):
|
||||
scl = (
|
||||
1.1
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 1.27
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 1.57
|
||||
else 1.27 if uiscale is bui.UIScale.MEDIUM else 1.57
|
||||
)
|
||||
scl *= 0.63
|
||||
v -= 65.0 * scl
|
||||
@@ -285,9 +279,11 @@ class PlaylistCustomizeBrowserWindow(bui.Window):
|
||||
bui.widget(
|
||||
edit=scrollwidget,
|
||||
left_widget=new_button,
|
||||
right_widget=bui.get_special_widget('party_button')
|
||||
if bui.app.ui_v1.use_toolbars
|
||||
else None,
|
||||
right_widget=(
|
||||
bui.get_special_widget('party_button')
|
||||
if bui.app.ui_v1.use_toolbars
|
||||
else None
|
||||
),
|
||||
)
|
||||
|
||||
# make sure config exists
|
||||
@@ -329,9 +325,9 @@ class PlaylistCustomizeBrowserWindow(bui.Window):
|
||||
|
||||
if self._selected_playlist_name is not None:
|
||||
cfg = bui.app.config
|
||||
cfg[
|
||||
self._pvars.config_name + ' Playlist Selection'
|
||||
] = self._selected_playlist_name
|
||||
cfg[self._pvars.config_name + ' Playlist Selection'] = (
|
||||
self._selected_playlist_name
|
||||
)
|
||||
cfg.commit()
|
||||
|
||||
bui.containerwidget(
|
||||
@@ -408,9 +404,11 @@ class PlaylistCustomizeBrowserWindow(bui.Window):
|
||||
text=self._get_playlist_display_name(pname),
|
||||
h_align='left',
|
||||
v_align='center',
|
||||
color=(0.6, 0.6, 0.7, 1.0)
|
||||
if pname == '__default__'
|
||||
else (0.85, 0.85, 0.85, 1),
|
||||
color=(
|
||||
(0.6, 0.6, 0.7, 1.0)
|
||||
if pname == '__default__'
|
||||
else (0.85, 0.85, 0.85, 1)
|
||||
),
|
||||
always_highlight=True,
|
||||
on_select_call=bui.Call(self._select, pname, index),
|
||||
on_activate_call=bui.Call(self._edit_button.activate),
|
||||
@@ -458,12 +456,12 @@ class PlaylistCustomizeBrowserWindow(bui.Window):
|
||||
# if we want and also lets us pass it to the game (since we reset
|
||||
# the whole python environment that's not actually easy).
|
||||
cfg = bui.app.config
|
||||
cfg[
|
||||
self._pvars.config_name + ' Playlist Selection'
|
||||
] = self._selected_playlist_name
|
||||
cfg[
|
||||
self._pvars.config_name + ' Playlist Randomize'
|
||||
] = self._do_randomize_val
|
||||
cfg[self._pvars.config_name + ' Playlist Selection'] = (
|
||||
self._selected_playlist_name
|
||||
)
|
||||
cfg[self._pvars.config_name + ' Playlist Randomize'] = (
|
||||
self._do_randomize_val
|
||||
)
|
||||
cfg.commit()
|
||||
|
||||
def _new_playlist(self) -> None:
|
||||
@@ -536,12 +534,10 @@ class PlaylistCustomizeBrowserWindow(bui.Window):
|
||||
|
||||
# (we don't use len()-1 here because the default list adds one)
|
||||
assert self._selected_playlist_index is not None
|
||||
if self._selected_playlist_index > len(
|
||||
bui.app.config[self._pvars.config_name + ' Playlists']
|
||||
):
|
||||
self._selected_playlist_index = len(
|
||||
bui.app.config[self._pvars.config_name + ' Playlists']
|
||||
)
|
||||
self._selected_playlist_index = min(
|
||||
self._selected_playlist_index,
|
||||
len(bui.app.config[self._pvars.config_name + ' Playlists']),
|
||||
)
|
||||
self._refresh()
|
||||
|
||||
def _import_playlist(self) -> None:
|
||||
|
||||
@@ -43,9 +43,7 @@ class SoundtrackBrowserWindow(bui.Window):
|
||||
self._height = (
|
||||
340
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 370
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 440
|
||||
else 370 if uiscale is bui.UIScale.MEDIUM else 440
|
||||
)
|
||||
spacing = 40.0
|
||||
v = self._height - 40.0
|
||||
@@ -60,13 +58,11 @@ class SoundtrackBrowserWindow(bui.Window):
|
||||
scale=(
|
||||
2.3
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 1.6
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 1.0
|
||||
else 1.6 if uiscale is bui.UIScale.MEDIUM else 1.0
|
||||
),
|
||||
stack_offset=(
|
||||
(0, -18) if uiscale is bui.UIScale.SMALL else (0, 0)
|
||||
),
|
||||
stack_offset=(0, -18)
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else (0, 0),
|
||||
)
|
||||
)
|
||||
|
||||
@@ -110,9 +106,7 @@ class SoundtrackBrowserWindow(bui.Window):
|
||||
scl = (
|
||||
1.0
|
||||
if uiscale is bui.UIScale.SMALL
|
||||
else 1.13
|
||||
if uiscale is bui.UIScale.MEDIUM
|
||||
else 1.4
|
||||
else 1.13 if uiscale is bui.UIScale.MEDIUM else 1.4
|
||||
)
|
||||
v -= 60.0 * scl
|
||||
self._new_button = btn = bui.buttonwidget(
|
||||
@@ -245,9 +239,11 @@ class SoundtrackBrowserWindow(bui.Window):
|
||||
bui.widget(
|
||||
edit=self._scrollwidget,
|
||||
left_widget=self._new_button,
|
||||
right_widget=bui.get_special_widget('party_button')
|
||||
if bui.app.ui_v1.use_toolbars
|
||||
else self._scrollwidget,
|
||||
right_widget=(
|
||||
bui.get_special_widget('party_button')
|
||||
if bui.app.ui_v1.use_toolbars
|
||||
else self._scrollwidget
|
||||
),
|
||||
)
|
||||
self._col = bui.columnwidget(parent=scrollwidget, border=2, margin=0)
|
||||
|
||||
@@ -286,8 +282,9 @@ class SoundtrackBrowserWindow(bui.Window):
|
||||
bui.getsound('shieldDown').play()
|
||||
assert self._selected_soundtrack_index is not None
|
||||
assert self._soundtracks is not None
|
||||
if self._selected_soundtrack_index >= len(self._soundtracks):
|
||||
self._selected_soundtrack_index = len(self._soundtracks)
|
||||
self._selected_soundtrack_index = min(
|
||||
self._selected_soundtrack_index, len(self._soundtracks)
|
||||
)
|
||||
self._refresh()
|
||||
|
||||
def _delete_soundtrack(self) -> None:
|
||||
|
||||
@@ -14,7 +14,7 @@ Mac:
|
||||
(brew install python3).
|
||||
|
||||
Linux (x86_64):
|
||||
- Server binaries are currently compiled against Ubuntu 20 LTS.
|
||||
- Server binaries are currently compiled against Ubuntu 22 LTS.
|
||||
|
||||
Raspberry Pi:
|
||||
- The server binary was compiled on a Raspberry Pi 4 running Raspbian Buster.
|
||||
|
||||
@@ -39,7 +39,7 @@ auto main(int argc, char** argv) -> int {
|
||||
namespace ballistica {
|
||||
|
||||
// These are set automatically via script; don't modify them here.
|
||||
const int kEngineBuildNumber = 21762;
|
||||
const int kEngineBuildNumber = 21770;
|
||||
const char* kEngineVersion = "1.7.33";
|
||||
const int kEngineApiVersion = 8;
|
||||
|
||||
|
||||
@@ -5,10 +5,18 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import Enum
|
||||
import copy
|
||||
import datetime
|
||||
from enum import Enum
|
||||
from dataclasses import field, dataclass
|
||||
from typing import TYPE_CHECKING, Any, Sequence, Annotated
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Sequence,
|
||||
Annotated,
|
||||
assert_type,
|
||||
assert_never,
|
||||
)
|
||||
|
||||
from typing_extensions import override
|
||||
import pytest
|
||||
@@ -24,10 +32,11 @@ from efro.dataclassio import (
|
||||
Codec,
|
||||
DataclassFieldLookup,
|
||||
IOExtendedData,
|
||||
IOMultiType,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
pass
|
||||
from typing import Self
|
||||
|
||||
|
||||
class _EnumTest(Enum):
|
||||
@@ -1069,3 +1078,221 @@ def test_soft_default() -> None:
|
||||
todict = dataclass_to_dict(orig)
|
||||
assert todict == {'ival': 2}
|
||||
assert dataclass_from_dict(_TestClassE8, todict) == orig
|
||||
|
||||
|
||||
class MTTestTypeID(Enum):
|
||||
"""IDs for our multi-type class."""
|
||||
|
||||
CLASS_1 = 'm1'
|
||||
CLASS_2 = 'm2'
|
||||
|
||||
|
||||
class MTTestBase(IOMultiType[MTTestTypeID]):
|
||||
"""Our multi-type class.
|
||||
|
||||
These top level multi-type classes are special parent classes
|
||||
that know about all of their child classes and how to serialize
|
||||
& deserialize them using explicit type ids. We can then use the
|
||||
parent class in annotations and dataclassio will do the right thing.
|
||||
Useful for stuff like Message classes where we may want to store a
|
||||
bunch of different types of them into one place.
|
||||
"""
|
||||
|
||||
@override
|
||||
@classmethod
|
||||
def get_type(cls, type_id: MTTestTypeID) -> type[MTTestBase]:
|
||||
"""Return the subclass for each of our type-ids."""
|
||||
|
||||
# This uses assert_never() to ensure we cover all cases in the
|
||||
# enum. Though this is less efficient than looking up by dict
|
||||
# would be. If we had lots of values we could also support lazy
|
||||
# loading by importing classes only when their value is being
|
||||
# requested.
|
||||
val: type[MTTestBase]
|
||||
if type_id is MTTestTypeID.CLASS_1:
|
||||
val = MTTestClass1
|
||||
elif type_id is MTTestTypeID.CLASS_2:
|
||||
val = MTTestClass2
|
||||
else:
|
||||
assert_never(type_id)
|
||||
return val
|
||||
|
||||
@override
|
||||
@classmethod
|
||||
def get_type_id(cls) -> MTTestTypeID:
|
||||
"""Provide the type-id for this subclass."""
|
||||
# If we wanted, we could just maintain a static mapping
|
||||
# of types-to-ids here, but there are benefits to letting
|
||||
# each child class speak for itself. Namely that we can
|
||||
# do lazy-loading and don't need to have all types present
|
||||
# here.
|
||||
|
||||
# So we'll let all our child classes override this.
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@ioprepped
|
||||
@dataclass(frozen=True) # Frozen so we can test in set()
|
||||
class MTTestClass1(MTTestBase):
|
||||
"""A test child-class for use with our multi-type class."""
|
||||
|
||||
ival: int
|
||||
|
||||
@override
|
||||
@classmethod
|
||||
def get_type_id(cls) -> MTTestTypeID:
|
||||
return MTTestTypeID.CLASS_1
|
||||
|
||||
|
||||
@ioprepped
|
||||
@dataclass(frozen=True) # Frozen so we can test in set()
|
||||
class MTTestClass2(MTTestBase):
|
||||
"""Another test child-class for use with our multi-type class."""
|
||||
|
||||
sval: str
|
||||
|
||||
@override
|
||||
@classmethod
|
||||
def get_type_id(cls) -> MTTestTypeID:
|
||||
return MTTestTypeID.CLASS_2
|
||||
|
||||
|
||||
def test_multi_type() -> None:
|
||||
"""Test IOMultiType stuff."""
|
||||
# pylint: disable=too-many-locals
|
||||
# pylint: disable=too-many-statements
|
||||
|
||||
# Test converting single instances back and forth.
|
||||
val1: MTTestBase = MTTestClass1(ival=123)
|
||||
tpname = MTTestBase.ID_STORAGE_NAME
|
||||
outdict = dataclass_to_dict(val1)
|
||||
assert outdict == {'ival': 123, tpname: 'm1'}
|
||||
val2: MTTestBase = MTTestClass2(sval='whee')
|
||||
outdict2 = dataclass_to_dict(val2)
|
||||
assert outdict2 == {'sval': 'whee', tpname: 'm2'}
|
||||
|
||||
# Make sure types and values work for both concrete types and the
|
||||
# multi-type.
|
||||
assert_type(dataclass_from_dict(MTTestClass1, outdict), MTTestClass1)
|
||||
assert_type(dataclass_from_dict(MTTestBase, outdict), MTTestBase)
|
||||
|
||||
assert dataclass_from_dict(MTTestClass1, outdict) == val1
|
||||
assert dataclass_from_dict(MTTestClass2, outdict2) == val2
|
||||
assert dataclass_from_dict(MTTestBase, outdict) == val1
|
||||
assert dataclass_from_dict(MTTestBase, outdict2) == val2
|
||||
|
||||
# Trying to load as a multi-type should fail if there is no type
|
||||
# value present.
|
||||
outdictmod = copy.deepcopy(outdict)
|
||||
del outdictmod[tpname]
|
||||
with pytest.raises(ValueError):
|
||||
dataclass_from_dict(MTTestBase, outdictmod)
|
||||
|
||||
# However it should work when loading an exact type. This can be
|
||||
# necessary to gracefully upgrade old data to multi-type form.
|
||||
dataclass_from_dict(MTTestClass1, outdictmod)
|
||||
|
||||
# Now test our multi-type embedded in other classes. We should be
|
||||
# able to throw a mix of things in there and have them deserialize
|
||||
# back the types we started with.
|
||||
|
||||
# Individual values:
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class _TestContainerClass1:
|
||||
obj_a: MTTestBase
|
||||
obj_b: MTTestBase
|
||||
|
||||
container1 = _TestContainerClass1(
|
||||
obj_a=MTTestClass1(234), obj_b=MTTestClass2('987')
|
||||
)
|
||||
outdict = dataclass_to_dict(container1)
|
||||
container1b = dataclass_from_dict(_TestContainerClass1, outdict)
|
||||
assert container1 == container1b
|
||||
|
||||
# Lists:
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class _TestContainerClass2:
|
||||
objs: list[MTTestBase]
|
||||
|
||||
container2 = _TestContainerClass2(
|
||||
objs=[MTTestClass1(111), MTTestClass2('bbb')]
|
||||
)
|
||||
outdict = dataclass_to_dict(container2)
|
||||
container2b = dataclass_from_dict(_TestContainerClass2, outdict)
|
||||
assert container2 == container2b
|
||||
|
||||
# Dict values:
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class _TestContainerClass3:
|
||||
objs: dict[int, MTTestBase]
|
||||
|
||||
container3 = _TestContainerClass3(
|
||||
objs={1: MTTestClass1(456), 2: MTTestClass2('gronk')}
|
||||
)
|
||||
outdict = dataclass_to_dict(container3)
|
||||
container3b = dataclass_from_dict(_TestContainerClass3, outdict)
|
||||
assert container3 == container3b
|
||||
|
||||
# Tuples:
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class _TestContainerClass4:
|
||||
objs: tuple[MTTestBase, MTTestBase]
|
||||
|
||||
container4 = _TestContainerClass4(
|
||||
objs=(MTTestClass1(932), MTTestClass2('potato'))
|
||||
)
|
||||
outdict = dataclass_to_dict(container4)
|
||||
container4b = dataclass_from_dict(_TestContainerClass4, outdict)
|
||||
assert container4 == container4b
|
||||
|
||||
# Sets (note: dataclasses must be frozen for this to work):
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class _TestContainerClass5:
|
||||
objs: set[MTTestBase]
|
||||
|
||||
container5 = _TestContainerClass5(
|
||||
objs={MTTestClass1(424), MTTestClass2('goo')}
|
||||
)
|
||||
outdict = dataclass_to_dict(container5)
|
||||
container5b = dataclass_from_dict(_TestContainerClass5, outdict)
|
||||
assert container5 == container5b
|
||||
|
||||
# Optionals.
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class _TestContainerClass6:
|
||||
obj: MTTestBase | None
|
||||
|
||||
container6 = _TestContainerClass6(obj=None)
|
||||
outdict = dataclass_to_dict(container6)
|
||||
container6b = dataclass_from_dict(_TestContainerClass6, outdict)
|
||||
assert container6 == container6b
|
||||
|
||||
container6 = _TestContainerClass6(obj=MTTestClass2('fwr'))
|
||||
outdict = dataclass_to_dict(container6)
|
||||
container6b = dataclass_from_dict(_TestContainerClass6, outdict)
|
||||
assert container6 == container6b
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class _TestContainerClass7:
|
||||
obj: Annotated[
|
||||
MTTestBase | None,
|
||||
IOAttrs('o', soft_default=None),
|
||||
]
|
||||
|
||||
container7 = _TestContainerClass7(obj=None)
|
||||
outdict = dataclass_to_dict(container7)
|
||||
container7b = dataclass_from_dict(_TestContainerClass7, {})
|
||||
assert container7 == container7b
|
||||
|
||||
@@ -149,16 +149,13 @@ class _BoundTestMessageSenderSync(BoundMessageSender):
|
||||
"""Protocol-specific bound sender."""
|
||||
|
||||
@overload
|
||||
def send(self, message: _TMsg1) -> _TResp1:
|
||||
...
|
||||
def send(self, message: _TMsg1) -> _TResp1: ...
|
||||
|
||||
@overload
|
||||
def send(self, message: _TMsg2) -> _TResp1 | _TResp2:
|
||||
...
|
||||
def send(self, message: _TMsg2) -> _TResp1 | _TResp2: ...
|
||||
|
||||
@overload
|
||||
def send(self, message: _TMsg3) -> None:
|
||||
...
|
||||
def send(self, message: _TMsg3) -> None: ...
|
||||
|
||||
def send(self, message: Message) -> Response | None:
|
||||
"""Send a message synchronously."""
|
||||
@@ -188,16 +185,13 @@ class _BoundTestMessageSenderAsync(BoundMessageSender):
|
||||
"""Protocol-specific bound sender."""
|
||||
|
||||
@overload
|
||||
async def send_async(self, message: _TMsg1) -> _TResp1:
|
||||
...
|
||||
async def send_async(self, message: _TMsg1) -> _TResp1: ...
|
||||
|
||||
@overload
|
||||
async def send_async(self, message: _TMsg2) -> _TResp1 | _TResp2:
|
||||
...
|
||||
async def send_async(self, message: _TMsg2) -> _TResp1 | _TResp2: ...
|
||||
|
||||
@overload
|
||||
async def send_async(self, message: _TMsg3) -> None:
|
||||
...
|
||||
async def send_async(self, message: _TMsg3) -> None: ...
|
||||
|
||||
def send_async(self, message: Message) -> Awaitable[Response | None]:
|
||||
"""Send a message asynchronously."""
|
||||
@@ -227,40 +221,32 @@ class _BoundTestMessageSenderBBoth(BoundMessageSender):
|
||||
"""Protocol-specific bound sender."""
|
||||
|
||||
@overload
|
||||
def send(self, message: _TMsg1) -> _TResp1:
|
||||
...
|
||||
def send(self, message: _TMsg1) -> _TResp1: ...
|
||||
|
||||
@overload
|
||||
def send(self, message: _TMsg2) -> _TResp1 | _TResp2:
|
||||
...
|
||||
def send(self, message: _TMsg2) -> _TResp1 | _TResp2: ...
|
||||
|
||||
@overload
|
||||
def send(self, message: _TMsg3) -> None:
|
||||
...
|
||||
def send(self, message: _TMsg3) -> None: ...
|
||||
|
||||
@overload
|
||||
def send(self, message: _TMsg4) -> None:
|
||||
...
|
||||
def send(self, message: _TMsg4) -> None: ...
|
||||
|
||||
def send(self, message: Message) -> Response | None:
|
||||
"""Send a message synchronously."""
|
||||
return self._sender.send(self._obj, message)
|
||||
|
||||
@overload
|
||||
async def send_async(self, message: _TMsg1) -> _TResp1:
|
||||
...
|
||||
async def send_async(self, message: _TMsg1) -> _TResp1: ...
|
||||
|
||||
@overload
|
||||
async def send_async(self, message: _TMsg2) -> _TResp1 | _TResp2:
|
||||
...
|
||||
async def send_async(self, message: _TMsg2) -> _TResp1 | _TResp2: ...
|
||||
|
||||
@overload
|
||||
async def send_async(self, message: _TMsg3) -> None:
|
||||
...
|
||||
async def send_async(self, message: _TMsg3) -> None: ...
|
||||
|
||||
@overload
|
||||
async def send_async(self, message: _TMsg4) -> None:
|
||||
...
|
||||
async def send_async(self, message: _TMsg4) -> None: ...
|
||||
|
||||
def send_async(self, message: Message) -> Awaitable[Response | None]:
|
||||
"""Send a message asynchronously."""
|
||||
@@ -338,22 +324,19 @@ class _TestSyncMessageReceiver(MessageReceiver):
|
||||
def handler(
|
||||
self,
|
||||
call: Callable[[Any, _TMsg1], _TResp1],
|
||||
) -> Callable[[Any, _TMsg1], _TResp1]:
|
||||
...
|
||||
) -> Callable[[Any, _TMsg1], _TResp1]: ...
|
||||
|
||||
@overload
|
||||
def handler(
|
||||
self,
|
||||
call: Callable[[Any, _TMsg2], _TResp1 | _TResp2],
|
||||
) -> Callable[[Any, _TMsg2], _TResp1 | _TResp2]:
|
||||
...
|
||||
) -> Callable[[Any, _TMsg2], _TResp1 | _TResp2]: ...
|
||||
|
||||
@overload
|
||||
def handler(
|
||||
self,
|
||||
call: Callable[[Any, _TMsg3], None],
|
||||
) -> Callable[[Any, _TMsg3], None]:
|
||||
...
|
||||
) -> Callable[[Any, _TMsg3], None]: ...
|
||||
|
||||
def handler(self, call: Callable) -> Callable:
|
||||
"""Decorator to register message handlers."""
|
||||
@@ -399,22 +382,19 @@ class _TestAsyncMessageReceiver(MessageReceiver):
|
||||
def handler(
|
||||
self,
|
||||
call: Callable[[Any, _TMsg1], Awaitable[_TResp1]],
|
||||
) -> Callable[[Any, _TMsg1], Awaitable[_TResp1]]:
|
||||
...
|
||||
) -> Callable[[Any, _TMsg1], Awaitable[_TResp1]]: ...
|
||||
|
||||
@overload
|
||||
def handler(
|
||||
self,
|
||||
call: Callable[[Any, _TMsg2], Awaitable[_TResp1 | _TResp2]],
|
||||
) -> Callable[[Any, _TMsg2], Awaitable[_TResp1 | _TResp2]]:
|
||||
...
|
||||
) -> Callable[[Any, _TMsg2], Awaitable[_TResp1 | _TResp2]]: ...
|
||||
|
||||
@overload
|
||||
def handler(
|
||||
self,
|
||||
call: Callable[[Any, _TMsg3], Awaitable[None]],
|
||||
) -> Callable[[Any, _TMsg3], Awaitable[None]]:
|
||||
...
|
||||
) -> Callable[[Any, _TMsg3], Awaitable[None]]: ...
|
||||
|
||||
def handler(self, call: Callable) -> Callable:
|
||||
"""Decorator to register message handlers."""
|
||||
|
||||
@@ -75,9 +75,9 @@ class ResponseData:
|
||||
delay_seconds: Annotated[float, IOAttrs('d', store_default=False)] = 0.0
|
||||
login: Annotated[str | None, IOAttrs('l', store_default=False)] = None
|
||||
logout: Annotated[bool, IOAttrs('lo', store_default=False)] = False
|
||||
dir_manifest: Annotated[
|
||||
str | None, IOAttrs('man', store_default=False)
|
||||
] = None
|
||||
dir_manifest: Annotated[str | None, IOAttrs('man', store_default=False)] = (
|
||||
None
|
||||
)
|
||||
uploads: Annotated[
|
||||
tuple[list[str], str, dict] | None, IOAttrs('u', store_default=False)
|
||||
] = None
|
||||
@@ -97,9 +97,9 @@ class ResponseData:
|
||||
input_prompt: Annotated[
|
||||
tuple[str, bool] | None, IOAttrs('inp', store_default=False)
|
||||
] = None
|
||||
end_message: Annotated[
|
||||
str | None, IOAttrs('em', store_default=False)
|
||||
] = None
|
||||
end_message: Annotated[str | None, IOAttrs('em', store_default=False)] = (
|
||||
None
|
||||
)
|
||||
end_message_end: Annotated[str, IOAttrs('eme', store_default=False)] = '\n'
|
||||
end_command: Annotated[
|
||||
tuple[str, dict] | None, IOAttrs('ec', store_default=False)
|
||||
|
||||
@@ -63,9 +63,9 @@ class PrivateHostingConfig:
|
||||
randomize: bool = False
|
||||
tutorial: bool = False
|
||||
custom_team_names: tuple[str, str] | None = None
|
||||
custom_team_colors: tuple[
|
||||
tuple[float, float, float], tuple[float, float, float]
|
||||
] | None = None
|
||||
custom_team_colors: (
|
||||
tuple[tuple[float, float, float], tuple[float, float, float]] | None
|
||||
) = None
|
||||
playlist: list[dict[str, Any]] | None = None
|
||||
exit_minutes: float = 120.0
|
||||
exit_minutes_unclean: float = 180.0
|
||||
|
||||
@@ -134,9 +134,9 @@ class ServerConfig:
|
||||
team_names: tuple[str, str] | None = None
|
||||
|
||||
# Team colors (teams mode only).
|
||||
team_colors: tuple[
|
||||
tuple[float, float, float], tuple[float, float, float]
|
||||
] | None = None
|
||||
team_colors: (
|
||||
tuple[tuple[float, float, float], tuple[float, float, float]] | None
|
||||
) = None
|
||||
|
||||
# Whether to enable the queue where players can line up before entering
|
||||
# your server. Disabling this can be used as a workaround to deal with
|
||||
|
||||
@@ -18,10 +18,10 @@ if TYPE_CHECKING:
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class DirectoryManifestFile:
|
||||
"""Describes metadata and hashes for a file in a manifest."""
|
||||
"""Describes a file in a manifest."""
|
||||
|
||||
filehash: Annotated[str, IOAttrs('h')]
|
||||
filesize: Annotated[int, IOAttrs('s')]
|
||||
hash_sha256: Annotated[str, IOAttrs('h')]
|
||||
size: Annotated[int, IOAttrs('s')]
|
||||
|
||||
|
||||
@ioprepped
|
||||
@ -67,7 +67,7 @@ class DirectoryManifest:
|
||||
return (
|
||||
filepath,
|
||||
DirectoryManifestFile(
|
||||
filehash=sha.hexdigest(), filesize=filesize
|
||||
hash_sha256=sha.hexdigest(), size=filesize
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@ -52,7 +52,7 @@ PY_REQUIREMENTS = [
|
||||
PyRequirement(pipname='requests', minversion=[2, 31, 0]),
|
||||
PyRequirement(pipname='pdoc', minversion=[14, 4, 0]),
|
||||
PyRequirement(pipname='PyYAML', minversion=[6, 0, 1]),
|
||||
PyRequirement(pipname='black', minversion=[23, 12, 1]),
|
||||
PyRequirement(pipname='black', minversion=[24, 1, 1]),
|
||||
PyRequirement(pipname='typing_extensions', minversion=[4, 9, 0]),
|
||||
PyRequirement(pipname='types-filelock', minversion=[3, 2, 7]),
|
||||
PyRequirement(pipname='types-requests', minversion=[2, 31, 0, 20240106]),
|
||||
@ -614,7 +614,7 @@ def _get_server_config_template_yaml(projroot: str) -> str:
|
||||
# Ignore indented lines (our few multi-line special cases).
|
||||
continue
|
||||
|
||||
if line.startswith(']'):
|
||||
if line.startswith(']') or line.startswith(')'):
|
||||
# Ignore closing lines (our few multi-line special cases).
|
||||
continue
|
||||
|
||||
@ -643,7 +643,7 @@ def _get_server_config_template_yaml(projroot: str) -> str:
|
||||
before_equal_sign = before_equal_sign.strip()
|
||||
vval_raw = vval_raw.strip()
|
||||
vname = before_equal_sign.split()[0]
|
||||
assert vname.endswith(':')
|
||||
assert vname.endswith(':'), f"'{vname}' does not end with ':'"
|
||||
vname = vname[:-1]
|
||||
vval: Any
|
||||
if vval_raw == 'field(default_factory=list)':
|
||||
|
||||
@ -83,57 +83,46 @@ if TYPE_CHECKING:
|
||||
class _CallNoArgs(Generic[OutT]):
|
||||
"""Single argument variant of call wrapper."""
|
||||
|
||||
def __init__(self, _call: Callable[[], OutT]):
|
||||
...
|
||||
def __init__(self, _call: Callable[[], OutT]): ...
|
||||
|
||||
def __call__(self) -> OutT:
|
||||
...
|
||||
def __call__(self) -> OutT: ...
|
||||
|
||||
class _Call1Arg(Generic[In1T, OutT]):
|
||||
"""Single argument variant of call wrapper."""
|
||||
|
||||
def __init__(self, _call: Callable[[In1T], OutT]):
|
||||
...
|
||||
def __init__(self, _call: Callable[[In1T], OutT]): ...
|
||||
|
||||
def __call__(self, _arg1: In1T) -> OutT:
|
||||
...
|
||||
def __call__(self, _arg1: In1T) -> OutT: ...
|
||||
|
||||
class _Call2Args(Generic[In1T, In2T, OutT]):
|
||||
"""Two argument variant of call wrapper"""
|
||||
|
||||
def __init__(self, _call: Callable[[In1T, In2T], OutT]):
|
||||
...
|
||||
def __init__(self, _call: Callable[[In1T, In2T], OutT]): ...
|
||||
|
||||
def __call__(self, _arg1: In1T, _arg2: In2T) -> OutT:
|
||||
...
|
||||
def __call__(self, _arg1: In1T, _arg2: In2T) -> OutT: ...
|
||||
|
||||
class _Call3Args(Generic[In1T, In2T, In3T, OutT]):
|
||||
"""Three argument variant of call wrapper"""
|
||||
|
||||
def __init__(self, _call: Callable[[In1T, In2T, In3T], OutT]):
|
||||
...
|
||||
def __init__(self, _call: Callable[[In1T, In2T, In3T], OutT]): ...
|
||||
|
||||
def __call__(self, _arg1: In1T, _arg2: In2T, _arg3: In3T) -> OutT:
|
||||
...
|
||||
def __call__(self, _arg1: In1T, _arg2: In2T, _arg3: In3T) -> OutT: ...
|
||||
|
||||
class _Call4Args(Generic[In1T, In2T, In3T, In4T, OutT]):
|
||||
"""Four argument variant of call wrapper"""
|
||||
|
||||
def __init__(self, _call: Callable[[In1T, In2T, In3T, In4T], OutT]):
|
||||
...
|
||||
def __init__(self, _call: Callable[[In1T, In2T, In3T, In4T], OutT]): ...
|
||||
|
||||
def __call__(
|
||||
self, _arg1: In1T, _arg2: In2T, _arg3: In3T, _arg4: In4T
|
||||
) -> OutT:
|
||||
...
|
||||
) -> OutT: ...
|
||||
|
||||
class _Call5Args(Generic[In1T, In2T, In3T, In4T, In5T, OutT]):
|
||||
"""Five argument variant of call wrapper"""
|
||||
|
||||
def __init__(
|
||||
self, _call: Callable[[In1T, In2T, In3T, In4T, In5T], OutT]
|
||||
):
|
||||
...
|
||||
): ...
|
||||
|
||||
def __call__(
|
||||
self,
|
||||
@ -142,16 +131,14 @@ if TYPE_CHECKING:
|
||||
_arg3: In3T,
|
||||
_arg4: In4T,
|
||||
_arg5: In5T,
|
||||
) -> OutT:
|
||||
...
|
||||
) -> OutT: ...
|
||||
|
||||
class _Call6Args(Generic[In1T, In2T, In3T, In4T, In5T, In6T, OutT]):
|
||||
"""Six argument variant of call wrapper"""
|
||||
|
||||
def __init__(
|
||||
self, _call: Callable[[In1T, In2T, In3T, In4T, In5T, In6T], OutT]
|
||||
):
|
||||
...
|
||||
): ...
|
||||
|
||||
def __call__(
|
||||
self,
|
||||
@ -161,8 +148,7 @@ if TYPE_CHECKING:
|
||||
_arg4: In4T,
|
||||
_arg5: In5T,
|
||||
_arg6: In6T,
|
||||
) -> OutT:
|
||||
...
|
||||
) -> OutT: ...
|
||||
|
||||
class _Call7Args(Generic[In1T, In2T, In3T, In4T, In5T, In6T, In7T, OutT]):
|
||||
"""Seven argument variant of call wrapper"""
|
||||
@ -170,8 +156,7 @@ if TYPE_CHECKING:
|
||||
def __init__(
|
||||
self,
|
||||
_call: Callable[[In1T, In2T, In3T, In4T, In5T, In6T, In7T], OutT],
|
||||
):
|
||||
...
|
||||
): ...
|
||||
|
||||
def __call__(
|
||||
self,
|
||||
@ -182,50 +167,43 @@ if TYPE_CHECKING:
|
||||
_arg5: In5T,
|
||||
_arg6: In6T,
|
||||
_arg7: In7T,
|
||||
) -> OutT:
|
||||
...
|
||||
) -> OutT: ...
|
||||
|
||||
# No arg call; no args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@overload
|
||||
def Call(call: Callable[[], OutT]) -> _CallNoArgs[OutT]:
|
||||
...
|
||||
def Call(call: Callable[[], OutT]) -> _CallNoArgs[OutT]: ...
|
||||
|
||||
# 1 arg call; 1 arg bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@overload
|
||||
def Call(call: Callable[[In1T], OutT], arg1: In1T) -> _CallNoArgs[OutT]:
|
||||
...
|
||||
def Call(call: Callable[[In1T], OutT], arg1: In1T) -> _CallNoArgs[OutT]: ...
|
||||
|
||||
# 1 arg call; no args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@overload
|
||||
def Call(call: Callable[[In1T], OutT]) -> _Call1Arg[In1T, OutT]:
|
||||
...
|
||||
def Call(call: Callable[[In1T], OutT]) -> _Call1Arg[In1T, OutT]: ...
|
||||
|
||||
# 2 arg call; 2 args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@overload
|
||||
def Call(
|
||||
call: Callable[[In1T, In2T], OutT], arg1: In1T, arg2: In2T
|
||||
) -> _CallNoArgs[OutT]:
|
||||
...
|
||||
) -> _CallNoArgs[OutT]: ...
|
||||
|
||||
# 2 arg call; 1 arg bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@overload
|
||||
def Call(
|
||||
call: Callable[[In1T, In2T], OutT], arg1: In1T
|
||||
) -> _Call1Arg[In2T, OutT]:
|
||||
...
|
||||
) -> _Call1Arg[In2T, OutT]: ...
|
||||
|
||||
# 2 arg call; no args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@overload
|
||||
def Call(
|
||||
call: Callable[[In1T, In2T], OutT]
|
||||
) -> _Call2Args[In1T, In2T, OutT]:
|
||||
...
|
||||
) -> _Call2Args[In1T, In2T, OutT]: ...
|
||||
|
||||
# 3 arg call; 3 args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@ -235,32 +213,28 @@ if TYPE_CHECKING:
|
||||
arg1: In1T,
|
||||
arg2: In2T,
|
||||
arg3: In3T,
|
||||
) -> _CallNoArgs[OutT]:
|
||||
...
|
||||
) -> _CallNoArgs[OutT]: ...
|
||||
|
||||
# 3 arg call; 2 args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@overload
|
||||
def Call(
|
||||
call: Callable[[In1T, In2T, In3T], OutT], arg1: In1T, arg2: In2T
|
||||
) -> _Call1Arg[In3T, OutT]:
|
||||
...
|
||||
) -> _Call1Arg[In3T, OutT]: ...
|
||||
|
||||
# 3 arg call; 1 arg bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@overload
|
||||
def Call(
|
||||
call: Callable[[In1T, In2T, In3T], OutT], arg1: In1T
|
||||
) -> _Call2Args[In2T, In3T, OutT]:
|
||||
...
|
||||
) -> _Call2Args[In2T, In3T, OutT]: ...
|
||||
|
||||
# 3 arg call; no args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@overload
|
||||
def Call(
|
||||
call: Callable[[In1T, In2T, In3T], OutT]
|
||||
) -> _Call3Args[In1T, In2T, In3T, OutT]:
|
||||
...
|
||||
) -> _Call3Args[In1T, In2T, In3T, OutT]: ...
|
||||
|
||||
# 4 arg call; 4 args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@ -271,8 +245,7 @@ if TYPE_CHECKING:
|
||||
arg2: In2T,
|
||||
arg3: In3T,
|
||||
arg4: In4T,
|
||||
) -> _CallNoArgs[OutT]:
|
||||
...
|
||||
) -> _CallNoArgs[OutT]: ...
|
||||
|
||||
# 4 arg call; 3 args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@ -282,8 +255,7 @@ if TYPE_CHECKING:
|
||||
arg1: In1T,
|
||||
arg2: In2T,
|
||||
arg3: In3T,
|
||||
) -> _Call1Arg[In4T, OutT]:
|
||||
...
|
||||
) -> _Call1Arg[In4T, OutT]: ...
|
||||
|
||||
# 4 arg call; 2 args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@ -292,8 +264,7 @@ if TYPE_CHECKING:
|
||||
call: Callable[[In1T, In2T, In3T, In4T], OutT],
|
||||
arg1: In1T,
|
||||
arg2: In2T,
|
||||
) -> _Call2Args[In3T, In4T, OutT]:
|
||||
...
|
||||
) -> _Call2Args[In3T, In4T, OutT]: ...
|
||||
|
||||
# 4 arg call; 1 arg bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@ -301,16 +272,14 @@ if TYPE_CHECKING:
|
||||
def Call(
|
||||
call: Callable[[In1T, In2T, In3T, In4T], OutT],
|
||||
arg1: In1T,
|
||||
) -> _Call3Args[In2T, In3T, In4T, OutT]:
|
||||
...
|
||||
) -> _Call3Args[In2T, In3T, In4T, OutT]: ...
|
||||
|
||||
# 4 arg call; no args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@overload
|
||||
def Call(
|
||||
call: Callable[[In1T, In2T, In3T, In4T], OutT],
|
||||
) -> _Call4Args[In1T, In2T, In3T, In4T, OutT]:
|
||||
...
|
||||
) -> _Call4Args[In1T, In2T, In3T, In4T, OutT]: ...
|
||||
|
||||
# 5 arg call; 5 args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@ -322,8 +291,7 @@ if TYPE_CHECKING:
|
||||
arg3: In3T,
|
||||
arg4: In4T,
|
||||
arg5: In5T,
|
||||
) -> _CallNoArgs[OutT]:
|
||||
...
|
||||
) -> _CallNoArgs[OutT]: ...
|
||||
|
||||
# 6 arg call; 6 args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@ -336,8 +304,7 @@ if TYPE_CHECKING:
|
||||
arg4: In4T,
|
||||
arg5: In5T,
|
||||
arg6: In6T,
|
||||
) -> _CallNoArgs[OutT]:
|
||||
...
|
||||
) -> _CallNoArgs[OutT]: ...
|
||||
|
||||
# 7 arg call; 7 args bundled.
|
||||
# noinspection PyPep8Naming
|
||||
@ -351,12 +318,10 @@ if TYPE_CHECKING:
|
||||
arg5: In5T,
|
||||
arg6: In6T,
|
||||
arg7: In7T,
|
||||
) -> _CallNoArgs[OutT]:
|
||||
...
|
||||
) -> _CallNoArgs[OutT]: ...
|
||||
|
||||
# noinspection PyPep8Naming
|
||||
def Call(*_args: Any, **_keywds: Any) -> Any:
|
||||
...
|
||||
def Call(*_args: Any, **_keywds: Any) -> Any: ...
|
||||
|
||||
# (Type-safe Partial)
|
||||
# A convenient wrapper around functools.partial which adds type-safety
|
||||
|
||||
@ -32,6 +32,7 @@ class HostConfig:
|
||||
user: str = 'ubuntu'
|
||||
port: int = 22
|
||||
mosh_port: int | None = None
|
||||
mosh_port_2: int | None = None
|
||||
mosh_server_path: str | None = None
|
||||
mosh_shell: str = 'sh'
|
||||
workspaces_root: str = '/home/${USER}/cloudshell_workspaces'
|
||||
|
||||
@ -11,7 +11,13 @@ data formats in a nondestructive manner.
from __future__ import annotations

from efro.util import set_canonical_module_names
from efro.dataclassio._base import Codec, IOAttrs, IOExtendedData
from efro.dataclassio._base import (
Codec,
IOAttrs,
IOExtendedData,
IOMultiType,
EXTRA_ATTRS_ATTR,
)
from efro.dataclassio._prep import (
ioprep,
ioprepped,
@ -29,20 +35,22 @@ from efro.dataclassio._api import (
)

__all__ = [
'JsonStyle',
'Codec',
'DataclassFieldLookup',
'EXTRA_ATTRS_ATTR',
'IOAttrs',
'IOExtendedData',
'ioprep',
'ioprepped',
'will_ioprep',
'is_ioprepped_dataclass',
'DataclassFieldLookup',
'dataclass_to_dict',
'dataclass_to_json',
'IOMultiType',
'JsonStyle',
'dataclass_from_dict',
'dataclass_from_json',
'dataclass_to_dict',
'dataclass_to_json',
'dataclass_validate',
'ioprep',
'ioprepped',
'is_ioprepped_dataclass',
'will_ioprep',
]

# Have these things present themselves cleanly as 'thismodule.SomeClass'

@ -27,7 +27,7 @@ class JsonStyle(Enum):
"""Different style types for json."""

# Single line, no spaces, no sorting. Not deterministic.
# Use this for most storage purposes.
# Use this where speed is more important than determinism.
FAST = 'fast'

# Single line, no spaces, sorted keys. Deterministic.
@ -40,7 +40,9 @@ class JsonStyle(Enum):

def dataclass_to_dict(
obj: Any, codec: Codec = Codec.JSON, coerce_to_float: bool = True
obj: Any,
codec: Codec = Codec.JSON,
coerce_to_float: bool = True,
) -> dict:
"""Given a dataclass object, return a json-friendly dict.

@ -101,32 +103,36 @@ def dataclass_from_dict(

The dict must be formatted to match the specified codec (generally
json-friendly object types). This means that sequence values such as
tuples or sets should be passed as lists, enums should be passed as their
associated values, nested dataclasses should be passed as dicts, etc.
tuples or sets should be passed as lists, enums should be passed as
their associated values, nested dataclasses should be passed as dicts,
etc.

All values are checked to ensure their types/values are valid.

Data for attributes of type Any will be checked to ensure they match
types supported directly by json. This does not include types such
as tuples which are implicitly translated by Python's json module
(as this would break the ability to do a lossless round-trip with data).
(as this would break the ability to do a lossless round-trip with
data).

If coerce_to_float is True, int values passed for float typed fields
will be converted to float values. Otherwise, a TypeError is raised.

If allow_unknown_attrs is False, AttributeErrors will be raised for
attributes present in the dict but not on the data class. Otherwise, they
will be preserved as part of the instance and included if it is
exported back to a dict, unless discard_unknown_attrs is True, in which
case they will simply be discarded.
If `allow_unknown_attrs` is False, AttributeErrors will be raised for
attributes present in the dict but not on the data class. Otherwise,
they will be preserved as part of the instance and included if it is
exported back to a dict, unless `discard_unknown_attrs` is True, in
which case they will simply be discarded.
"""
return _Inputter(
val = _Inputter(
cls,
codec=codec,
coerce_to_float=coerce_to_float,
allow_unknown_attrs=allow_unknown_attrs,
discard_unknown_attrs=discard_unknown_attrs,
).run(values)
assert isinstance(val, cls)
return val

def dataclass_from_json(

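For illustration, a minimal round trip through the dataclass_to_dict / dataclass_from_dict API documented above. This is a sketch based only on the signatures and docstrings shown in this diff; the Prefs dataclass is hypothetical and not part of this commit.

```python
from dataclasses import dataclass
from typing import Annotated

from efro.dataclassio import IOAttrs, dataclass_from_dict, dataclass_to_dict, ioprepped


@ioprepped
@dataclass
class Prefs:
    # 'v' is the short storage name used in the serialized dict.
    volume: Annotated[float, IOAttrs('v')] = 1.0


# With the default coerce_to_float=True, an int given for a float field
# is accepted and converted rather than raising a TypeError.
data = dataclass_to_dict(Prefs(volume=1))  # -> {'v': 1.0}
prefs = dataclass_from_dict(Prefs, data)
assert prefs.volume == 1.0
```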
@ -8,39 +8,23 @@ import dataclasses
|
||||
import typing
|
||||
import datetime
|
||||
from enum import Enum
|
||||
from typing import TYPE_CHECKING, get_args
|
||||
from typing import TYPE_CHECKING, get_args, TypeVar, Generic
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
from typing import _AnnotatedAlias # type: ignore
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Callable
|
||||
from typing import Any, Callable, Literal, ClassVar, Self
|
||||
|
||||
# Types which we can pass through as-is.
|
||||
SIMPLE_TYPES = {int, bool, str, float, type(None)}
|
||||
|
||||
# Attr name for dict of extra attributes included on dataclass instances.
|
||||
# Note that this is only added if extra attributes are present.
|
||||
# Attr name for dict of extra attributes included on dataclass
|
||||
# instances. Note that this is only added if extra attributes are
|
||||
# present.
|
||||
EXTRA_ATTRS_ATTR = '_DCIOEXATTRS'
|
||||
|
||||
|
||||
def _raise_type_error(
|
||||
fieldpath: str, valuetype: type, expected: tuple[type, ...]
|
||||
) -> None:
|
||||
"""Raise an error when a field value's type does not match expected."""
|
||||
assert isinstance(expected, tuple)
|
||||
assert all(isinstance(e, type) for e in expected)
|
||||
if len(expected) == 1:
|
||||
expected_str = expected[0].__name__
|
||||
else:
|
||||
expected_str = ' | '.join(t.__name__ for t in expected)
|
||||
raise TypeError(
|
||||
f'Invalid value type for "{fieldpath}";'
|
||||
f' expected "{expected_str}", got'
|
||||
f' "{valuetype.__name__}".'
|
||||
)
|
||||
|
||||
|
||||
class Codec(Enum):
|
||||
"""Specifies expected data format exported to or imported from."""
|
||||
|
||||
@ -78,32 +62,46 @@ class IOExtendedData:
"""

def _is_valid_for_codec(obj: Any, codec: Codec) -> bool:
"""Return whether a value consists solely of json-supported types.
EnumT = TypeVar('EnumT', bound=Enum)

Note that this does not include things like tuples which are
implicitly translated to lists by python's json module.

class IOMultiType(Generic[EnumT]):
"""A base class for types that can map to multiple dataclass types.

This enables usage of high level base classes (for example
a 'Message' type) in annotations, with dataclassio automatically
serializing & deserializing dataclass subclasses based on their
type ('MessagePing', 'MessageChat', etc.)

Standard usage involves creating a class which inherits from this
one which acts as a 'registry', and then creating dataclass classes
inheriting from that registry class. Dataclassio will then do the
right thing when that registry class is used in type annotations.

See tests/test_efro/test_dataclassio.py for examples.
"""
if obj is None:
return True

objtype = type(obj)
if objtype in (int, float, str, bool):
return True
if objtype is dict:
# JSON 'objects' supports only string dict keys, but all value types.
return all(
isinstance(k, str) and _is_valid_for_codec(v, codec)
for k, v in obj.items()
)
if objtype is list:
return all(_is_valid_for_codec(elem, codec) for elem in obj)
# Dataclasses inheriting from an IOMultiType will store a type-id
# with this key in their serialized data. This value can be
# overridden in IOMultiType subclasses as desired.
ID_STORAGE_NAME = '_dciotype'

# A few things are valid in firestore but not json.
if issubclass(objtype, datetime.datetime) or objtype is bytes:
return codec is Codec.FIRESTORE
@classmethod
def get_type(cls, type_id: EnumT) -> type[Self]:
"""Return a specific subclass given a type-id."""
raise NotImplementedError()

return False
@classmethod
def get_type_id(cls) -> EnumT:
"""Return the type-id for this subclass."""
raise NotImplementedError()

@classmethod
def get_type_id_type(cls) -> type[EnumT]:
"""Return the Enum type this class uses as its type-id."""
out: type[EnumT] = cls.__orig_bases__[0].__args__[0] # type: ignore
assert issubclass(out, Enum)
return out

class IOAttrs:
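Before the IOAttrs tweaks below, a minimal sketch of the registry pattern described in the IOMultiType docstring above. The enum and message classes here are hypothetical stand-ins; the real examples live in tests/test_efro/test_dataclassio.py.

```python
from dataclasses import dataclass
from enum import Enum
from typing import Annotated

from efro.dataclassio import IOAttrs, IOMultiType, ioprepped


class MsgType(Enum):
    PING = 'ping'
    CHAT = 'chat'


class MyMessage(IOMultiType[MsgType]):
    """Registry base class; use this type in annotations."""

    @classmethod
    def get_type(cls, type_id: MsgType) -> type['MyMessage']:
        # Map each type-id back to its concrete dataclass.
        return {MsgType.PING: Ping, MsgType.CHAT: Chat}[type_id]


@ioprepped
@dataclass
class Ping(MyMessage):
    @classmethod
    def get_type_id(cls) -> MsgType:
        return MsgType.PING


@ioprepped
@dataclass
class Chat(MyMessage):
    text: Annotated[str, IOAttrs('t')]

    @classmethod
    def get_type_id(cls) -> MsgType:
        return MsgType.CHAT
```

With a registry like this in place, a field annotated as MyMessage (or a list of them) should round-trip each element to the correct subclass via the stored '_dciotype' id, per the _Inputter/_Outputter changes further down in this diff.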
@ -192,7 +190,7 @@ class IOAttrs:
|
||||
"""Ensure the IOAttrs instance is ok to use with the provided field."""
|
||||
|
||||
# Turning off store_default requires the field to have either
|
||||
# a default or a a default_factory or for us to have soft equivalents.
|
||||
# a default or a default_factory or for us to have soft equivalents.
|
||||
|
||||
if not self.store_default:
|
||||
field_default_factory: Any = field.default_factory
|
||||
@ -241,6 +239,52 @@ class IOAttrs:
|
||||
)
|
||||
|
||||
|
||||
def _raise_type_error(
|
||||
fieldpath: str, valuetype: type, expected: tuple[type, ...]
|
||||
) -> None:
|
||||
"""Raise an error when a field value's type does not match expected."""
|
||||
assert isinstance(expected, tuple)
|
||||
assert all(isinstance(e, type) for e in expected)
|
||||
if len(expected) == 1:
|
||||
expected_str = expected[0].__name__
|
||||
else:
|
||||
expected_str = ' | '.join(t.__name__ for t in expected)
|
||||
raise TypeError(
|
||||
f'Invalid value type for "{fieldpath}";'
|
||||
f' expected "{expected_str}", got'
|
||||
f' "{valuetype.__name__}".'
|
||||
)
|
||||
|
||||
|
||||
def _is_valid_for_codec(obj: Any, codec: Codec) -> bool:
|
||||
"""Return whether a value consists solely of json-supported types.
|
||||
|
||||
Note that this does not include things like tuples which are
|
||||
implicitly translated to lists by python's json module.
|
||||
"""
|
||||
if obj is None:
|
||||
return True
|
||||
|
||||
objtype = type(obj)
|
||||
if objtype in (int, float, str, bool):
|
||||
return True
|
||||
if objtype is dict:
|
||||
# JSON 'objects' supports only string dict keys, but all value
|
||||
# types.
|
||||
return all(
|
||||
isinstance(k, str) and _is_valid_for_codec(v, codec)
|
||||
for k, v in obj.items()
|
||||
)
|
||||
if objtype is list:
|
||||
return all(_is_valid_for_codec(elem, codec) for elem in obj)
|
||||
|
||||
# A few things are valid in firestore but not json.
|
||||
if issubclass(objtype, datetime.datetime) or objtype is bytes:
|
||||
return codec is Codec.FIRESTORE
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _get_origin(anntype: Any) -> Any:
|
||||
"""Given a type annotation, return its origin or itself if there is none.
|
||||
|
||||
@ -255,9 +299,9 @@ def _get_origin(anntype: Any) -> Any:
|
||||
|
||||
def _parse_annotated(anntype: Any) -> tuple[Any, IOAttrs | None]:
|
||||
"""Parse Annotated() constructs, returning annotated type & IOAttrs."""
|
||||
# If we get an Annotated[foo, bar, eep] we take
|
||||
# foo as the actual type, and we look for IOAttrs instances in
|
||||
# bar/eep to affect our behavior.
|
||||
# If we get an Annotated[foo, bar, eep] we take foo as the actual
|
||||
# type, and we look for IOAttrs instances in bar/eep to affect our
|
||||
# behavior.
|
||||
ioattrs: IOAttrs | None = None
|
||||
if isinstance(anntype, _AnnotatedAlias):
|
||||
annargs = get_args(anntype)
|
||||
@ -270,8 +314,8 @@ def _parse_annotated(anntype: Any) -> tuple[Any, IOAttrs | None]:
|
||||
)
|
||||
ioattrs = annarg
|
||||
|
||||
# I occasionally just throw a 'x' down when I mean IOAttrs('x');
|
||||
# catch these mistakes.
|
||||
# I occasionally just throw a 'x' down when I mean
|
||||
# IOAttrs('x'); catch these mistakes.
|
||||
elif isinstance(annarg, (str, int, float, bool)):
|
||||
raise RuntimeError(
|
||||
f'Raw {type(annarg)} found in Annotated[] entry:'
|
||||
@ -279,3 +323,21 @@ def _parse_annotated(anntype: Any) -> tuple[Any, IOAttrs | None]:
|
||||
)
|
||||
anntype = annargs[0]
|
||||
return anntype, ioattrs
|
||||
|
||||
|
||||
def _get_multitype_type(
|
||||
cls: type[IOMultiType], fieldpath: str, val: Any
|
||||
) -> type[Any]:
|
||||
if not isinstance(val, dict):
|
||||
raise ValueError(
|
||||
f"Found a {type(val)} at '{fieldpath}'; expected a dict."
|
||||
)
|
||||
storename = cls.ID_STORAGE_NAME
|
||||
id_val = val.get(storename)
|
||||
if id_val is None:
|
||||
raise ValueError(
|
||||
f"Expected a '{storename}'" f" value for object at '{fieldpath}'."
|
||||
)
|
||||
id_enum_type = cls.get_type_id_type()
|
||||
id_enum = id_enum_type(id_val)
|
||||
return cls.get_type(id_enum)
|
||||
|
||||
@ -13,7 +13,7 @@ import dataclasses
|
||||
import typing
|
||||
import types
|
||||
import datetime
|
||||
from typing import TYPE_CHECKING, Generic, TypeVar
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from efro.util import enum_by_value, check_utc
|
||||
from efro.dataclassio._base import (
|
||||
@ -25,6 +25,8 @@ from efro.dataclassio._base import (
|
||||
SIMPLE_TYPES,
|
||||
_raise_type_error,
|
||||
IOExtendedData,
|
||||
_get_multitype_type,
|
||||
IOMultiType,
|
||||
)
|
||||
from efro.dataclassio._prep import PrepSession
|
||||
|
||||
@ -34,13 +36,11 @@ if TYPE_CHECKING:
|
||||
from efro.dataclassio._base import IOAttrs
|
||||
from efro.dataclassio._outputter import _Outputter
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
class _Inputter(Generic[T]):
|
||||
class _Inputter:
|
||||
def __init__(
|
||||
self,
|
||||
cls: type[T],
|
||||
cls: type[Any],
|
||||
codec: Codec,
|
||||
coerce_to_float: bool,
|
||||
allow_unknown_attrs: bool = True,
|
||||
@ -59,27 +59,45 @@ class _Inputter(Generic[T]):
|
||||
' when allow_unknown_attrs is False.'
|
||||
)
|
||||
|
||||
def run(self, values: dict) -> T:
|
||||
def run(self, values: dict) -> Any:
|
||||
"""Do the thing."""
|
||||
|
||||
# For special extended data types, call their 'will_output' callback.
|
||||
tcls = self._cls
|
||||
outcls: type[Any]
|
||||
|
||||
if issubclass(tcls, IOExtendedData):
|
||||
# If we're dealing with a multi-type subclass which is NOT a
|
||||
# dataclass, we must rely on its stored type to figure out
|
||||
# what type of dataclass we're going to. If we are a dataclass
|
||||
# then we already know what type we're going to so we can
|
||||
# survive without this, which is often necessary when reading
|
||||
# old data that doesn't have a type id attr yet.
|
||||
if issubclass(self._cls, IOMultiType) and not dataclasses.is_dataclass(
|
||||
self._cls
|
||||
):
|
||||
type_id_val = values.get(self._cls.ID_STORAGE_NAME)
|
||||
if type_id_val is None:
|
||||
raise ValueError(
|
||||
f'No type id value present for multi-type object:'
|
||||
f' {values}.'
|
||||
)
|
||||
type_id_enum = self._cls.get_type_id_type()
|
||||
enum_val = type_id_enum(type_id_val)
|
||||
outcls = self._cls.get_type(enum_val)
|
||||
else:
|
||||
outcls = self._cls
|
||||
|
||||
# FIXME - should probably move this into _dataclass_from_input
|
||||
# so it can work on nested values.
|
||||
if issubclass(outcls, IOExtendedData):
|
||||
is_ext = True
|
||||
tcls.will_input(values)
|
||||
outcls.will_input(values)
|
||||
else:
|
||||
is_ext = False
|
||||
|
||||
out = self._dataclass_from_input(self._cls, '', values)
|
||||
assert isinstance(out, self._cls)
|
||||
out = self._dataclass_from_input(outcls, '', values)
|
||||
assert isinstance(out, outcls)
|
||||
|
||||
if is_ext:
|
||||
# mypy complains that we're no longer returning a T
|
||||
# if we operate on out directly.
|
||||
out2 = out
|
||||
assert isinstance(out2, IOExtendedData)
|
||||
out2.did_input()
|
||||
out.did_input()
|
||||
|
||||
return out
|
||||
|
||||
@ -111,8 +129,8 @@ class _Inputter(Generic[T]):
|
||||
# noinspection PyPep8
|
||||
if origin is typing.Union or origin is types.UnionType:
|
||||
# Currently, the only unions we support are None/Value
|
||||
# (translated from Optional), which we verified on prep.
|
||||
# So let's treat this as a simple optional case.
|
||||
# (translated from Optional), which we verified on prep. So
|
||||
# let's treat this as a simple optional case.
|
||||
if value is None:
|
||||
return None
|
||||
childanntypes_l = [
|
||||
@ -123,13 +141,15 @@ class _Inputter(Generic[T]):
|
||||
cls, fieldpath, childanntypes_l[0], value, ioattrs
|
||||
)
|
||||
|
||||
# Everything below this point assumes the annotation type resolves
|
||||
# to a concrete type. (This should have been verified at prep time).
|
||||
# Everything below this point assumes the annotation type
|
||||
# resolves to a concrete type. (This should have been verified
|
||||
# at prep time).
|
||||
assert isinstance(origin, type)
|
||||
|
||||
if origin in SIMPLE_TYPES:
|
||||
if type(value) is not origin:
|
||||
# Special case: if they want to coerce ints to floats, do so.
|
||||
# Special case: if they want to coerce ints to floats,
|
||||
# do so.
|
||||
if (
|
||||
self._coerce_to_float
|
||||
and origin is float
|
||||
@ -157,6 +177,16 @@ class _Inputter(Generic[T]):
|
||||
if dataclasses.is_dataclass(origin):
|
||||
return self._dataclass_from_input(origin, fieldpath, value)
|
||||
|
||||
# ONLY consider something as a multi-type when it's not a
|
||||
# dataclass (all dataclasses inheriting from the multi-type
|
||||
# should just be processed as dataclasses).
|
||||
if issubclass(origin, IOMultiType):
|
||||
return self._dataclass_from_input(
|
||||
_get_multitype_type(anntype, fieldpath, value),
|
||||
fieldpath,
|
||||
value,
|
||||
)
|
||||
|
||||
if issubclass(origin, Enum):
|
||||
return enum_by_value(origin, value)
|
||||
|
||||
@ -228,10 +258,23 @@ class _Inputter(Generic[T]):
|
||||
f.name: _parse_annotated(prep.annotations[f.name]) for f in fields
|
||||
}
|
||||
|
||||
# Special case: if this is a multi-type class it probably has a
|
||||
# type attr. Ignore that while parsing since we already have a
|
||||
# definite type and it will just pollute extra-attrs otherwise.
|
||||
if issubclass(cls, IOMultiType):
|
||||
type_id_store_name = cls.ID_STORAGE_NAME
|
||||
else:
|
||||
type_id_store_name = None
|
||||
|
||||
# Go through all data in the input, converting it to either dataclass
|
||||
# args or extra data.
|
||||
args: dict[str, Any] = {}
|
||||
for rawkey, value in values.items():
|
||||
|
||||
# Ignore _dciotype or whatnot.
|
||||
if type_id_store_name is not None and rawkey == type_id_store_name:
|
||||
continue
|
||||
|
||||
key = prep.storage_names_to_attr_names.get(rawkey, rawkey)
|
||||
field = fields_by_name.get(key)
|
||||
|
||||
@ -473,6 +516,19 @@ class _Inputter(Generic[T]):
|
||||
# We contain elements of some specified type.
|
||||
assert len(childanntypes) == 1
|
||||
childanntype = childanntypes[0]
|
||||
|
||||
# If our annotation type inherits from IOMultiType, use type-id
|
||||
# values to determine which type to load for each element.
|
||||
if issubclass(childanntype, IOMultiType):
|
||||
return seqtype(
|
||||
self._dataclass_from_input(
|
||||
_get_multitype_type(childanntype, fieldpath, i),
|
||||
fieldpath,
|
||||
i,
|
||||
)
|
||||
for i in value
|
||||
)
|
||||
|
||||
return seqtype(
|
||||
self._value_from_input(cls, fieldpath, childanntype, i, ioattrs)
|
||||
for i in value
|
||||
|
||||
@ -25,6 +25,7 @@ from efro.dataclassio._base import (
|
||||
SIMPLE_TYPES,
|
||||
_raise_type_error,
|
||||
IOExtendedData,
|
||||
IOMultiType,
|
||||
)
|
||||
from efro.dataclassio._prep import PrepSession
|
||||
|
||||
@ -49,6 +50,8 @@ class _Outputter:
|
||||
assert dataclasses.is_dataclass(self._obj)
|
||||
|
||||
# For special extended data types, call their 'will_output' callback.
|
||||
# FIXME - should probably move this into _process_dataclass so it
|
||||
# can work on nested values.
|
||||
if isinstance(self._obj, IOExtendedData):
|
||||
self._obj.will_output()
|
||||
|
||||
@ -69,6 +72,7 @@ class _Outputter:
|
||||
def _process_dataclass(self, cls: type, obj: Any, fieldpath: str) -> Any:
|
||||
# pylint: disable=too-many-locals
|
||||
# pylint: disable=too-many-branches
|
||||
# pylint: disable=too-many-statements
|
||||
prep = PrepSession(explicit=False).prep_dataclass(
|
||||
type(obj), recursion_level=0
|
||||
)
|
||||
@ -139,6 +143,25 @@ class _Outputter:
|
||||
if self._create:
|
||||
assert out is not None
|
||||
out.update(extra_attrs)
|
||||
|
||||
# If this obj inherits from multi-type, store its type id.
|
||||
if isinstance(obj, IOMultiType):
|
||||
type_id = obj.get_type_id()
|
||||
|
||||
# Sanity checks; make sure looking up this id gets us this
|
||||
# type.
|
||||
assert isinstance(type_id.value, str)
|
||||
if obj.get_type(type_id) is not type(obj):
|
||||
raise RuntimeError(
|
||||
f'dataclassio: object of type {type(obj)}'
|
||||
f' gives type-id {type_id} but that id gives type'
|
||||
f' {obj.get_type(type_id)}. Something is out of sync.'
|
||||
)
|
||||
assert obj.get_type(type_id) is type(obj)
|
||||
if self._create:
|
||||
assert out is not None
|
||||
out[obj.ID_STORAGE_NAME] = type_id.value
|
||||
|
||||
return out
|
||||
|
||||
def _process_value(
|
||||
@ -231,6 +254,7 @@ class _Outputter:
|
||||
f'Expected a list for {fieldpath};'
|
||||
f' found a {type(value)}'
|
||||
)
|
||||
|
||||
childanntypes = typing.get_args(anntype)
|
||||
|
||||
# 'Any' type children; make sure they are valid values for
|
||||
@ -246,8 +270,37 @@ class _Outputter:
|
||||
# Hmm; should we do a copy here?
|
||||
return value if self._create else None
|
||||
|
||||
# We contain elements of some specified type.
|
||||
# We contain elements of some single specified type.
|
||||
assert len(childanntypes) == 1
|
||||
childanntype = childanntypes[0]
|
||||
|
||||
# If that type is a multi-type, we determine our type per-object.
|
||||
if issubclass(childanntype, IOMultiType):
|
||||
# In the multi-type case, we use each object's own type
|
||||
# to do its conversion, but lets at least make sure each
|
||||
# of those types inherits from the annotated multi-type
|
||||
# class.
|
||||
for x in value:
|
||||
if not isinstance(x, childanntype):
|
||||
raise ValueError(
|
||||
f"Found a {type(x)} value under '{fieldpath}'."
|
||||
f' Everything must inherit from'
|
||||
f' {childanntype}.'
|
||||
)
|
||||
|
||||
if self._create:
|
||||
out: list[Any] = []
|
||||
for x in value:
|
||||
# We know these are dataclasses so no need to do
|
||||
# the generic _process_value.
|
||||
out.append(self._process_dataclass(cls, x, fieldpath))
|
||||
return out
|
||||
for x in value:
|
||||
# We know these are dataclasses so no need to do
|
||||
# the generic _process_value.
|
||||
self._process_dataclass(cls, x, fieldpath)
|
||||
|
||||
# Normal non-multitype case; everything's got the same type.
|
||||
if self._create:
|
||||
return [
|
||||
self._process_value(
|
||||
@ -307,6 +360,21 @@ class _Outputter:
|
||||
)
|
||||
return self._process_dataclass(cls, value, fieldpath)
|
||||
|
||||
# ONLY consider something as a multi-type when it's not a
|
||||
# dataclass (all dataclasses inheriting from the multi-type should
|
||||
# just be processed as dataclasses).
|
||||
if issubclass(origin, IOMultiType):
|
||||
# In the multi-type case, we use each object's own type to
|
||||
# do its conversion, but lets at least make sure each of
|
||||
# those types inherits from the annotated multi-type class.
|
||||
if not isinstance(value, origin):
|
||||
raise ValueError(
|
||||
f"Found a {type(value)} value at '{fieldpath}'."
|
||||
f' It is expected to inherit from {origin}.'
|
||||
)
|
||||
|
||||
return self._process_dataclass(cls, value, fieldpath)
|
||||
|
||||
if issubclass(origin, Enum):
|
||||
if not isinstance(value, origin):
|
||||
raise TypeError(
|
||||
|
||||
@ -17,7 +17,12 @@ import datetime
|
||||
from typing import TYPE_CHECKING, TypeVar, get_type_hints
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
from efro.dataclassio._base import _parse_annotated, _get_origin, SIMPLE_TYPES
|
||||
from efro.dataclassio._base import (
|
||||
_parse_annotated,
|
||||
_get_origin,
|
||||
SIMPLE_TYPES,
|
||||
IOMultiType,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
@ -260,6 +265,13 @@ class PrepSession:
|
||||
|
||||
origin = _get_origin(anntype)
|
||||
|
||||
# If we inherit from IOMultiType, we use its type map to
|
||||
# determine which type we're going to instead of the annotation.
|
||||
# And we can't really check those types because they are
|
||||
# lazy-loaded. So I guess we're done here.
|
||||
if issubclass(origin, IOMultiType):
|
||||
return
|
||||
|
||||
# noinspection PyPep8
|
||||
if origin is typing.Union or origin is types.UnionType:
|
||||
self.prep_union(
|
||||
|
||||
@ -278,9 +278,7 @@ def _desc(obj: Any) -> str:
|
||||
tpss = (
|
||||
f', contains [{tpsj}, ...]'
|
||||
if len(obj) > 3
|
||||
else f', contains [{tpsj}]'
|
||||
if tps
|
||||
else ''
|
||||
else f', contains [{tpsj}]' if tps else ''
|
||||
)
|
||||
extra = f' (len {len(obj)}{tpss})'
|
||||
elif isinstance(obj, dict):
|
||||
@ -299,9 +297,7 @@ def _desc(obj: Any) -> str:
|
||||
pairss = (
|
||||
f', contains {{{pairsj}, ...}}'
|
||||
if len(obj) > 3
|
||||
else f', contains {{{pairsj}}}'
|
||||
if pairs
|
||||
else ''
|
||||
else f', contains {{{pairsj}}}' if pairs else ''
|
||||
)
|
||||
extra = f' (len {len(obj)}{pairss})'
|
||||
if extra is None:
|
||||
|
||||
@ -92,9 +92,9 @@ class LogEntry:
|
||||
# incorporated into custom log processing. To populate this, our
|
||||
# LogHandler class looks for a 'labels' dict passed in the optional
|
||||
# 'extra' dict arg to standard Python log calls.
|
||||
labels: Annotated[
|
||||
dict[str, str], IOAttrs('la', store_default=False)
|
||||
] = field(default_factory=dict)
|
||||
labels: Annotated[dict[str, str], IOAttrs('la', store_default=False)] = (
|
||||
field(default_factory=dict)
|
||||
)
|
||||
|
||||
|
||||
@ioprepped
|
||||
@ -483,11 +483,11 @@ class LogHandler(logging.Handler):
|
||||
# after a short bit if we never get a newline.
|
||||
ship_task = self._file_chunk_ship_task[name]
|
||||
if ship_task is None:
|
||||
self._file_chunk_ship_task[
|
||||
name
|
||||
] = self._event_loop.create_task(
|
||||
self._ship_chunks_task(name),
|
||||
name='log ship file chunks',
|
||||
self._file_chunk_ship_task[name] = (
|
||||
self._event_loop.create_task(
|
||||
self._ship_chunks_task(name),
|
||||
name='log ship file chunks',
|
||||
)
|
||||
)
|
||||
|
||||
except Exception:
|
||||
|
||||
@ -499,8 +499,7 @@ class MessageProtocol:
|
||||
f' @overload\n'
|
||||
f' {pfx}def send{sfx}(self,'
|
||||
f' message: {msgtypevar})'
|
||||
f' -> {rtypevar}:\n'
|
||||
f' ...\n'
|
||||
f' -> {rtypevar}: ...\n'
|
||||
)
|
||||
rtypevar = 'Response | None'
|
||||
if async_pass:
|
||||
@ -607,8 +606,7 @@ class MessageProtocol:
|
||||
f' call: Callable[[Any, {msgtypevar}], '
|
||||
f'{rtypevar}],\n'
|
||||
f' )'
|
||||
f' -> Callable[[Any, {msgtypevar}], {rtypevar}]:\n'
|
||||
f' ...\n'
|
||||
f' -> Callable[[Any, {msgtypevar}], {rtypevar}]: ...\n'
|
||||
)
|
||||
out += (
|
||||
'\n'
|
||||
|
||||
@ -55,12 +55,13 @@ class MessageReceiver:
|
||||
def __init__(self, protocol: MessageProtocol) -> None:
|
||||
self.protocol = protocol
|
||||
self._handlers: dict[type[Message], Callable] = {}
|
||||
self._decode_filter_call: Callable[
|
||||
[Any, dict, Message], None
|
||||
] | None = None
|
||||
self._encode_filter_call: Callable[
|
||||
[Any, Message | None, Response | SysResponse, dict], None
|
||||
] | None = None
|
||||
self._decode_filter_call: (
|
||||
Callable[[Any, dict, Message], None] | None
|
||||
) = None
|
||||
self._encode_filter_call: (
|
||||
Callable[[Any, Message | None, Response | SysResponse, dict], None]
|
||||
| None
|
||||
) = None
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
def register_handler(
|
||||
|
||||
@ -41,18 +41,18 @@ class MessageSender:
|
||||
def __init__(self, protocol: MessageProtocol) -> None:
|
||||
self.protocol = protocol
|
||||
self._send_raw_message_call: Callable[[Any, str], str] | None = None
|
||||
self._send_async_raw_message_call: Callable[
|
||||
[Any, str], Awaitable[str]
|
||||
] | None = None
|
||||
self._send_async_raw_message_ex_call: Callable[
|
||||
[Any, str, Message], Awaitable[str]
|
||||
] | None = None
|
||||
self._encode_filter_call: Callable[
|
||||
[Any, Message, dict], None
|
||||
] | None = None
|
||||
self._decode_filter_call: Callable[
|
||||
[Any, Message, dict, Response | SysResponse], None
|
||||
] | None = None
|
||||
self._send_async_raw_message_call: (
|
||||
Callable[[Any, str], Awaitable[str]] | None
|
||||
) = None
|
||||
self._send_async_raw_message_ex_call: (
|
||||
Callable[[Any, str, Message], Awaitable[str]] | None
|
||||
) = None
|
||||
self._encode_filter_call: (
|
||||
Callable[[Any, Message, dict], None] | None
|
||||
) = None
|
||||
self._decode_filter_call: (
|
||||
Callable[[Any, Message, dict, Response | SysResponse], None] | None
|
||||
) = None
|
||||
self._peer_desc_call: Callable[[Any], str] | None = None
|
||||
|
||||
def send_method(
|
||||
|
||||
@ -317,8 +317,6 @@ _envval = os.environ.get('EFRO_TERMCOLORS')
|
||||
color_enabled: bool = (
|
||||
True
|
||||
if _envval == '1'
|
||||
else False
|
||||
if _envval == '0'
|
||||
else _default_color_enabled()
|
||||
else False if _envval == '0' else _default_color_enabled()
|
||||
)
|
||||
Clr: type[ClrBase] = ClrAlways if color_enabled else ClrNever
|
||||
|
||||
@ -236,7 +236,7 @@ class DirtyBit:
|
||||
auto_dirty_seconds: float | None = None,
|
||||
min_update_interval: float | None = None,
|
||||
):
|
||||
curtime = time.time()
|
||||
curtime = time.monotonic()
|
||||
self._retry_interval = retry_interval
|
||||
self._auto_dirty_seconds = auto_dirty_seconds
|
||||
self._min_update_interval = min_update_interval
|
||||
@ -268,11 +268,13 @@ class DirtyBit:
|
||||
# If we're freshly clean, set our next auto-dirty time (if we have
|
||||
# one).
|
||||
if self._dirty and not value and self._auto_dirty_seconds is not None:
|
||||
self._next_auto_dirty_time = time.time() + self._auto_dirty_seconds
|
||||
self._next_auto_dirty_time = (
|
||||
time.monotonic() + self._auto_dirty_seconds
|
||||
)
|
||||
|
||||
# If we're freshly dirty, schedule an immediate update.
|
||||
if not self._dirty and value:
|
||||
self._next_update_time = time.time()
|
||||
self._next_update_time = time.monotonic()
|
||||
|
||||
# If they want to enforce a minimum update interval,
|
||||
# push out the next update time if it hasn't been long enough.
|
||||
@ -295,7 +297,7 @@ class DirtyBit:
|
||||
Takes into account the amount of time passed since the target
|
||||
was marked dirty or since should_update last returned True.
|
||||
"""
|
||||
curtime = time.time()
|
||||
curtime = time.monotonic()
|
||||
|
||||
# Auto-dirty ourself if we're into that.
|
||||
if (
|
||||
@ -459,8 +461,7 @@ if TYPE_CHECKING:
|
||||
class ValueDispatcherMethod(Generic[ValT, RetT]):
|
||||
"""Used by the valuedispatchmethod decorator."""
|
||||
|
||||
def __call__(self, value: ValT) -> RetT:
|
||||
...
|
||||
def __call__(self, value: ValT) -> RetT: ...
|
||||
|
||||
def register(
|
||||
self, value: ValT
|
||||
@ -872,3 +873,11 @@ def ago_str(
timedelta_str(now - timeval, maxparts=maxparts, decimals=decimals)
+ ' ago'
)

def split_list(input_list: list[T], max_length: int) -> list[list[T]]:
"""Split a single list into smaller lists."""
return [
input_list[i : i + max_length]
for i in range(0, len(input_list), max_length)
]
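For reference, the new helper in use (assuming it lands in efro.util alongside ago_str above):

```python
from efro.util import split_list

# Seven items split into chunks of at most three.
assert split_list([1, 2, 3, 4, 5, 6, 7], 3) == [[1, 2, 3], [4, 5, 6], [7]]
```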
@ -105,13 +105,11 @@ def extract_flag(args: list[str], name: str) -> bool:
|
||||
@overload
|
||||
def extract_arg(
|
||||
args: list[str], name: str, required: Literal[False] = False
|
||||
) -> str | None:
|
||||
...
|
||||
) -> str | None: ...
|
||||
|
||||
|
||||
@overload
|
||||
def extract_arg(args: list[str], name: str, required: Literal[True]) -> str:
|
||||
...
|
||||
def extract_arg(args: list[str], name: str, required: Literal[True]) -> str: ...
|
||||
|
||||
|
||||
def extract_arg(
|
||||
|
||||
@ -103,14 +103,14 @@ def format_project_cpp_files(projroot: Path, full: bool) -> None:
|
||||
dirtyfiles = cache.get_dirty_files()
|
||||
|
||||
def format_file(filename: str) -> dict[str, Any]:
|
||||
start_time = time.time()
|
||||
start_time = time.monotonic()
|
||||
|
||||
# Note: seems os.system does not unlock the gil;
|
||||
# make sure to use subprocess.
|
||||
result = subprocess.call(['clang-format', '-i', filename])
|
||||
if result != 0:
|
||||
raise RuntimeError(f'Formatting failed for {filename}')
|
||||
duration = time.time() - start_time
|
||||
duration = time.monotonic() - start_time
|
||||
print(f'Formatted {filename} in {duration:.2f} seconds.')
|
||||
sys.stdout.flush()
|
||||
return {'f': filename, 't': duration}
|
||||
@ -514,7 +514,7 @@ def _run_pylint(
|
||||
from pylint import lint
|
||||
from efro.terminal import Clr
|
||||
|
||||
start_time = time.time()
|
||||
start_time = time.monotonic()
|
||||
args = ['--rcfile', str(pylintrc), '--output-format=colorized']
|
||||
|
||||
args += dirtyfiles
|
||||
@ -540,7 +540,7 @@ def _run_pylint(
|
||||
if run.linter.msg_status != 0:
|
||||
raise CleanError('Pylint failed.')
|
||||
|
||||
duration = time.time() - start_time
|
||||
duration = time.monotonic() - start_time
|
||||
print(
|
||||
f'{Clr.GRN}Pylint passed for {name}'
|
||||
f' in {duration:.1f} seconds.{Clr.RST}'
|
||||
@ -796,12 +796,12 @@ def mypy(projroot: Path, full: bool) -> None:
|
||||
filenames = get_script_filenames(projroot)
|
||||
desc = '(full)' if full else '(incremental)'
|
||||
print(f'{Clr.BLU}Running Mypy {desc}...{Clr.RST}', flush=True)
|
||||
starttime = time.time()
|
||||
starttime = time.monotonic()
|
||||
try:
|
||||
mypy_files(projroot, filenames, full)
|
||||
except Exception as exc:
|
||||
raise CleanError('Mypy failed.') from exc
|
||||
duration = time.time() - starttime
|
||||
duration = time.monotonic() - starttime
|
||||
print(
|
||||
f'{Clr.GRN}Mypy passed in {duration:.1f} seconds.{Clr.RST}', flush=True
|
||||
)
|
||||
@ -819,7 +819,7 @@ def dmypy(projroot: Path) -> None:
|
||||
return
|
||||
|
||||
print('Running Mypy (daemon)...', flush=True)
|
||||
starttime = time.time()
|
||||
starttime = time.monotonic()
|
||||
try:
|
||||
args = [
|
||||
'dmypy',
|
||||
@ -834,7 +834,7 @@ def dmypy(projroot: Path) -> None:
|
||||
subprocess.run(args, check=True)
|
||||
except Exception as exc:
|
||||
raise CleanError('Mypy daemon: fail.') from exc
|
||||
duration = time.time() - starttime
|
||||
duration = time.monotonic() - starttime
|
||||
print(
|
||||
f'{Clr.GRN}Mypy daemon passed in {duration:.1f} seconds.{Clr.RST}',
|
||||
flush=True,
|
||||
@ -893,7 +893,7 @@ def _run_idea_inspections(
|
||||
|
||||
from efro.terminal import Clr
|
||||
|
||||
start_time = time.time()
|
||||
start_time = time.monotonic()
|
||||
print(
|
||||
f'{Clr.BLU}{displayname} checking'
|
||||
f' {len(scripts)} file(s)...{Clr.RST}',
|
||||
@ -944,7 +944,7 @@ def _run_idea_inspections(
|
||||
f'{Clr.SRED}{displayname} inspection'
|
||||
f' found {total_errors} error(s).{Clr.RST}'
|
||||
)
|
||||
duration = time.time() - start_time
|
||||
duration = time.monotonic() - start_time
|
||||
print(
|
||||
f'{Clr.GRN}{displayname} passed for {len(scripts)} files'
|
||||
f' in {duration:.1f} seconds.{Clr.RST}',
|
||||
|
||||
@ -120,9 +120,11 @@ def clientprint(
|
||||
assert _g_thread_local_storage is not None
|
||||
print(
|
||||
*args,
|
||||
file=_g_thread_local_storage.stderr
|
||||
if stderr
|
||||
else _g_thread_local_storage.stdout,
|
||||
file=(
|
||||
_g_thread_local_storage.stderr
|
||||
if stderr
|
||||
else _g_thread_local_storage.stdout
|
||||
),
|
||||
end=end,
|
||||
)
|
||||
else:
|
||||
|
||||
@ -37,8 +37,8 @@ VERSION_MIN_TVOS = '9.0'
# why-is-lldb-generating-exc-bad-instruction-with-user-compiled-library-on-macos
#
# For now will try to ride out this 3.0 LTS version as long as possible.
OPENSSL_VER_APPLE = '3.0.12'
OPENSSL_VER_ANDROID = '3.0.12'
OPENSSL_VER_APPLE = '3.0.13'
OPENSSL_VER_ANDROID = '3.0.13'

LIBFFI_VER_APPLE = '3.4.4'
BZIP2_VER_APPLE = '1.0.8'

@ -39,7 +39,6 @@ def _valid_filename(fname: str) -> bool:
'requirements.txt',
'pylintrc',
'clang-format',
'pycheckers',
'style.yapf',
'test_task_bin',
'.editorconfig',

@ -47,7 +47,6 @@ def install_tool_config(projroot: Path, src: Path, dst: Path) -> None:
comment = ';;'
elif dst.name in [
'.mypy.ini',
'.pycheckers',
'.pylintrc',
'.style.yapf',
'.clang-format',