mirror of
https://github.com/RYDE-WORK/ballistica.git
synced 2026-01-26 08:53:32 +08:00
Merge remote-tracking branch 'origin' into docs-generation
This commit is contained in:
commit
d5fbc23da1
@ -420,8 +420,8 @@
|
||||
"assets/build/ba_data/audio/zoeOw.ogg": "https://files.ballistica.net/cache/ba1/51/eb/0a567253cc08c94c5d315a64d9af",
|
||||
"assets/build/ba_data/audio/zoePickup01.ogg": "https://files.ballistica.net/cache/ba1/bc/8f/a9c51a09c418136e386b7fdf21c7",
|
||||
"assets/build/ba_data/audio/zoeScream01.ogg": "https://files.ballistica.net/cache/ba1/02/e5/84916e123f47ccf11ddda380d699",
|
||||
"assets/build/ba_data/data/langdata.json": "https://files.ballistica.net/cache/ba1/b8/e5/db9d86f120c6f2788f68320453e4",
|
||||
"assets/build/ba_data/data/languages/arabic.json": "https://files.ballistica.net/cache/ba1/0f/e1/94378b32c786d5365a7810a15d73",
|
||||
"assets/build/ba_data/data/langdata.json": "https://files.ballistica.net/cache/ba1/a7/67/0179e7eae3d1c7a4a9df6049229d",
|
||||
"assets/build/ba_data/data/languages/arabic.json": "https://files.ballistica.net/cache/ba1/ca/75/3de74bd6e498113b99bbf9eda645",
|
||||
"assets/build/ba_data/data/languages/belarussian.json": "https://files.ballistica.net/cache/ba1/55/8c/8d0a0585e434b94865ae4befc090",
|
||||
"assets/build/ba_data/data/languages/chinese.json": "https://files.ballistica.net/cache/ba1/f6/21/951b7ff02b0ad14b1f0ac55763c4",
|
||||
"assets/build/ba_data/data/languages/chinesetraditional.json": "https://files.ballistica.net/cache/ba1/ef/c2/a607f318b815f025a20ab92f0a7b",
|
||||
@ -431,7 +431,7 @@
|
||||
"assets/build/ba_data/data/languages/dutch.json": "https://files.ballistica.net/cache/ba1/97/90/39ba65c2ad714429aec82ea1ae3e",
|
||||
"assets/build/ba_data/data/languages/english.json": "https://files.ballistica.net/cache/ba1/99/2a/bdcfa0932cf73e5cf63fd8113b1b",
|
||||
"assets/build/ba_data/data/languages/esperanto.json": "https://files.ballistica.net/cache/ba1/4c/c7/0184b8178869d1a3827a1bfcd5bb",
|
||||
"assets/build/ba_data/data/languages/filipino.json": "https://files.ballistica.net/cache/ba1/6c/81/fad9858b8904190be7686ee245f8",
|
||||
"assets/build/ba_data/data/languages/filipino.json": "https://files.ballistica.net/cache/ba1/ef/54/9441116af8c547c9bf8ca3f278cd",
|
||||
"assets/build/ba_data/data/languages/french.json": "https://files.ballistica.net/cache/ba1/b6/e0/37dd30b686f475733ccc4b3cab49",
|
||||
"assets/build/ba_data/data/languages/german.json": "https://files.ballistica.net/cache/ba1/20/3f/198dcc5cfed5789042e1595bd048",
|
||||
"assets/build/ba_data/data/languages/gibberish.json": "https://files.ballistica.net/cache/ba1/03/6a/4db89c5bf1ced8eb5a5615a4ae64",
|
||||
@ -446,14 +446,14 @@
|
||||
"assets/build/ba_data/data/languages/portuguese.json": "https://files.ballistica.net/cache/ba1/26/41/f1246ab56c6b7853f605c3a95889",
|
||||
"assets/build/ba_data/data/languages/romanian.json": "https://files.ballistica.net/cache/ba1/82/12/57bf144e12be229a9b70da9c45cb",
|
||||
"assets/build/ba_data/data/languages/russian.json": "https://files.ballistica.net/cache/ba1/b2/46/89ae228342f20ca4937ee254197b",
|
||||
"assets/build/ba_data/data/languages/serbian.json": "https://files.ballistica.net/cache/ba1/e6/59/af13a5d296da5935699bec902ed7",
|
||||
"assets/build/ba_data/data/languages/serbian.json": "https://files.ballistica.net/cache/ba1/a5/48/47d5eb30535158610cdace1edfcd",
|
||||
"assets/build/ba_data/data/languages/slovak.json": "https://files.ballistica.net/cache/ba1/9f/a6/a2c9d7f3f90a2320aa45ccfd65cd",
|
||||
"assets/build/ba_data/data/languages/spanish.json": "https://files.ballistica.net/cache/ba1/87/5d/d36a8a2e9cb0f02731a3fd7af000",
|
||||
"assets/build/ba_data/data/languages/swedish.json": "https://files.ballistica.net/cache/ba1/50/9f/be006ba19be6a69a57837eb6dca0",
|
||||
"assets/build/ba_data/data/languages/tamil.json": "https://files.ballistica.net/cache/ba1/cb/11/e11957be752c3dc552898b60ab20",
|
||||
"assets/build/ba_data/data/languages/thai.json": "https://files.ballistica.net/cache/ba1/74/3d/c3d40a1e5ee1edf82555da05eda9",
|
||||
"assets/build/ba_data/data/languages/turkish.json": "https://files.ballistica.net/cache/ba1/0a/4f/90fcd63bd12a7648b2a1e9b01586",
|
||||
"assets/build/ba_data/data/languages/ukrainian.json": "https://files.ballistica.net/cache/ba1/87/20/259904441097b886b841d7c4d09a",
|
||||
"assets/build/ba_data/data/languages/ukrainian.json": "https://files.ballistica.net/cache/ba1/7f/bb/6239adeb551be5e09f3457d7b411",
|
||||
"assets/build/ba_data/data/languages/venetian.json": "https://files.ballistica.net/cache/ba1/e2/e1/b815d9f2e9b2c3a4daddaf728225",
|
||||
"assets/build/ba_data/data/languages/vietnamese.json": "https://files.ballistica.net/cache/ba1/0b/24/3cc2b5a6ebe4bca1e01b40f8ed09",
|
||||
"assets/build/ba_data/data/maps/big_g.json": "https://files.ballistica.net/cache/ba1/47/0a/a617cc85d927b576c4e6fc1091ed",
|
||||
@ -3971,50 +3971,50 @@
|
||||
"assets/src/ba_data/python/ba/_generated/__init__.py": "https://files.ballistica.net/cache/ba1/ee/e8/cad05aa531c7faf7ff7b96db7f6e",
|
||||
"assets/src/ba_data/python/ba/_generated/enums.py": "https://files.ballistica.net/cache/ba1/b2/e5/0ee0561e16257a32830645239f34",
|
||||
"ballisticacore-windows/Generic/BallisticaCore.ico": "https://files.ballistica.net/cache/ba1/89/c0/e32c7d2a35dc9aef57cc73b0911a",
|
||||
"build/prefab/full/linux_arm64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/ee/26/7d95e9e9690eaa1b865463014f98",
|
||||
"build/prefab/full/linux_arm64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/3f/35/9b198b207859e7aea60def53a71d",
|
||||
"build/prefab/full/linux_arm64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/74/1d/fc9e33e565475daaac80da5252f0",
|
||||
"build/prefab/full/linux_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/7c/a0/c05d501b5285b420df878daee007",
|
||||
"build/prefab/full/linux_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/80/92/11a44a4395f70cf709ac82283cf0",
|
||||
"build/prefab/full/linux_x86_64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/09/aa/968d1f5d5f8e263cb798948ecf12",
|
||||
"build/prefab/full/linux_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/e1/07/87ebd20d55fce1c5f71185d91887",
|
||||
"build/prefab/full/linux_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/9a/ef/661bef4f0f72ee27160fd85ffbc9",
|
||||
"build/prefab/full/linux_x86_64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/cf/19/130f96a18e2f027b6d51d1133fea",
|
||||
"build/prefab/full/linux_x86_64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/9c/7b/ac1a200be0f37078af0991faca3b",
|
||||
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/5a/cd/ef7c51e344560a70d4ca092b656f",
|
||||
"build/prefab/full/linux_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/d9/1f/c527e0722286b8c9dd8cf8bbb1e8",
|
||||
"build/prefab/full/mac_arm64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/1a/e1/a1a8034e2e1fade16ace9c907ff5",
|
||||
"build/prefab/full/mac_arm64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/19/07/095195bc15b265b89492f8641b28",
|
||||
"build/prefab/full/mac_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/fa/a8/8f0f464da6fbaf48cd32a52e57a7",
|
||||
"build/prefab/full/mac_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/86/e6/54166b647cb27688365f18188864",
|
||||
"build/prefab/full/mac_x86_64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/fc/0b/02397618cdb66f16f20ed42b56f0",
|
||||
"build/prefab/full/mac_x86_64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/cb/bf/86fd8654910cac65cf3417fe44c4",
|
||||
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/9a/49/c975de8bfc3393550b36d3aceee2",
|
||||
"build/prefab/full/mac_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/2f/13/71e11f74c7d9f4b7aae29fd698fa",
|
||||
"build/prefab/full/windows_x86_gui/debug/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/02/89/46e6a952d95f6d2777f59761a0a7",
|
||||
"build/prefab/full/windows_x86_gui/release/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/71/8f/05b55549929a9f9bbeb87044affe",
|
||||
"build/prefab/full/windows_x86_server/debug/dist/BallisticaCoreHeadless.exe": "https://files.ballistica.net/cache/ba1/06/28/f87dd138acb909d269630fbc822d",
|
||||
"build/prefab/full/windows_x86_server/release/dist/BallisticaCoreHeadless.exe": "https://files.ballistica.net/cache/ba1/43/13/780f59f3e669d350a5454052b8ec",
|
||||
"build/prefab/lib/linux_arm64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/36/4c/60982c040f3d99115533defa8424",
|
||||
"build/prefab/lib/linux_arm64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/fa/ce/979941714e5818f53e7b432999e5",
|
||||
"build/prefab/lib/linux_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/e8/7a/53b3337ca506f115d3abb2ed2178",
|
||||
"build/prefab/lib/linux_arm64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/14/fc/47d215d72a1f92884b4bb933d174",
|
||||
"build/prefab/lib/linux_x86_64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/b9/56/8d0c5c7c3e88053b91ca3347d81c",
|
||||
"build/prefab/lib/linux_x86_64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/cb/17/666666b770534df7bf22bd8b339a",
|
||||
"build/prefab/lib/linux_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/16/f2/f3ddad586518c3cf691f549c935a",
|
||||
"build/prefab/lib/linux_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/ec/44/0371116fbbdec59df047cd704739",
|
||||
"build/prefab/lib/mac_arm64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/da/99/a26510b1c727ba3c21059b6e527c",
|
||||
"build/prefab/lib/mac_arm64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/0d/aa/d66c7ed8e090061c4223ffda8691",
|
||||
"build/prefab/lib/mac_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/95/04/32ec2d08c22a60c1094df98f7cde",
|
||||
"build/prefab/lib/mac_arm64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/6e/ad/651d3b89a8b3b1adbadffa242453",
|
||||
"build/prefab/lib/mac_x86_64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/62/2c/9005fac95dba13211491a4a174b1",
|
||||
"build/prefab/lib/mac_x86_64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/57/bc/4baf0c1c3ae705b13351d77c2321",
|
||||
"build/prefab/lib/mac_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/a6/3c/13037c954372776021d4e02cd976",
|
||||
"build/prefab/lib/mac_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/03/18/548ae6eb8fa4c8e022c549e95408",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreGenericInternal.lib": "https://files.ballistica.net/cache/ba1/c2/d1/075f751f5d75b98934fcacf49e30",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreGenericInternal.pdb": "https://files.ballistica.net/cache/ba1/23/3a/0f2194973ba501270391c8914832",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreHeadlessInternal.lib": "https://files.ballistica.net/cache/ba1/41/23/56af29e4b57c39805c3d1be25d42",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreHeadlessInternal.pdb": "https://files.ballistica.net/cache/ba1/94/b8/6163d06d7374cde403b51b1a8cdc",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaCoreGenericInternal.lib": "https://files.ballistica.net/cache/ba1/56/8c/2d149890a539907d8fbfb682038d",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaCoreGenericInternal.pdb": "https://files.ballistica.net/cache/ba1/a1/e5/f47f6a080bd269d919bbda6eadb8",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaCoreHeadlessInternal.lib": "https://files.ballistica.net/cache/ba1/18/b2/9c4875a5a057670e2348dc59103b",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaCoreHeadlessInternal.pdb": "https://files.ballistica.net/cache/ba1/cc/08/576b8a703cbacbb58fd5283479f3",
|
||||
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/50/3a/54aaa64b509aba3e1ddfcf7dc94f",
|
||||
"build/prefab/full/linux_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/b4/be/e13ed5d279bfc0d3d986be35332a",
|
||||
"build/prefab/full/mac_arm64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/10/36/ed8ebda9b7c5109357713021b6d1",
|
||||
"build/prefab/full/mac_arm64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/23/fa/41a6f0af82cf7622d4f93bf8b242",
|
||||
"build/prefab/full/mac_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/8e/ee/496554c8717e9f9532fadcc9289e",
|
||||
"build/prefab/full/mac_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/7c/8c/c92c1ba68d275b20f702cfd6ad58",
|
||||
"build/prefab/full/mac_x86_64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/a2/04/5f38800e416b684931ffeb673ad0",
|
||||
"build/prefab/full/mac_x86_64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/e5/dc/b31c92119ae5b570293e8e065ac9",
|
||||
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/56/ea/5dcce18693a4c8133daef2d88895",
|
||||
"build/prefab/full/mac_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/7f/a0/ca0b6404e38a91b18f6a022b4a00",
|
||||
"build/prefab/full/windows_x86_gui/debug/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/0f/dd/d0f6fa8df5906444cd5b303c3953",
|
||||
"build/prefab/full/windows_x86_gui/release/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/f5/cf/e7464eb6cc321d57f0ab274d14ee",
|
||||
"build/prefab/full/windows_x86_server/debug/dist/BallisticaCoreHeadless.exe": "https://files.ballistica.net/cache/ba1/e1/ec/9e3f6447dd7268e67b56c8578741",
|
||||
"build/prefab/full/windows_x86_server/release/dist/BallisticaCoreHeadless.exe": "https://files.ballistica.net/cache/ba1/b1/db/660910d2c407711becc95184bd5b",
|
||||
"build/prefab/lib/linux_arm64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/db/b3/f606248668c48ec4c3088ba5b8b5",
|
||||
"build/prefab/lib/linux_arm64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/77/20/f9e6b32d95d24e87ae5b73b016cf",
|
||||
"build/prefab/lib/linux_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/18/d2/bfc47ac110285a0c2f48babcff03",
|
||||
"build/prefab/lib/linux_arm64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/1b/25/46721d6d1a6286df32b5c999a7d1",
|
||||
"build/prefab/lib/linux_x86_64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/b8/e4/ddbf15a2ec9fa0a3edff2d0e897a",
|
||||
"build/prefab/lib/linux_x86_64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/69/a3/b56d339f4dc95b5333c97e423d8c",
|
||||
"build/prefab/lib/linux_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/bf/53/63ceb27c4f2eea9fc4e8455f8f30",
|
||||
"build/prefab/lib/linux_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/a0/fc/0e6981872542df974cc69e10949d",
|
||||
"build/prefab/lib/mac_arm64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/e6/19/7a72afb6912b37b395330cba9eab",
|
||||
"build/prefab/lib/mac_arm64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/db/75/d3de1742c3404572b4e85820b7d2",
|
||||
"build/prefab/lib/mac_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/d0/5d/8449c26ddb6a8558eba2c5d7ce6d",
|
||||
"build/prefab/lib/mac_arm64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/86/98/4d4a0d9a52e34e07061e31d038fd",
|
||||
"build/prefab/lib/mac_x86_64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/9a/37/e528806cecf86c20acac74127499",
|
||||
"build/prefab/lib/mac_x86_64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/29/16/1d9549c835276428a038f1692e22",
|
||||
"build/prefab/lib/mac_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/86/b7/e47605a45acbad6f6f5a44b3266e",
|
||||
"build/prefab/lib/mac_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/09/56/af875f3eaacd2056b8768c6fa53c",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreGenericInternal.lib": "https://files.ballistica.net/cache/ba1/67/a2/829f2962de6305612c07124a9297",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreGenericInternal.pdb": "https://files.ballistica.net/cache/ba1/3d/d5/2bc1f16d9ef459ada00afc343a81",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreHeadlessInternal.lib": "https://files.ballistica.net/cache/ba1/d8/84/7c17af707c4700ab75026311d21a",
|
||||
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreHeadlessInternal.pdb": "https://files.ballistica.net/cache/ba1/fe/06/6921fe2c065426a82f912732b1ad",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaCoreGenericInternal.lib": "https://files.ballistica.net/cache/ba1/6d/ef/4192a89cace9e99ce87d2d118e01",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaCoreGenericInternal.pdb": "https://files.ballistica.net/cache/ba1/b2/a4/d9de56f34bc35d044d673986e678",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaCoreHeadlessInternal.lib": "https://files.ballistica.net/cache/ba1/6b/07/26c48232c8aeaf9ca6bdd0d54e60",
|
||||
"build/prefab/lib/windows/Release_Win32/BallisticaCoreHeadlessInternal.pdb": "https://files.ballistica.net/cache/ba1/49/3a/3fddd087687893bb595ea2155e91",
|
||||
"src/ballistica/generated/python_embedded/binding.inc": "https://files.ballistica.net/cache/ba1/b3/15/7c6d580b3482870b5b058858624c",
|
||||
"src/ballistica/generated/python_embedded/bootstrap.inc": "https://files.ballistica.net/cache/ba1/9d/7e/0a5125aa421e722c59d22b8beb19"
|
||||
}
|
||||
13
.github/ISSUE_TEMPLATE/bug_report.md
vendored
13
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -7,26 +7,27 @@ assignees: ''
|
||||
|
||||
---
|
||||
|
||||
## Description
|
||||
### Description
|
||||
Describe the bug. Do not forget to fill the title.
|
||||
Make sure you're running game without any modifications (unless you want to report an api bug).
|
||||
|
||||
## Steps to reproduce
|
||||
### Steps to reproduce
|
||||
1. Launch BombSquad
|
||||
2. Go to '...'
|
||||
3. Press '...'
|
||||
4. Bug!
|
||||
|
||||
## Expected behavior
|
||||
### Expected behavior
|
||||
Describe what you think should happen.
|
||||
|
||||
## Machine
|
||||
### Machine
|
||||
**Platform**: Windows 10 / Ubuntu 20.04 LTS / AOSP 8.1 / etc.
|
||||
**BombSquad version**: [1.5.27](https://github.com/efroemling/ballistica/releases/tag/v1.5.27)
|
||||
**Commit**: [2642488](https://github.com/efroemling/ballistica/commit/2642488a51b250752169738f5aeeccaafa2bc8de)
|
||||
Select what do you want to use: release version or commit. Please use a hyperlink.
|
||||
|
||||
## Screenshots
|
||||
### Screenshots
|
||||
Put some screenshots here if needed.
|
||||
|
||||
## Extra
|
||||
### Extra
|
||||
Put some extra information here. For example, describe your assumptions about the cause of the bug.
|
||||
|
||||
27
.idea/dictionaries/ericf.xml
generated
27
.idea/dictionaries/ericf.xml
generated
@ -54,6 +54,8 @@
|
||||
<w>adisp</w>
|
||||
<w>advertizing</w>
|
||||
<w>aidl</w>
|
||||
<w>aioloop</w>
|
||||
<w>aiomain</w>
|
||||
<w>alarmsound</w>
|
||||
<w>alibaba</w>
|
||||
<w>allpaths</w>
|
||||
@ -228,6 +230,7 @@
|
||||
<w>bisectmodule</w>
|
||||
<w>bitcode</w>
|
||||
<w>blarg</w>
|
||||
<w>blargh</w>
|
||||
<w>blas</w>
|
||||
<w>blastos</w>
|
||||
<w>bldtp</w>
|
||||
@ -301,6 +304,7 @@
|
||||
<w>cachefile</w>
|
||||
<w>cachemap</w>
|
||||
<w>cachepath</w>
|
||||
<w>cadata</w>
|
||||
<w>cafile</w>
|
||||
<w>calced</w>
|
||||
<w>calcing</w>
|
||||
@ -647,6 +651,7 @@
|
||||
<w>duckdns</w>
|
||||
<w>dummymodule</w>
|
||||
<w>dummyname</w>
|
||||
<w>dummyret</w>
|
||||
<w>dummytoken</w>
|
||||
<w>dummyval</w>
|
||||
<w>dups</w>
|
||||
@ -835,6 +840,7 @@
|
||||
<w>flines</w>
|
||||
<w>floof</w>
|
||||
<w>floofcls</w>
|
||||
<w>floop</w>
|
||||
<w>flycheck</w>
|
||||
<w>fmod</w>
|
||||
<w>fname</w>
|
||||
@ -1076,6 +1082,7 @@
|
||||
<w>howtoplay</w>
|
||||
<w>hpos</w>
|
||||
<w>hscrollwidget</w>
|
||||
<w>hsdata</w>
|
||||
<w>hspacing</w>
|
||||
<w>httprequest</w>
|
||||
<w>hurtiness</w>
|
||||
@ -1147,6 +1154,7 @@
|
||||
<w>installdir</w>
|
||||
<w>instancer</w>
|
||||
<w>interfacetype</w>
|
||||
<w>internalmodule</w>
|
||||
<w>internalsrc</w>
|
||||
<w>interstitials</w>
|
||||
<w>intex</w>
|
||||
@ -1223,7 +1231,9 @@
|
||||
<w>killpath</w>
|
||||
<w>killpaths</w>
|
||||
<w>kingofthehill</w>
|
||||
<w>kinterval</w>
|
||||
<w>kronk</w>
|
||||
<w>ktimeout</w>
|
||||
<w>kwargannotation</w>
|
||||
<w>kwds</w>
|
||||
<w>kwonlyargs</w>
|
||||
@ -1415,6 +1425,7 @@
|
||||
<w>megalint</w>
|
||||
<w>memfunctions</w>
|
||||
<w>menubar</w>
|
||||
<w>messagetype</w>
|
||||
<w>metamakefile</w>
|
||||
<w>metaprogramming</w>
|
||||
<w>metascan</w>
|
||||
@ -1442,6 +1453,7 @@
|
||||
<w>mipmap</w>
|
||||
<w>mipmaps</w>
|
||||
<w>mkflags</w>
|
||||
<w>mlen</w>
|
||||
<w>mline</w>
|
||||
<w>mmapmodule</w>
|
||||
<w>mmult</w>
|
||||
@ -1468,6 +1480,12 @@
|
||||
<w>msbuild</w>
|
||||
<w>msgdict</w>
|
||||
<w>msgfull</w>
|
||||
<w>msgid</w>
|
||||
<w>msglen</w>
|
||||
<w>msgnum</w>
|
||||
<w>msgpriv</w>
|
||||
<w>msgpub</w>
|
||||
<w>msgtest</w>
|
||||
<w>msgtype</w>
|
||||
<w>msgtypes</w>
|
||||
<w>msgtypevar</w>
|
||||
@ -1700,6 +1718,7 @@
|
||||
<w>pdataclass</w>
|
||||
<w>pedit</w>
|
||||
<w>peditui</w>
|
||||
<w>peername</w>
|
||||
<w>pentry</w>
|
||||
<w>perma</w>
|
||||
<w>perrdetail</w>
|
||||
@ -1914,6 +1933,7 @@
|
||||
<w>pypaths</w>
|
||||
<w>pysources</w>
|
||||
<w>pytest</w>
|
||||
<w>pythondevmode</w>
|
||||
<w>pythondirs</w>
|
||||
<w>pythondontwritebytecode</w>
|
||||
<w>pythonenumsmodule</w>
|
||||
@ -1946,6 +1966,7 @@
|
||||
<w>rcvs</w>
|
||||
<w>rdict</w>
|
||||
<w>rdir</w>
|
||||
<w>readexactly</w>
|
||||
<w>readline</w>
|
||||
<w>readlines</w>
|
||||
<w>realpath</w>
|
||||
@ -2008,6 +2029,9 @@
|
||||
<w>rscale</w>
|
||||
<w>rsdr</w>
|
||||
<w>rsms</w>
|
||||
<w>rspdata</w>
|
||||
<w>rsplen</w>
|
||||
<w>rspobj</w>
|
||||
<w>rstr</w>
|
||||
<w>rtest</w>
|
||||
<w>rtnetlink</w>
|
||||
@ -2217,6 +2241,7 @@
|
||||
<w>srctxt</w>
|
||||
<w>sred</w>
|
||||
<w>sshd</w>
|
||||
<w>sslcontext</w>
|
||||
<w>sslproto</w>
|
||||
<w>ssval</w>
|
||||
<w>stackstr</w>
|
||||
@ -2327,6 +2352,7 @@
|
||||
<w>targetpath</w>
|
||||
<w>targetpractice</w>
|
||||
<w>targs</w>
|
||||
<w>tasklabel</w>
|
||||
<w>tbtcolor</w>
|
||||
<w>tbtn</w>
|
||||
<w>tbttxt</w>
|
||||
@ -2399,6 +2425,7 @@
|
||||
<w>themself</w>
|
||||
<w>thingie</w>
|
||||
<w>this'll</w>
|
||||
<w>thismodule</w>
|
||||
<w>threadtype</w>
|
||||
<w>throwiness</w>
|
||||
<w>ticon</w>
|
||||
|
||||
@ -1,7 +1,8 @@
|
||||
### 1.6.10 (20503, 2022-03-07)
|
||||
### 1.6.10 (20510, 2022-03-18)
|
||||
- Added `_ba.get_client_public_device_uuid` function which returns a semi-permanent device id for a connected client running 1.6.10 or newer. Can be useful to combat spam attacks or other mischief.
|
||||
- Fixed an issue with `make update` not properly rewriting Visual Studio project files to account for new/deleted source files.
|
||||
- Removed various bits of code associated with the (no-longer-functional) Google Play Games multiplayer connections.
|
||||
- Added lots of foundation code for v2 master-server connections (not yet enabled).
|
||||
|
||||
### 1.6.9 (20486, 2022-02-22)
|
||||
- Upgraded Android Python to 3.9.10
|
||||
|
||||
5
Makefile
5
Makefile
@ -705,6 +705,11 @@ test-message:
|
||||
@tools/pcommand pytest -o log_cli=true -o log_cli_level=debug -s -vv \
|
||||
tests/test_efro/test_message.py
|
||||
|
||||
# Individual test with extra output enabled.
|
||||
test-rpc:
|
||||
@tools/pcommand pytest -o log_cli=true -o log_cli_level=debug -s -vv \
|
||||
tests/test_efro/test_rpc.py
|
||||
|
||||
# Tell make which of these targets don't represent files.
|
||||
.PHONY: test test-full test-assetmanager
|
||||
|
||||
|
||||
@ -497,18 +497,20 @@
|
||||
"ba_data/python/efro/__pycache__/__init__.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/__pycache__/call.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/__pycache__/error.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/__pycache__/message.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/__pycache__/rpc.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/__pycache__/terminal.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/__pycache__/util.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/call.py",
|
||||
"ba_data/python/efro/dataclassio/__init__.py",
|
||||
"ba_data/python/efro/dataclassio/__pycache__/__init__.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/dataclassio/__pycache__/_api.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/dataclassio/__pycache__/_base.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/dataclassio/__pycache__/_inputter.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/dataclassio/__pycache__/_outputter.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/dataclassio/__pycache__/_pathcapture.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/dataclassio/__pycache__/_prep.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/dataclassio/__pycache__/extras.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/dataclassio/_api.py",
|
||||
"ba_data/python/efro/dataclassio/_base.py",
|
||||
"ba_data/python/efro/dataclassio/_inputter.py",
|
||||
"ba_data/python/efro/dataclassio/_outputter.py",
|
||||
@ -516,7 +518,19 @@
|
||||
"ba_data/python/efro/dataclassio/_prep.py",
|
||||
"ba_data/python/efro/dataclassio/extras.py",
|
||||
"ba_data/python/efro/error.py",
|
||||
"ba_data/python/efro/message.py",
|
||||
"ba_data/python/efro/message/__init__.py",
|
||||
"ba_data/python/efro/message/__pycache__/__init__.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/message/__pycache__/_message.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/message/__pycache__/_module.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/message/__pycache__/_protocol.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/message/__pycache__/_receiver.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/message/__pycache__/_sender.cpython-39.opt-1.pyc",
|
||||
"ba_data/python/efro/message/_message.py",
|
||||
"ba_data/python/efro/message/_module.py",
|
||||
"ba_data/python/efro/message/_protocol.py",
|
||||
"ba_data/python/efro/message/_receiver.py",
|
||||
"ba_data/python/efro/message/_sender.py",
|
||||
"ba_data/python/efro/rpc.py",
|
||||
"ba_data/python/efro/terminal.py",
|
||||
"ba_data/python/efro/util.py",
|
||||
"server/__pycache__/ballisticacore_server.cpython-39.opt-1.pyc",
|
||||
|
||||
@ -644,6 +644,7 @@ SCRIPT_TARGETS_PY_PUBLIC_TOOLS = \
|
||||
build/ba_data/python/efro/__init__.py \
|
||||
build/ba_data/python/efro/call.py \
|
||||
build/ba_data/python/efro/dataclassio/__init__.py \
|
||||
build/ba_data/python/efro/dataclassio/_api.py \
|
||||
build/ba_data/python/efro/dataclassio/_base.py \
|
||||
build/ba_data/python/efro/dataclassio/_inputter.py \
|
||||
build/ba_data/python/efro/dataclassio/_outputter.py \
|
||||
@ -651,7 +652,13 @@ SCRIPT_TARGETS_PY_PUBLIC_TOOLS = \
|
||||
build/ba_data/python/efro/dataclassio/_prep.py \
|
||||
build/ba_data/python/efro/dataclassio/extras.py \
|
||||
build/ba_data/python/efro/error.py \
|
||||
build/ba_data/python/efro/message.py \
|
||||
build/ba_data/python/efro/message/__init__.py \
|
||||
build/ba_data/python/efro/message/_message.py \
|
||||
build/ba_data/python/efro/message/_module.py \
|
||||
build/ba_data/python/efro/message/_protocol.py \
|
||||
build/ba_data/python/efro/message/_receiver.py \
|
||||
build/ba_data/python/efro/message/_sender.py \
|
||||
build/ba_data/python/efro/rpc.py \
|
||||
build/ba_data/python/efro/terminal.py \
|
||||
build/ba_data/python/efro/util.py
|
||||
|
||||
@ -664,6 +671,7 @@ SCRIPT_TARGETS_PYC_PUBLIC_TOOLS = \
|
||||
build/ba_data/python/efro/__pycache__/__init__.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/__pycache__/call.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/dataclassio/__pycache__/__init__.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/dataclassio/__pycache__/_api.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/dataclassio/__pycache__/_base.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/dataclassio/__pycache__/_inputter.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/dataclassio/__pycache__/_outputter.cpython-39.opt-1.pyc \
|
||||
@ -671,7 +679,13 @@ SCRIPT_TARGETS_PYC_PUBLIC_TOOLS = \
|
||||
build/ba_data/python/efro/dataclassio/__pycache__/_prep.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/dataclassio/__pycache__/extras.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/__pycache__/error.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/__pycache__/message.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/message/__pycache__/__init__.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/message/__pycache__/_message.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/message/__pycache__/_module.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/message/__pycache__/_protocol.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/message/__pycache__/_receiver.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/message/__pycache__/_sender.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/__pycache__/rpc.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/__pycache__/terminal.cpython-39.opt-1.pyc \
|
||||
build/ba_data/python/efro/__pycache__/util.cpython-39.opt-1.pyc
|
||||
|
||||
|
||||
@ -1 +1 @@
|
||||
61174160426480901885067629344524001940
|
||||
263249044076897294312459199917994463006
|
||||
@ -84,12 +84,24 @@ app: App
|
||||
__all__: list[str] = []
|
||||
|
||||
|
||||
# Change everything's listed module to simply 'ba' (instead of 'ba.foo.bar').
|
||||
# Have these things present themselves cleanly as 'ba.Foo'
|
||||
# instead of 'ba._submodule.Foo'
|
||||
def _simplify_module_names() -> None:
|
||||
import os
|
||||
|
||||
for attr, _obj in globals().items():
|
||||
if not attr.startswith('_'):
|
||||
__all__.append(attr)
|
||||
|
||||
# Though pdoc gets confused when we override __module__,
|
||||
# so let's make an exception for it.
|
||||
if os.environ.get('BA_DOCS_GENERATION', '0') != '1':
|
||||
from efro.util import set_canonical_module
|
||||
globs = globals()
|
||||
set_canonical_module(
|
||||
module_globals=globs,
|
||||
names=[n for n in globs.keys() if not n.startswith('_')])
|
||||
|
||||
|
||||
_simplify_module_names()
|
||||
del _simplify_module_names
|
||||
|
||||
27
ballisticacore-cmake/.idea/dictionaries/ericf.xml
generated
27
ballisticacore-cmake/.idea/dictionaries/ericf.xml
generated
@ -31,6 +31,8 @@
|
||||
<w>afty</w>
|
||||
<w>aftz</w>
|
||||
<w>aint</w>
|
||||
<w>aioloop</w>
|
||||
<w>aiomain</w>
|
||||
<w>airborn</w>
|
||||
<w>alext</w>
|
||||
<w>alibaba</w>
|
||||
@ -112,6 +114,7 @@
|
||||
<w>bitpos</w>
|
||||
<w>bitval</w>
|
||||
<w>blarg</w>
|
||||
<w>blargh</w>
|
||||
<w>blitters</w>
|
||||
<w>blitting</w>
|
||||
<w>blockadr</w>
|
||||
@ -155,6 +158,7 @@
|
||||
<w>buttonwidget</w>
|
||||
<w>bwst</w>
|
||||
<w>cacert</w>
|
||||
<w>cadata</w>
|
||||
<w>cafile</w>
|
||||
<w>calced</w>
|
||||
<w>calcing</w>
|
||||
@ -314,6 +318,7 @@
|
||||
<w>dstpath</w>
|
||||
<w>dstr</w>
|
||||
<w>dtest</w>
|
||||
<w>dummyret</w>
|
||||
<w>dummyval</w>
|
||||
<w>dummyvalid</w>
|
||||
<w>dval</w>
|
||||
@ -404,6 +409,7 @@
|
||||
<w>fjco</w>
|
||||
<w>fjcoiwef</w>
|
||||
<w>flipbit</w>
|
||||
<w>floop</w>
|
||||
<w>flopsy</w>
|
||||
<w>fname</w>
|
||||
<w>fnode</w>
|
||||
@ -522,6 +528,7 @@
|
||||
<w>hotkeys</w>
|
||||
<w>hotplug</w>
|
||||
<w>hscrollwidget</w>
|
||||
<w>hsdata</w>
|
||||
<w>htonf</w>
|
||||
<w>htonl</w>
|
||||
<w>htons</w>
|
||||
@ -551,6 +558,7 @@
|
||||
<w>insta</w>
|
||||
<w>intercollide</w>
|
||||
<w>internalformat</w>
|
||||
<w>internalmodule</w>
|
||||
<w>interuptions</w>
|
||||
<w>intstr</w>
|
||||
<w>invote</w>
|
||||
@ -600,8 +608,10 @@
|
||||
<w>kickee</w>
|
||||
<w>killable</w>
|
||||
<w>killcount</w>
|
||||
<w>kinterval</w>
|
||||
<w>kmod</w>
|
||||
<w>kronk</w>
|
||||
<w>ktimeout</w>
|
||||
<w>kwds</w>
|
||||
<w>kxyz</w>
|
||||
<w>langutils</w>
|
||||
@ -682,6 +692,7 @@
|
||||
<w>memcpy</w>
|
||||
<w>meshdata</w>
|
||||
<w>messagebox</w>
|
||||
<w>messagetype</w>
|
||||
<w>metamakefile</w>
|
||||
<w>meth</w>
|
||||
<w>mhbegin</w>
|
||||
@ -693,6 +704,7 @@
|
||||
<w>mipmapcount</w>
|
||||
<w>mipmaps</w>
|
||||
<w>mkflags</w>
|
||||
<w>mlen</w>
|
||||
<w>mmask</w>
|
||||
<w>mmdevapi</w>
|
||||
<w>modder</w>
|
||||
@ -705,6 +717,12 @@
|
||||
<w>msaa</w>
|
||||
<w>msgdict</w>
|
||||
<w>msgfull</w>
|
||||
<w>msgid</w>
|
||||
<w>msglen</w>
|
||||
<w>msgnum</w>
|
||||
<w>msgpriv</w>
|
||||
<w>msgpub</w>
|
||||
<w>msgtest</w>
|
||||
<w>msgtype</w>
|
||||
<w>msgtypes</w>
|
||||
<w>msgtypevar</w>
|
||||
@ -838,6 +856,7 @@
|
||||
<w>pcommands</w>
|
||||
<w>pdataclass</w>
|
||||
<w>pdst</w>
|
||||
<w>peername</w>
|
||||
<w>persp</w>
|
||||
<w>pflag</w>
|
||||
<w>pflags</w>
|
||||
@ -915,6 +934,7 @@
|
||||
<w>pylib</w>
|
||||
<w>pyobj</w>
|
||||
<w>pyobjs</w>
|
||||
<w>pythondevmode</w>
|
||||
<w>pythonenumsmodule</w>
|
||||
<w>pytype</w>
|
||||
<w>qerr</w>
|
||||
@ -937,6 +957,7 @@
|
||||
<w>rcva</w>
|
||||
<w>rcvs</w>
|
||||
<w>reaaaly</w>
|
||||
<w>readexactly</w>
|
||||
<w>readset</w>
|
||||
<w>realloc</w>
|
||||
<w>reallocations</w>
|
||||
@ -979,6 +1000,9 @@
|
||||
<w>rresult</w>
|
||||
<w>rscode</w>
|
||||
<w>rsgc</w>
|
||||
<w>rspdata</w>
|
||||
<w>rsplen</w>
|
||||
<w>rspobj</w>
|
||||
<w>rtest</w>
|
||||
<w>rtypes</w>
|
||||
<w>rtypevar</w>
|
||||
@ -1069,6 +1093,7 @@
|
||||
<w>sresult</w>
|
||||
<w>sscanf</w>
|
||||
<w>ssize</w>
|
||||
<w>sslcontext</w>
|
||||
<w>sssi</w>
|
||||
<w>sssisisis</w>
|
||||
<w>sssissss</w>
|
||||
@ -1124,6 +1149,7 @@
|
||||
<w>tabtypes</w>
|
||||
<w>talloc</w>
|
||||
<w>targs</w>
|
||||
<w>tasklabel</w>
|
||||
<w>tcls</w>
|
||||
<w>tegra</w>
|
||||
<w>telefonaktiebolaget</w>
|
||||
@ -1141,6 +1167,7 @@
|
||||
<w>thang</w>
|
||||
<w>thecommand</w>
|
||||
<w>theres</w>
|
||||
<w>thismodule</w>
|
||||
<w>threadname</w>
|
||||
<w>threadtype</w>
|
||||
<w>ticon</w>
|
||||
|
||||
@ -39,6 +39,7 @@
|
||||
],
|
||||
"python_paths": [
|
||||
"assets/src/ba_data/python",
|
||||
"src/meta",
|
||||
"tools"
|
||||
],
|
||||
"python_source_dirs": [
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
<!-- THIS FILE IS AUTO GENERATED; DO NOT EDIT BY HAND -->
|
||||
<h4><em>last updated for Ballistica version 1.6.10 build 20501</em></h4>
|
||||
<h4><em>last updated for Ballistica version 1.6.10 build 20510</em></h4>
|
||||
<p>This page documents the Python classes and functions in the 'ba' module,
|
||||
which are the ones most relevant to modding in Ballistica. If you come across something you feel should be included here or could be better explained, please <a href="mailto:support@froemling.net">let me know</a>. Happy modding!</p>
|
||||
<hr>
|
||||
|
||||
@ -21,7 +21,7 @@
|
||||
namespace ballistica {
|
||||
|
||||
// These are set automatically via script; don't modify them here.
|
||||
const int kAppBuildNumber = 20503;
|
||||
const int kAppBuildNumber = 20510;
|
||||
const char* kAppVersion = "1.6.10";
|
||||
|
||||
// Our standalone globals.
|
||||
|
||||
@ -110,10 +110,6 @@ namespace ballistica {
|
||||
#define BA_ENABLE_SDL_JOYSTICKS 0
|
||||
#endif
|
||||
|
||||
#ifndef BA_USE_ICLOUD
|
||||
#define BA_USE_ICLOUD 0
|
||||
#endif
|
||||
|
||||
#ifndef BA_USE_STORE_KIT
|
||||
#define BA_USE_STORE_KIT 0
|
||||
#endif
|
||||
@ -242,7 +238,6 @@ class BuildConfig {
|
||||
bool arcade_build() const { return EXPBOOL_(BA_ARCADE_BUILD); }
|
||||
bool iircade_build() const { return EXPBOOL_(BA_IIRCADE_BUILD); }
|
||||
|
||||
bool use_icloud() const { return EXPBOOL_(BA_USE_ICLOUD); }
|
||||
bool use_store_kit() const { return EXPBOOL_(BA_USE_STORE_KIT); }
|
||||
bool use_game_center() const { return EXPBOOL_(BA_USE_GAME_CENTER); }
|
||||
bool use_stdin_thread() const { return EXPBOOL_(BA_USE_STDIN_THREAD); }
|
||||
|
||||
@ -290,7 +290,7 @@ GLContext::GLContext(int target_res_x, int target_res_y, bool fullscreen)
|
||||
|
||||
#if BA_SDL2_BUILD
|
||||
// Gonna wait and see if if still need this.
|
||||
#else
|
||||
#elif BA_SDL_BUILD
|
||||
glClearColor(0, 0, 0, 1);
|
||||
glClear(GL_COLOR_BUFFER_BIT);
|
||||
SDL_GL_SwapBuffers();
|
||||
|
||||
@ -16,6 +16,9 @@
|
||||
#if BA_USE_ES3_INCLUDES
|
||||
#include <GLES3/gl3.h>
|
||||
#include <GLES3/gl3ext.h>
|
||||
#elif BA_OSTYPE_IOS_TVOS
|
||||
#include <OpenGLES/ES2/gl.h>
|
||||
#include <OpenGLES/ES2/glext.h>
|
||||
#else
|
||||
#if BA_SDL_BUILD
|
||||
#include <SDL/SDL.h> // needed for ios?...
|
||||
@ -47,6 +50,7 @@
|
||||
|
||||
#else // BA_OSTYPE_IOS_TVOS || BA_OSTYPE_ANDROID
|
||||
|
||||
// SDK Desktop builds.
|
||||
#if BA_SDL2_BUILD
|
||||
#include <SDL_opengl.h>
|
||||
#elif BA_SDL_BUILD // BA_SDL2_BUILD
|
||||
@ -55,6 +59,10 @@
|
||||
#endif // BA_SDL2_BUILD
|
||||
|
||||
#if BA_OSTYPE_MACOS
|
||||
#if BA_XCODE_BUILD
|
||||
#include <OpenGL/gl.h>
|
||||
#include <OpenGL/glu.h>
|
||||
#endif // BA_XCODE_BUILD
|
||||
#include <OpenGL/glext.h>
|
||||
#endif // BA_OSTYPE_MACOS
|
||||
|
||||
|
||||
@ -20,7 +20,7 @@
|
||||
|
||||
namespace ballistica {
|
||||
|
||||
#if BA_OSTYPE_MACOS && BA_XCODE_BUILD
|
||||
#if BA_OSTYPE_MACOS && BA_XCODE_BUILD && !BA_XCODE_NEW_PROJECT
|
||||
void GraphicsServer::FullscreenCheck() {
|
||||
if (!fullscreen_enabled()) {
|
||||
#if BA_ENABLE_OPENGL
|
||||
@ -577,7 +577,7 @@ void GraphicsServer::HandleFullscreenToggling(bool do_set_existing_fs,
|
||||
} else if (do_toggle_fs) {
|
||||
// If we're doing a fullscreen-toggle, we need to do it after coming out of
|
||||
// sync mode (because the toggle triggers sync-mode itself).
|
||||
#if BA_OSTYPE_MACOS && BA_XCODE_BUILD
|
||||
#if BA_OSTYPE_MACOS && BA_XCODE_BUILD && !BA_XCODE_NEW_PROJECT
|
||||
#if BA_ENABLE_OPENGL
|
||||
SDL_WM_ToggleFullScreen(gl_context_->sdl_screen_surface());
|
||||
#endif
|
||||
|
||||
@ -21,7 +21,7 @@
|
||||
#include "ballistica/ui/widget/row_widget.h"
|
||||
#include "ballistica/ui/widget/scroll_widget.h"
|
||||
|
||||
#if !BA_HEADLESS_BUILD
|
||||
#if !BA_HEADLESS_BUILD && !BA_XCODE_NEW_PROJECT
|
||||
extern "C" void SDL_ericf_focus(void);
|
||||
#endif
|
||||
|
||||
@ -1859,7 +1859,8 @@ auto PyFocusWindow(PyObject* self, PyObject* args, PyObject* keywds)
|
||||
return nullptr;
|
||||
}
|
||||
assert(InGameThread());
|
||||
#if BA_OSTYPE_MACOS && BA_XCODE_BUILD && !BA_HEADLESS_BUILD
|
||||
#if BA_OSTYPE_MACOS && BA_XCODE_BUILD && !BA_HEADLESS_BUILD \
|
||||
&& !BA_XCODE_NEW_PROJECT
|
||||
SDL_ericf_focus();
|
||||
#else
|
||||
#endif
|
||||
|
||||
@ -5,7 +5,7 @@ PCOMMAND = ../../tools/pcommand
|
||||
all: sources
|
||||
|
||||
clean:
|
||||
rm -rf ../ballistica/generated ../../assets/src/ba_data/python/ba/_generated
|
||||
rm -rf ../ballistica/generated ../../assets/src/ba_data/python/ba/_generated bametainternal/generated
|
||||
|
||||
|
||||
# This section is generated by batools.metamakefile; do not edit by hand.
|
||||
|
||||
@ -16,7 +16,8 @@ from efro.error import CleanError, RemoteError
|
||||
from efro.dataclassio import ioprepped
|
||||
from efro.message import (Message, Response, MessageProtocol, MessageSender,
|
||||
BoundMessageSender, MessageReceiver,
|
||||
BoundMessageReceiver)
|
||||
BoundMessageReceiver, UnregisteredMessageIDError,
|
||||
EmptyResponse)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Callable, Optional, Awaitable
|
||||
@ -51,6 +52,13 @@ class _TMsg3(Message):
|
||||
sval: str
|
||||
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class _TMsg4(Message):
|
||||
"""Just testing."""
|
||||
sval2: str
|
||||
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class _TResp1(Response):
|
||||
@ -183,20 +191,20 @@ class _BoundTestMessageSenderAsync(BoundMessageSender):
|
||||
# SEND_BOTH_CODE_TEST_BEGIN
|
||||
|
||||
|
||||
class _TestMessageSenderBoth(MessageSender):
|
||||
class _TestMessageSenderBBoth(MessageSender):
|
||||
"""Protocol-specific sender."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
protocol = TEST_PROTOCOL
|
||||
protocol = TEST_PROTOCOL_B
|
||||
super().__init__(protocol)
|
||||
|
||||
def __get__(self,
|
||||
obj: Any,
|
||||
type_in: Any = None) -> _BoundTestMessageSenderBoth:
|
||||
return _BoundTestMessageSenderBoth(obj, self)
|
||||
type_in: Any = None) -> _BoundTestMessageSenderBBoth:
|
||||
return _BoundTestMessageSenderBBoth(obj, self)
|
||||
|
||||
|
||||
class _BoundTestMessageSenderBoth(BoundMessageSender):
|
||||
class _BoundTestMessageSenderBBoth(BoundMessageSender):
|
||||
"""Protocol-specific bound sender."""
|
||||
|
||||
@overload
|
||||
@ -211,6 +219,10 @@ class _BoundTestMessageSenderBoth(BoundMessageSender):
|
||||
def send(self, message: _TMsg3) -> None:
|
||||
...
|
||||
|
||||
@overload
|
||||
def send(self, message: _TMsg4) -> None:
|
||||
...
|
||||
|
||||
def send(self, message: Message) -> Optional[Response]:
|
||||
"""Send a message synchronously."""
|
||||
return self._sender.send(self._obj, message)
|
||||
@ -227,6 +239,10 @@ class _BoundTestMessageSenderBoth(BoundMessageSender):
|
||||
async def send_async(self, message: _TMsg3) -> None:
|
||||
...
|
||||
|
||||
@overload
|
||||
async def send_async(self, message: _TMsg4) -> None:
|
||||
...
|
||||
|
||||
async def send_async(self, message: Message) -> Optional[Response]:
|
||||
"""Send a message asynchronously."""
|
||||
return await self._sender.send_async(self._obj, message)
|
||||
@ -267,9 +283,12 @@ class _TestSingleMessageReceiver(MessageReceiver):
|
||||
class _BoundTestSingleMessageReceiver(BoundMessageReceiver):
|
||||
"""Protocol-specific bound receiver."""
|
||||
|
||||
def handle_raw_message(self, message: str) -> str:
|
||||
def handle_raw_message(self,
|
||||
message: str,
|
||||
raise_unregistered: bool = False) -> str:
|
||||
"""Synchronously handle a raw incoming message."""
|
||||
return self._receiver.handle_raw_message(self._obj, message)
|
||||
return self._receiver.handle_raw_message(self._obj, message,
|
||||
raise_unregistered)
|
||||
|
||||
|
||||
# RCV_SINGLE_CODE_TEST_END
|
||||
@ -324,9 +343,12 @@ class _TestSyncMessageReceiver(MessageReceiver):
|
||||
class _BoundTestSyncMessageReceiver(BoundMessageReceiver):
|
||||
"""Protocol-specific bound receiver."""
|
||||
|
||||
def handle_raw_message(self, message: str) -> str:
|
||||
def handle_raw_message(self,
|
||||
message: str,
|
||||
raise_unregistered: bool = False) -> str:
|
||||
"""Synchronously handle a raw incoming message."""
|
||||
return self._receiver.handle_raw_message(self._obj, message)
|
||||
return self._receiver.handle_raw_message(self._obj, message,
|
||||
raise_unregistered)
|
||||
|
||||
|
||||
# RCV_SYNC_CODE_TEST_END
|
||||
@ -381,10 +403,12 @@ class _TestAsyncMessageReceiver(MessageReceiver):
|
||||
class _BoundTestAsyncMessageReceiver(BoundMessageReceiver):
|
||||
"""Protocol-specific bound receiver."""
|
||||
|
||||
async def handle_raw_message(self, message: str) -> str:
|
||||
async def handle_raw_message(self,
|
||||
message: str,
|
||||
raise_unregistered: bool = False) -> str:
|
||||
"""Asynchronously handle a raw incoming message."""
|
||||
return await self._receiver.handle_raw_message_async(
|
||||
self._obj, message)
|
||||
self._obj, message, raise_unregistered)
|
||||
|
||||
|
||||
# RCV_ASYNC_CODE_TEST_END
|
||||
@ -403,6 +427,23 @@ TEST_PROTOCOL = MessageProtocol(
|
||||
log_remote_exceptions=False,
|
||||
)
|
||||
|
||||
# Represents an 'evolved' TEST_PROTOCOL (one extra message type added).
|
||||
# (so we can test communication failures talking to older protocols)
|
||||
TEST_PROTOCOL_B = MessageProtocol(
|
||||
message_types={
|
||||
0: _TMsg1,
|
||||
1: _TMsg2,
|
||||
2: _TMsg3,
|
||||
3: _TMsg4,
|
||||
},
|
||||
response_types={
|
||||
0: _TResp1,
|
||||
1: _TResp2,
|
||||
},
|
||||
trusted_sender=True,
|
||||
log_remote_exceptions=False,
|
||||
)
|
||||
|
||||
TEST_PROTOCOL_SINGLE = MessageProtocol(
|
||||
message_types={
|
||||
0: _TMsg1,
|
||||
@ -534,9 +575,9 @@ def test_sender_module_both_emb() -> None:
|
||||
# here, but it requires us to pass code which imports this test module
|
||||
# to get at the protocol, and that currently fails in our static mypy
|
||||
# tests.
|
||||
smod = TEST_PROTOCOL.do_create_sender_module(
|
||||
'TestMessageSenderBoth',
|
||||
protocol_create_code='protocol = TEST_PROTOCOL',
|
||||
smod = TEST_PROTOCOL_B.do_create_sender_module(
|
||||
'TestMessageSenderBBoth',
|
||||
protocol_create_code='protocol = TEST_PROTOCOL_B',
|
||||
enable_sync_sends=True,
|
||||
enable_async_sends=True,
|
||||
private=True,
|
||||
@ -544,7 +585,7 @@ def test_sender_module_both_emb() -> None:
|
||||
|
||||
# Clip everything up to our first class declaration.
|
||||
lines = smod.splitlines()
|
||||
classline = lines.index('class _TestMessageSenderBoth(MessageSender):')
|
||||
classline = lines.index('class _TestMessageSenderBBoth(MessageSender):')
|
||||
clipped = '\n'.join(lines[classline:])
|
||||
|
||||
# This snippet should match what we've got embedded above;
|
||||
@ -697,11 +738,16 @@ def test_receiver_creation() -> None:
|
||||
def test_full_pipeline() -> None:
|
||||
"""Test the full pipeline."""
|
||||
|
||||
# pylint: disable=too-many-locals
|
||||
# pylint: disable=too-many-statements
|
||||
|
||||
# Define a class that can send messages and one that can receive them.
|
||||
class TestClassS:
|
||||
"""Test class incorporating send functionality."""
|
||||
|
||||
msg = _TestMessageSenderBoth()
|
||||
msg = _TestMessageSenderBBoth()
|
||||
|
||||
test_handling_unregistered = False
|
||||
|
||||
def __init__(self, target: Union[TestClassRSync,
|
||||
TestClassRAsync]) -> None:
|
||||
@ -713,7 +759,15 @@ def test_full_pipeline() -> None:
|
||||
# Just talk directly to the receiver for this example.
|
||||
# (currently only support synchronous receivers)
|
||||
assert isinstance(self._target, TestClassRSync)
|
||||
return self._target.receiver.handle_raw_message(data)
|
||||
try:
|
||||
return self._target.receiver.handle_raw_message(
|
||||
data, raise_unregistered=self.test_handling_unregistered)
|
||||
except UnregisteredMessageIDError:
|
||||
if self.test_handling_unregistered:
|
||||
# Emulate forwarding unregistered messages on to some
|
||||
# other handler...
|
||||
return self.msg.protocol.encode_response(EmptyResponse())
|
||||
raise
|
||||
|
||||
@msg.send_async_method
|
||||
async def _send_raw_message_async(self, data: str) -> str:
|
||||
@ -816,6 +870,18 @@ def test_full_pipeline() -> None:
|
||||
response6 = asyncio.run(obj2.msg.send_async(_TMsg1(ival=0)))
|
||||
assert isinstance(response6, _TResp1)
|
||||
|
||||
# Our sender here is using a 'newer' protocol which contains a message
|
||||
# type not in the older protocol used by our receivers. Make sure we
|
||||
# get the expected error when trying to send that message to them.
|
||||
with pytest.raises(RemoteError):
|
||||
_response7 = obj.msg.send(_TMsg4(sval2='blargh'))
|
||||
|
||||
# Also make sure the receiver can explicitly handle unregistered
|
||||
# messages (by forwarding them along to something that can, etc).
|
||||
obj.test_handling_unregistered = True
|
||||
response7 = obj.msg.send(_TMsg4(sval2='blargh'))
|
||||
assert response7 is None
|
||||
|
||||
# Make sure static typing lines up with what we expect.
|
||||
if os.environ.get('EFRO_TEST_MESSAGE_FAST') != '1':
|
||||
assert static_type_equals(response6, _TResp1)
|
||||
|
||||
383
tests/test_efro/test_rpc.py
Normal file
383
tests/test_efro/test_rpc.py
Normal file
@ -0,0 +1,383 @@
|
||||
# Released under the MIT License. See LICENSE for details.
|
||||
#
|
||||
"""Testing rpc functionality."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
import random
|
||||
import asyncio
|
||||
import weakref
|
||||
from enum import unique, Enum
|
||||
from typing import TYPE_CHECKING
|
||||
from dataclasses import dataclass
|
||||
|
||||
import pytest
|
||||
|
||||
from efro.rpc import RPCEndpoint
|
||||
from efro.error import CommunicationError
|
||||
from efro.dataclassio import ioprepped, dataclass_from_json, dataclass_to_json
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Optional, Awaitable, Callable
|
||||
|
||||
ADDR = '127.0.0.1'
|
||||
# Randomize this a bit to avoid failing on parallel testing.
|
||||
# Ideally we should let the OS pick the address and pass the
|
||||
# resulting one to the client.
|
||||
PORT = random.randrange(8000, 16000)
|
||||
|
||||
SLOW_WAIT = 1.0
|
||||
|
||||
|
||||
@unique
|
||||
class _MessageType(Enum):
|
||||
TEST1 = 't1'
|
||||
RESPONSE1 = 'r1'
|
||||
TEST2 = '2'
|
||||
RESPONSE2 = 'r2'
|
||||
TEST_SLOW = 'ts'
|
||||
RESPONSE_SLOW = 'rs'
|
||||
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class _Message:
|
||||
messagetype: _MessageType
|
||||
|
||||
|
||||
class _ServerClientCommon:
|
||||
|
||||
def __init__(self, keepalive_interval: float, keepalive_timeout: float,
|
||||
debug_print: bool) -> None:
|
||||
self._endpoint: Optional[RPCEndpoint] = None
|
||||
self._keepalive_interval = keepalive_interval
|
||||
self._keepalive_timeout = keepalive_timeout
|
||||
self._debug_print = debug_print
|
||||
|
||||
def has_endpoint(self) -> bool:
|
||||
"""Is our endpoint up yet?"""
|
||||
return self._endpoint is not None
|
||||
|
||||
@property
|
||||
def endpoint(self) -> RPCEndpoint:
|
||||
"""Our endpoint."""
|
||||
if self._endpoint is None:
|
||||
raise RuntimeError('Expected endpoint to exist.')
|
||||
return self._endpoint
|
||||
|
||||
async def send_message(self,
|
||||
message: _Message,
|
||||
timeout: Optional[float] = None) -> _Message:
|
||||
"""Send high level messages."""
|
||||
assert self._endpoint is not None
|
||||
response = await self._endpoint.send_message(
|
||||
dataclass_to_json(message).encode(), timeout=timeout)
|
||||
return dataclass_from_json(_Message, response.decode())
|
||||
|
||||
async def handle_message(self, msg: _Message) -> _Message:
|
||||
"""Handle a high-level message."""
|
||||
|
||||
if msg.messagetype is _MessageType.TEST1:
|
||||
return _Message(_MessageType.RESPONSE1)
|
||||
|
||||
if msg.messagetype is _MessageType.TEST2:
|
||||
return _Message(_MessageType.RESPONSE2)
|
||||
|
||||
if msg.messagetype is _MessageType.TEST_SLOW:
|
||||
await asyncio.sleep(SLOW_WAIT)
|
||||
return _Message(_MessageType.RESPONSE_SLOW)
|
||||
|
||||
raise RuntimeError(f'Got unexpected message type: {msg.messagetype}')
|
||||
|
||||
async def _handle_raw_message(self, message: bytes) -> bytes:
|
||||
msgobj = dataclass_from_json(_Message, message.decode())
|
||||
rspobj = await self.handle_message(msgobj)
|
||||
return dataclass_to_json(rspobj).encode()
|
||||
|
||||
|
||||
class _Server(_ServerClientCommon):
|
||||
|
||||
def __init__(self, keepalive_interval: float, keepalive_timeout: float,
|
||||
debug_print: bool) -> None:
|
||||
super().__init__(keepalive_interval=keepalive_interval,
|
||||
keepalive_timeout=keepalive_timeout,
|
||||
debug_print=debug_print)
|
||||
self.listener: Optional[asyncio.base_events.Server] = None
|
||||
|
||||
async def start(self) -> None:
|
||||
"""Start serving. Call this before run()."""
|
||||
assert self.listener is None
|
||||
self.listener = await asyncio.start_server(self._handle_client, ADDR,
|
||||
PORT)
|
||||
|
||||
async def run(self) -> None:
|
||||
"""Do the thing."""
|
||||
assert self.listener is not None
|
||||
assert self._endpoint is None
|
||||
async with self.listener:
|
||||
try:
|
||||
await self.listener.serve_forever()
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
async def _handle_client(self, reader: asyncio.StreamReader,
|
||||
writer: asyncio.StreamWriter) -> None:
|
||||
assert self._endpoint is None
|
||||
|
||||
# Note to self: passing ourself as a handler creates a dependency
|
||||
# loop; in production code we'd probably want to store this as a
|
||||
# weak ref or whatnot to keep teardown deterministic.
|
||||
self._endpoint = RPCEndpoint(
|
||||
self._handle_raw_message,
|
||||
reader,
|
||||
writer,
|
||||
keepalive_interval=self._keepalive_interval,
|
||||
keepalive_timeout=self._keepalive_timeout,
|
||||
debug_print=self._debug_print,
|
||||
label='test_rpc_server')
|
||||
|
||||
await self._endpoint.run()
|
||||
|
||||
|
||||
class _Client(_ServerClientCommon):
|
||||
|
||||
def __init__(self, keepalive_interval: float, keepalive_timeout: float,
|
||||
debug_print: bool) -> None:
|
||||
super().__init__(keepalive_interval=keepalive_interval,
|
||||
keepalive_timeout=keepalive_timeout,
|
||||
debug_print=debug_print)
|
||||
|
||||
async def run(self) -> None:
|
||||
"""Do the thing."""
|
||||
reader, writer = await asyncio.open_connection(ADDR, PORT)
|
||||
# Note to self: passing ourself as a handler creates a dependency
|
||||
# loop; in production code we'd probably want to store this as a
|
||||
# weak ref or whatnot to keep teardown deterministic.
|
||||
self._endpoint = RPCEndpoint(
|
||||
self._handle_raw_message,
|
||||
reader,
|
||||
writer,
|
||||
keepalive_interval=self._keepalive_interval,
|
||||
keepalive_timeout=self._keepalive_timeout,
|
||||
debug_print=self._debug_print,
|
||||
label='test_rpc_client')
|
||||
await self._endpoint.run()
|
||||
|
||||
|
||||
class _Tester:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
keepalive_interval: float = RPCEndpoint.DEFAULT_KEEPALIVE_INTERVAL,
|
||||
keepalive_timeout: float = RPCEndpoint.DEFAULT_KEEPALIVE_TIMEOUT,
|
||||
server_debug_print: bool = True,
|
||||
client_debug_print: bool = True) -> None:
|
||||
self.client = _Client(keepalive_interval=keepalive_interval,
|
||||
keepalive_timeout=keepalive_timeout,
|
||||
debug_print=client_debug_print)
|
||||
self.server = _Server(keepalive_interval=keepalive_interval,
|
||||
keepalive_timeout=keepalive_timeout,
|
||||
debug_print=server_debug_print)
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
def run(self, testcall: Awaitable[None]) -> None:
|
||||
"""Run our test."""
|
||||
|
||||
asyncio.run(self._run(testcall), debug=True)
|
||||
|
||||
# Make sure the endpoints go down immediately when we remove our
|
||||
# only refs to them.
|
||||
server_endpoint_ref = weakref.ref(self.server.endpoint)
|
||||
client_endpoint_ref = weakref.ref(self.client.endpoint)
|
||||
del self.client._endpoint
|
||||
del self.server._endpoint
|
||||
|
||||
for name, endpoint in [
|
||||
('server', server_endpoint_ref()),
|
||||
('client', client_endpoint_ref()),
|
||||
]:
|
||||
if endpoint is not None:
|
||||
import gc
|
||||
print('referrers:', gc.get_referrers(endpoint))
|
||||
raise RuntimeError(f'{name} did not go down cleanly')
|
||||
|
||||
async def _run(self, testcall: Awaitable[None]) -> None:
|
||||
|
||||
# Give server a chance to spin up before kicking off client.
|
||||
await self.server.start()
|
||||
|
||||
# Now run our server, our client, and our tests simultaneously.
|
||||
await asyncio.gather(
|
||||
self.server.run(),
|
||||
self.client.run(),
|
||||
self._run_test(testcall),
|
||||
)
|
||||
|
||||
async def _run_test(self, testcall: Awaitable[None]) -> None:
|
||||
"""Set up before and tear down after a test call."""
|
||||
assert self.server.listener is not None
|
||||
|
||||
# Wait until the client has connected.
|
||||
while not self.server.has_endpoint():
|
||||
await asyncio.sleep(0.01)
|
||||
|
||||
print('test_rpc test call starting...')
|
||||
|
||||
# Do the thing.
|
||||
await testcall
|
||||
|
||||
print('test_rpc test call completed; tearing down...')
|
||||
|
||||
# Close both our listener socket and our established endpoint;
|
||||
# this should break us out of our loop.
|
||||
self.server.endpoint.close()
|
||||
await self.server.endpoint.wait_closed()
|
||||
|
||||
self.server.listener.close()
|
||||
await self.server.listener.wait_closed()
|
||||
|
||||
|
||||
def test_keepalive_fail() -> None:
|
||||
"""Test keepalive timeout."""
|
||||
kinterval = 0.05
|
||||
ktimeout = 0.25
|
||||
tester = _Tester(keepalive_interval=kinterval, keepalive_timeout=ktimeout)
|
||||
|
||||
async def _do_it() -> None:
|
||||
|
||||
# Tell our client to not send keepalives.
|
||||
tester.client.endpoint.test_suppress_keepalives = True
|
||||
|
||||
# Sleep just past the keepalive timeout and make sure the endpoint
|
||||
# IS going down.
|
||||
await asyncio.sleep(ktimeout * 1.25)
|
||||
assert tester.server.endpoint.is_closing()
|
||||
|
||||
tester.run(_do_it())
|
||||
|
||||
|
||||
def test_keepalive_success() -> None:
|
||||
"""Test keepalive non-timeout."""
|
||||
kinterval = 0.05
|
||||
ktimeout = 0.25
|
||||
tester = _Tester(keepalive_interval=kinterval, keepalive_timeout=ktimeout)
|
||||
|
||||
async def _do_it() -> None:
|
||||
|
||||
# Sleep just past the keepalive timeout and make sure the endpoint
|
||||
# is NOT going down
|
||||
await asyncio.sleep(ktimeout * 1.25)
|
||||
assert not tester.server.endpoint.is_closing()
|
||||
|
||||
tester.run(_do_it())
|
||||
|
||||
|
||||
def test_simple_messages() -> None:
|
||||
"""Test basic messages and responses."""
|
||||
tester = _Tester()
|
||||
|
||||
async def _do_it() -> None:
|
||||
|
||||
# Send some messages both directions and make sure we get the expected
|
||||
# response types.
|
||||
|
||||
resp = await tester.server.send_message(_Message(_MessageType.TEST1))
|
||||
assert resp.messagetype is _MessageType.RESPONSE1
|
||||
|
||||
resp = await tester.client.send_message(_Message(_MessageType.TEST1))
|
||||
assert resp.messagetype is _MessageType.RESPONSE1
|
||||
|
||||
resp = await tester.server.send_message(_Message(_MessageType.TEST2))
|
||||
assert resp.messagetype is _MessageType.RESPONSE2
|
||||
|
||||
resp = await tester.client.send_message(_Message(_MessageType.TEST2))
|
||||
assert resp.messagetype is _MessageType.RESPONSE2
|
||||
|
||||
tester.run(_do_it())
|
||||
|
||||
|
||||
def test_simultaneous_messages() -> None:
|
||||
"""Test basic messages and responses."""
|
||||
tester = _Tester()
|
||||
|
||||
async def _do_it() -> None:
|
||||
|
||||
# Send a bunch of messages both ways at once and make sure
|
||||
# they all come through simultaneously-ish.
|
||||
starttime = time.monotonic()
|
||||
results = await asyncio.gather(
|
||||
tester.client.send_message(_Message(_MessageType.TEST_SLOW)),
|
||||
tester.server.send_message(_Message(_MessageType.TEST_SLOW)),
|
||||
tester.client.send_message(_Message(_MessageType.TEST_SLOW)),
|
||||
tester.server.send_message(_Message(_MessageType.TEST_SLOW)),
|
||||
tester.client.send_message(_Message(_MessageType.TEST_SLOW)),
|
||||
tester.server.send_message(_Message(_MessageType.TEST_SLOW)),
|
||||
)
|
||||
|
||||
# This should all go through in the same time that 1 goes through in.
|
||||
assert (time.monotonic() - starttime) < 1.1 * SLOW_WAIT
|
||||
|
||||
# Make sure we got all correct responses.
|
||||
assert all(r.messagetype is _MessageType.RESPONSE_SLOW
|
||||
for r in results)
|
||||
|
||||
# They should all be uniquely created message objects.
|
||||
assert len(set(id(r) for r in results)) == len(results)
|
||||
|
||||
tester.run(_do_it())
|
||||
|
||||
|
||||
def test_message_timeout() -> None:
|
||||
"""Test sends timing out."""
|
||||
tester = _Tester()
|
||||
|
||||
async def _do_it() -> None:
|
||||
|
||||
# This message should return after a short wait.
|
||||
resp = await tester.server.send_message(
|
||||
_Message(_MessageType.TEST_SLOW))
|
||||
assert resp.messagetype is _MessageType.RESPONSE_SLOW
|
||||
|
||||
# This message should time out.
|
||||
with pytest.raises(CommunicationError):
|
||||
resp = await tester.server.send_message(
|
||||
_Message(_MessageType.TEST_SLOW),
|
||||
timeout=0.5,
|
||||
)
|
||||
|
||||
tester.run(_do_it())
|
||||
|
||||
|
||||
def test_server_interrupt() -> None:
|
||||
"""Test server dying during message send."""
|
||||
tester = _Tester()
|
||||
|
||||
async def _do_it() -> None:
|
||||
|
||||
async def _kill_connection() -> None:
|
||||
await asyncio.sleep(0.2)
|
||||
tester.server.endpoint.close()
|
||||
|
||||
asyncio.create_task(_kill_connection())
|
||||
with pytest.raises(CommunicationError):
|
||||
await tester.server.send_message(_Message(_MessageType.TEST_SLOW))
|
||||
|
||||
tester.run(_do_it())
|
||||
|
||||
|
||||
def test_client_interrupt() -> None:
|
||||
"""Test client dying during message send."""
|
||||
tester = _Tester()
|
||||
|
||||
async def _do_it() -> None:
|
||||
|
||||
async def _kill_connection() -> None:
|
||||
await asyncio.sleep(0.2)
|
||||
tester.client.endpoint.close()
|
||||
|
||||
asyncio.create_task(_kill_connection())
|
||||
with pytest.raises(CommunicationError):
|
||||
await tester.server.send_message(_Message(_MessageType.TEST_SLOW))
|
||||
|
||||
tester.run(_do_it())
|
||||
@ -8,8 +8,6 @@ import sys
|
||||
import subprocess
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import efrotools
|
||||
|
||||
if TYPE_CHECKING:
|
||||
pass
|
||||
|
||||
@ -64,16 +62,22 @@ def androidaddr(archive_dir: str, arch: str, addr: str) -> None:
|
||||
print(f"ERROR: can't find addr2line binary ({len(lines)} options).")
|
||||
sys.exit(255)
|
||||
addr2line = lines[0]
|
||||
efrotools.run('mkdir -p "' + os.path.join(rootdir, 'android_addr_tmp') +
|
||||
'"')
|
||||
subprocess.run('mkdir -p "' + os.path.join(rootdir, 'android_addr_tmp') +
|
||||
'"',
|
||||
shell=True,
|
||||
check=True)
|
||||
try:
|
||||
efrotools.run('cd "' + os.path.join(rootdir, 'android_addr_tmp') +
|
||||
'" && tar -xf "' +
|
||||
os.path.join(archive_dir, 'unstripped_libs',
|
||||
archs[arch]['libmain'] + '.tgz') + '"')
|
||||
efrotools.run(
|
||||
subprocess.run('cd "' + os.path.join(rootdir, 'android_addr_tmp') +
|
||||
'" && tar -xf "' +
|
||||
os.path.join(archive_dir, 'unstripped_libs',
|
||||
archs[arch]['libmain'] + '.tgz') + '"',
|
||||
shell=True,
|
||||
check=True)
|
||||
subprocess.run(
|
||||
addr2line + ' -e "' +
|
||||
os.path.join(rootdir, 'android_addr_tmp', archs[arch]['libmain']) +
|
||||
'" ' + addr)
|
||||
'" ' + addr,
|
||||
shell=True,
|
||||
check=True)
|
||||
finally:
|
||||
os.system('rm -rf "' + os.path.join(rootdir, 'android_addr_tmp') + '"')
|
||||
|
||||
@ -812,7 +812,7 @@ def filter_server_config(projroot: str, infilepath: str) -> str:
|
||||
def update_docs_md(check: bool) -> None:
|
||||
"""Updates docs markdown files if necessary."""
|
||||
# pylint: disable=too-many-locals
|
||||
from efrotools import get_files_hash, run
|
||||
from efrotools import get_files_hash
|
||||
|
||||
docs_path = 'docs/ba_module.md'
|
||||
|
||||
@ -853,7 +853,7 @@ def update_docs_md(check: bool) -> None:
|
||||
raise RuntimeError('Docs markdown is out of date.')
|
||||
|
||||
print(f'Updating {docs_path}...', flush=True)
|
||||
run('make docs')
|
||||
subprocess.run('make docs', shell=True, check=True)
|
||||
|
||||
# Our docs markdown is just the docs html with a few added
|
||||
# bits at the top.
|
||||
|
||||
@ -91,6 +91,7 @@ def generate(projroot: str) -> None:
|
||||
version, build_number = get_current_version()
|
||||
|
||||
try:
|
||||
os.environ['BA_DOCS_GENERATION'] = '1'
|
||||
pdoc.render.env.globals['ba_version'] = version
|
||||
pdoc.render.env.globals['ba_build'] = build_number
|
||||
pdoc.render.configure(search=True,
|
||||
|
||||
@ -61,7 +61,7 @@ def clean_orphaned_assets() -> None:
|
||||
"""Remove asset files that are no longer part of the build."""
|
||||
import os
|
||||
import json
|
||||
import efrotools
|
||||
import subprocess
|
||||
|
||||
# Operate from dist root..
|
||||
os.chdir(PROJROOT)
|
||||
@ -82,7 +82,9 @@ def clean_orphaned_assets() -> None:
|
||||
os.unlink(fpath)
|
||||
|
||||
# Lastly, clear empty dirs.
|
||||
efrotools.run('find assets/build -depth -empty -type d -delete')
|
||||
subprocess.run('find assets/build -depth -empty -type d -delete',
|
||||
shell=True,
|
||||
check=True)
|
||||
|
||||
|
||||
def resize_image() -> None:
|
||||
@ -91,7 +93,7 @@ def resize_image() -> None:
|
||||
args: xres, yres, src, dst
|
||||
"""
|
||||
import os
|
||||
import efrotools
|
||||
import subprocess
|
||||
if len(sys.argv) != 6:
|
||||
raise Exception('expected 5 args')
|
||||
width = int(sys.argv[2])
|
||||
@ -103,7 +105,9 @@ def resize_image() -> None:
|
||||
if not src.endswith('.png'):
|
||||
raise RuntimeError(f'src must be a png; got "{src}"')
|
||||
print('Creating: ' + os.path.basename(dst), file=sys.stderr)
|
||||
efrotools.run(f'convert "{src}" -resize {width}x{height} "{dst}"')
|
||||
subprocess.run(f'convert "{src}" -resize {width}x{height} "{dst}"',
|
||||
shell=True,
|
||||
check=True)
|
||||
|
||||
|
||||
def check_clean_safety() -> None:
|
||||
|
||||
@ -10,152 +10,22 @@ data formats in a nondestructive manner.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import Enum
|
||||
from typing import TYPE_CHECKING, TypeVar
|
||||
|
||||
from efro.dataclassio._outputter import _Outputter
|
||||
from efro.dataclassio._inputter import _Inputter
|
||||
from efro.dataclassio._base import Codec, IOAttrs, IOExtendedData
|
||||
from efro.util import set_canonical_module
|
||||
from efro.dataclassio._base import (Codec, IOAttrs, IOExtendedData)
|
||||
from efro.dataclassio._prep import (ioprep, ioprepped, will_ioprep,
|
||||
is_ioprepped_dataclass)
|
||||
from efro.dataclassio._pathcapture import DataclassFieldLookup
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Optional
|
||||
from efro.dataclassio._api import (JsonStyle, dataclass_to_dict,
|
||||
dataclass_to_json, dataclass_from_dict,
|
||||
dataclass_from_json, dataclass_validate)
|
||||
|
||||
__all__ = [
|
||||
'Codec', 'IOAttrs', 'IOExtendedData', 'ioprep', 'ioprepped', 'will_ioprep',
|
||||
'is_ioprepped_dataclass', 'DataclassFieldLookup', 'dataclass_to_dict',
|
||||
'dataclass_to_json', 'dataclass_from_dict', 'dataclass_from_json',
|
||||
'dataclass_validate'
|
||||
'JsonStyle', 'Codec', 'IOAttrs', 'IOExtendedData', 'ioprep', 'ioprepped',
|
||||
'will_ioprep', 'is_ioprepped_dataclass', 'DataclassFieldLookup',
|
||||
'dataclass_to_dict', 'dataclass_to_json', 'dataclass_from_dict',
|
||||
'dataclass_from_json', 'dataclass_validate'
|
||||
]
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
class JsonStyle(Enum):
|
||||
"""Different style types for json."""
|
||||
|
||||
# Single line, no spaces, no sorting. Not deterministic.
|
||||
# Use this for most storage purposes.
|
||||
FAST = 'fast'
|
||||
|
||||
# Single line, no spaces, sorted keys. Deterministic.
|
||||
# Use this when output may be hashed or compared for equality.
|
||||
SORTED = 'sorted'
|
||||
|
||||
# Multiple lines, spaces, sorted keys. Deterministic.
|
||||
# Use this for pretty human readable output.
|
||||
PRETTY = 'pretty'
|
||||
|
||||
|
||||
def dataclass_to_dict(obj: Any,
|
||||
codec: Codec = Codec.JSON,
|
||||
coerce_to_float: bool = True) -> dict:
|
||||
"""Given a dataclass object, return a json-friendly dict.
|
||||
|
||||
All values will be checked to ensure they match the types specified
|
||||
on fields. Note that a limited set of types and data configurations is
|
||||
supported.
|
||||
|
||||
Values with type Any will be checked to ensure they match types supported
|
||||
directly by json. This does not include types such as tuples which are
|
||||
implicitly translated by Python's json module (as this would break
|
||||
the ability to do a lossless round-trip with data).
|
||||
|
||||
If coerce_to_float is True, integer values present on float typed fields
|
||||
will be converted to float in the dict output. If False, a TypeError
|
||||
will be triggered.
|
||||
"""
|
||||
|
||||
out = _Outputter(obj,
|
||||
create=True,
|
||||
codec=codec,
|
||||
coerce_to_float=coerce_to_float).run()
|
||||
assert isinstance(out, dict)
|
||||
return out
|
||||
|
||||
|
||||
def dataclass_to_json(obj: Any,
|
||||
coerce_to_float: bool = True,
|
||||
pretty: bool = False,
|
||||
sort_keys: Optional[bool] = None) -> str:
|
||||
"""Utility function; return a json string from a dataclass instance.
|
||||
|
||||
Basically json.dumps(dataclass_to_dict(...)).
|
||||
By default, keys are sorted for pretty output and not otherwise, but
|
||||
this can be overridden by supplying a value for the 'sort_keys' arg.
|
||||
"""
|
||||
import json
|
||||
jdict = dataclass_to_dict(obj=obj,
|
||||
coerce_to_float=coerce_to_float,
|
||||
codec=Codec.JSON)
|
||||
if sort_keys is None:
|
||||
sort_keys = pretty
|
||||
if pretty:
|
||||
return json.dumps(jdict, indent=2, sort_keys=sort_keys)
|
||||
return json.dumps(jdict, separators=(',', ':'), sort_keys=sort_keys)
|
||||
|
||||
|
||||
def dataclass_from_dict(cls: type[T],
|
||||
values: dict,
|
||||
codec: Codec = Codec.JSON,
|
||||
coerce_to_float: bool = True,
|
||||
allow_unknown_attrs: bool = True,
|
||||
discard_unknown_attrs: bool = False) -> T:
|
||||
"""Given a dict, return a dataclass of a given type.
|
||||
|
||||
The dict must be formatted to match the specified codec (generally
|
||||
json-friendly object types). This means that sequence values such as
|
||||
tuples or sets should be passed as lists, enums should be passed as their
|
||||
associated values, nested dataclasses should be passed as dicts, etc.
|
||||
|
||||
All values are checked to ensure their types/values are valid.
|
||||
|
||||
Data for attributes of type Any will be checked to ensure they match
|
||||
types supported directly by json. This does not include types such
|
||||
as tuples which are implicitly translated by Python's json module
|
||||
(as this would break the ability to do a lossless round-trip with data).
|
||||
|
||||
If coerce_to_float is True, int values passed for float typed fields
|
||||
will be converted to float values. Otherwise, a TypeError is raised.
|
||||
|
||||
If allow_unknown_attrs is False, AttributeErrors will be raised for
|
||||
attributes present in the dict but not on the data class. Otherwise, they
|
||||
will be preserved as part of the instance and included if it is
|
||||
exported back to a dict, unless discard_unknown_attrs is True, in which
|
||||
case they will simply be discarded.
|
||||
"""
|
||||
return _Inputter(cls,
|
||||
codec=codec,
|
||||
coerce_to_float=coerce_to_float,
|
||||
allow_unknown_attrs=allow_unknown_attrs,
|
||||
discard_unknown_attrs=discard_unknown_attrs).run(values)
|
||||
|
||||
|
||||
def dataclass_from_json(cls: type[T],
|
||||
json_str: str,
|
||||
coerce_to_float: bool = True,
|
||||
allow_unknown_attrs: bool = True,
|
||||
discard_unknown_attrs: bool = False) -> T:
|
||||
"""Utility function; return a dataclass instance given a json string.
|
||||
|
||||
Basically dataclass_from_dict(json.loads(...))
|
||||
"""
|
||||
import json
|
||||
return dataclass_from_dict(cls=cls,
|
||||
values=json.loads(json_str),
|
||||
coerce_to_float=coerce_to_float,
|
||||
allow_unknown_attrs=allow_unknown_attrs,
|
||||
discard_unknown_attrs=discard_unknown_attrs)
|
||||
|
||||
|
||||
def dataclass_validate(obj: Any,
|
||||
coerce_to_float: bool = True,
|
||||
codec: Codec = Codec.JSON) -> None:
|
||||
"""Ensure that values in a dataclass instance are the correct types."""
|
||||
|
||||
# Simply run an output pass but tell it not to generate data;
|
||||
# only run validation.
|
||||
_Outputter(obj, create=False, codec=codec,
|
||||
coerce_to_float=coerce_to_float).run()
|
||||
# Have these things present themselves cleanly as 'thismodule.SomeClass'
|
||||
# instead of 'thismodule._internalmodule.SomeClass'
|
||||
set_canonical_module(module_globals=globals(), names=__all__)
|
||||
|
||||
151
tools/efro/dataclassio/_api.py
Normal file
151
tools/efro/dataclassio/_api.py
Normal file
@ -0,0 +1,151 @@
|
||||
# Released under the MIT License. See LICENSE for details.
|
||||
#
|
||||
"""Functionality for importing, exporting, and validating dataclasses.
|
||||
|
||||
This allows complex nested dataclasses to be flattened to json-compatible
|
||||
data and restored from said data. It also gracefully handles and preserves
|
||||
unrecognized attribute data, allowing older clients to interact with newer
|
||||
data formats in a nondestructive manner.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import Enum
|
||||
from typing import TYPE_CHECKING, TypeVar
|
||||
|
||||
from efro.dataclassio._outputter import _Outputter
|
||||
from efro.dataclassio._inputter import _Inputter
|
||||
from efro.dataclassio._base import Codec
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Optional
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
class JsonStyle(Enum):
|
||||
"""Different style types for json."""
|
||||
|
||||
# Single line, no spaces, no sorting. Not deterministic.
|
||||
# Use this for most storage purposes.
|
||||
FAST = 'fast'
|
||||
|
||||
# Single line, no spaces, sorted keys. Deterministic.
|
||||
# Use this when output may be hashed or compared for equality.
|
||||
SORTED = 'sorted'
|
||||
|
||||
# Multiple lines, spaces, sorted keys. Deterministic.
|
||||
# Use this for pretty human readable output.
|
||||
PRETTY = 'pretty'
|
||||
|
||||
|
||||
def dataclass_to_dict(obj: Any,
|
||||
codec: Codec = Codec.JSON,
|
||||
coerce_to_float: bool = True) -> dict:
|
||||
"""Given a dataclass object, return a json-friendly dict.
|
||||
|
||||
All values will be checked to ensure they match the types specified
|
||||
on fields. Note that a limited set of types and data configurations is
|
||||
supported.
|
||||
|
||||
Values with type Any will be checked to ensure they match types supported
|
||||
directly by json. This does not include types such as tuples which are
|
||||
implicitly translated by Python's json module (as this would break
|
||||
the ability to do a lossless round-trip with data).
|
||||
|
||||
If coerce_to_float is True, integer values present on float typed fields
|
||||
will be converted to float in the dict output. If False, a TypeError
|
||||
will be triggered.
|
||||
"""
|
||||
|
||||
out = _Outputter(obj,
|
||||
create=True,
|
||||
codec=codec,
|
||||
coerce_to_float=coerce_to_float).run()
|
||||
assert isinstance(out, dict)
|
||||
return out
|
||||
|
||||
|
||||
def dataclass_to_json(obj: Any,
|
||||
coerce_to_float: bool = True,
|
||||
pretty: bool = False,
|
||||
sort_keys: Optional[bool] = None) -> str:
|
||||
"""Utility function; return a json string from a dataclass instance.
|
||||
|
||||
Basically json.dumps(dataclass_to_dict(...)).
|
||||
By default, keys are sorted for pretty output and not otherwise, but
|
||||
this can be overridden by supplying a value for the 'sort_keys' arg.
|
||||
"""
|
||||
import json
|
||||
jdict = dataclass_to_dict(obj=obj,
|
||||
coerce_to_float=coerce_to_float,
|
||||
codec=Codec.JSON)
|
||||
if sort_keys is None:
|
||||
sort_keys = pretty
|
||||
if pretty:
|
||||
return json.dumps(jdict, indent=2, sort_keys=sort_keys)
|
||||
return json.dumps(jdict, separators=(',', ':'), sort_keys=sort_keys)
|
||||
|
||||
|
||||
def dataclass_from_dict(cls: type[T],
|
||||
values: dict,
|
||||
codec: Codec = Codec.JSON,
|
||||
coerce_to_float: bool = True,
|
||||
allow_unknown_attrs: bool = True,
|
||||
discard_unknown_attrs: bool = False) -> T:
|
||||
"""Given a dict, return a dataclass of a given type.
|
||||
|
||||
The dict must be formatted to match the specified codec (generally
|
||||
json-friendly object types). This means that sequence values such as
|
||||
tuples or sets should be passed as lists, enums should be passed as their
|
||||
associated values, nested dataclasses should be passed as dicts, etc.
|
||||
|
||||
All values are checked to ensure their types/values are valid.
|
||||
|
||||
Data for attributes of type Any will be checked to ensure they match
|
||||
types supported directly by json. This does not include types such
|
||||
as tuples which are implicitly translated by Python's json module
|
||||
(as this would break the ability to do a lossless round-trip with data).
|
||||
|
||||
If coerce_to_float is True, int values passed for float typed fields
|
||||
will be converted to float values. Otherwise, a TypeError is raised.
|
||||
|
||||
If allow_unknown_attrs is False, AttributeErrors will be raised for
|
||||
attributes present in the dict but not on the data class. Otherwise, they
|
||||
will be preserved as part of the instance and included if it is
|
||||
exported back to a dict, unless discard_unknown_attrs is True, in which
|
||||
case they will simply be discarded.
|
||||
"""
|
||||
return _Inputter(cls,
|
||||
codec=codec,
|
||||
coerce_to_float=coerce_to_float,
|
||||
allow_unknown_attrs=allow_unknown_attrs,
|
||||
discard_unknown_attrs=discard_unknown_attrs).run(values)
|
||||
|
||||
|
||||
def dataclass_from_json(cls: type[T],
|
||||
json_str: str,
|
||||
coerce_to_float: bool = True,
|
||||
allow_unknown_attrs: bool = True,
|
||||
discard_unknown_attrs: bool = False) -> T:
|
||||
"""Utility function; return a dataclass instance given a json string.
|
||||
|
||||
Basically dataclass_from_dict(json.loads(...))
|
||||
"""
|
||||
import json
|
||||
return dataclass_from_dict(cls=cls,
|
||||
values=json.loads(json_str),
|
||||
coerce_to_float=coerce_to_float,
|
||||
allow_unknown_attrs=allow_unknown_attrs,
|
||||
discard_unknown_attrs=discard_unknown_attrs)
|
||||
|
||||
|
||||
def dataclass_validate(obj: Any,
|
||||
coerce_to_float: bool = True,
|
||||
codec: Codec = Codec.JSON) -> None:
|
||||
"""Ensure that values in a dataclass instance are the correct types."""
|
||||
|
||||
# Simply run an output pass but tell it not to generate data;
|
||||
# only run validation.
|
||||
_Outputter(obj, create=False, codec=codec,
|
||||
coerce_to_float=coerce_to_float).run()
|
||||
27
tools/efro/message/__init__.py
Normal file
27
tools/efro/message/__init__.py
Normal file
@ -0,0 +1,27 @@
|
||||
# Released under the MIT License. See LICENSE for details.
|
||||
#
|
||||
"""Functionality for sending and responding to messages.
|
||||
Supports static typing for message types and possible return types.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from efro.util import set_canonical_module
|
||||
from efro.message._protocol import MessageProtocol
|
||||
from efro.message._sender import (MessageSender, BoundMessageSender)
|
||||
from efro.message._receiver import (MessageReceiver, BoundMessageReceiver)
|
||||
from efro.message._module import (create_sender_module, create_receiver_module)
|
||||
from efro.message._message import (Message, Response, EmptyResponse,
|
||||
ErrorResponse, StringResponse, BoolResponse,
|
||||
UnregisteredMessageIDError)
|
||||
|
||||
__all__ = [
|
||||
'Message', 'Response', 'EmptyResponse', 'ErrorResponse', 'StringResponse',
|
||||
'BoolResponse', 'MessageProtocol', 'MessageSender', 'BoundMessageSender',
|
||||
'MessageReceiver', 'BoundMessageReceiver', 'create_sender_module',
|
||||
'create_receiver_module', 'UnregisteredMessageIDError'
|
||||
]
|
||||
|
||||
# Have these things present themselves cleanly as 'thismodule.SomeClass'
|
||||
# instead of 'thismodule._internalmodule.SomeClass'
|
||||
set_canonical_module(module_globals=globals(), names=__all__)
|
||||
86
tools/efro/message/_message.py
Normal file
86
tools/efro/message/_message.py
Normal file
@ -0,0 +1,86 @@
|
||||
# Released under the MIT License. See LICENSE for details.
|
||||
#
|
||||
"""Functionality for sending and responding to messages.
|
||||
Supports static typing for message types and possible return types.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Annotated
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
|
||||
from efro.dataclassio import ioprepped, IOAttrs
|
||||
|
||||
if TYPE_CHECKING:
|
||||
pass
|
||||
|
||||
|
||||
class UnregisteredMessageIDError(Exception):
|
||||
"""A message or response id is not covered by our protocol."""
|
||||
|
||||
|
||||
class Message:
|
||||
"""Base class for messages."""
|
||||
|
||||
@classmethod
|
||||
def get_response_types(cls) -> list[type[Response]]:
|
||||
"""Return all message types this Message can result in when sent.
|
||||
|
||||
The default implementation specifies EmptyResponse, so messages with
|
||||
no particular response needs can leave this untouched.
|
||||
Note that ErrorMessage is handled as a special case and does not
|
||||
need to be specified here.
|
||||
"""
|
||||
return [EmptyResponse]
|
||||
|
||||
|
||||
class Response:
|
||||
"""Base class for responses to messages."""
|
||||
|
||||
|
||||
# Some standard response types:
|
||||
|
||||
|
||||
class ErrorType(Enum):
|
||||
"""Type of error that occurred in remote message handling."""
|
||||
OTHER = 0
|
||||
CLEAN = 1
|
||||
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class ErrorResponse(Response):
|
||||
"""Message saying some error has occurred on the other end.
|
||||
|
||||
This type is unique in that it is not returned to the user; it
|
||||
instead results in a local exception being raised.
|
||||
"""
|
||||
error_message: Annotated[str, IOAttrs('m')]
|
||||
error_type: Annotated[ErrorType, IOAttrs('e')] = ErrorType.OTHER
|
||||
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class EmptyResponse(Response):
|
||||
"""The response equivalent of None."""
|
||||
|
||||
|
||||
# TODO: could allow handlers to deal in raw values for these
|
||||
# types similar to how we allow None in place of EmptyResponse.
|
||||
# Though not sure if they are widely used enough to warrant the
|
||||
# extra code complexity.
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class BoolResponse(Response):
|
||||
"""A simple bool value response."""
|
||||
|
||||
value: Annotated[bool, IOAttrs('v')]
|
||||
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class StringResponse(Response):
|
||||
"""A simple string value response."""
|
||||
|
||||
value: Annotated[str, IOAttrs('v')]
|
||||
102
tools/efro/message/_module.py
Normal file
102
tools/efro/message/_module.py
Normal file
@ -0,0 +1,102 @@
|
||||
# Released under the MIT License. See LICENSE for details.
|
||||
#
|
||||
"""Functionality for sending and responding to messages.
|
||||
Supports static typing for message types and possible return types.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from efro.message._protocol import MessageProtocol
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def create_sender_module(
|
||||
basename: str,
|
||||
protocol_create_code: str,
|
||||
enable_sync_sends: bool,
|
||||
enable_async_sends: bool,
|
||||
private: bool = False,
|
||||
protocol_module_level_import_code: Optional[str] = None,
|
||||
build_time_protocol_create_code: Optional[str] = None,
|
||||
) -> str:
|
||||
"""Create a Python module defining a MessageSender subclass.
|
||||
|
||||
This class is primarily for type checking and will contain overrides
|
||||
for the varieties of send calls for message/response types defined
|
||||
in the protocol.
|
||||
|
||||
Code passed for 'protocol_create_code' should import necessary
|
||||
modules and assign an instance of the Protocol to a 'protocol'
|
||||
variable.
|
||||
|
||||
Class names are based on basename; a basename 'FooSender' will
|
||||
result in classes FooSender and BoundFooSender.
|
||||
|
||||
If 'private' is True, class-names will be prefixed with an '_'.
|
||||
|
||||
Note that generated line lengths are not clipped, so output may need
|
||||
to be run through a formatter to prevent lint warnings about excessive
|
||||
line lengths.
|
||||
"""
|
||||
protocol = _protocol_from_code(
|
||||
build_time_protocol_create_code if build_time_protocol_create_code
|
||||
is not None else protocol_create_code)
|
||||
return protocol.do_create_sender_module(
|
||||
basename=basename,
|
||||
protocol_create_code=protocol_create_code,
|
||||
enable_sync_sends=enable_sync_sends,
|
||||
enable_async_sends=enable_async_sends,
|
||||
private=private,
|
||||
protocol_module_level_import_code=protocol_module_level_import_code)
|
||||
|
||||
|
||||
def create_receiver_module(
|
||||
basename: str,
|
||||
protocol_create_code: str,
|
||||
is_async: bool,
|
||||
private: bool = False,
|
||||
protocol_module_level_import_code: Optional[str] = None,
|
||||
build_time_protocol_create_code: Optional[str] = None,
|
||||
) -> str:
|
||||
""""Create a Python module defining a MessageReceiver subclass.
|
||||
|
||||
This class is primarily for type checking and will contain overrides
|
||||
for the register method for message/response types defined in
|
||||
the protocol.
|
||||
|
||||
Class names are based on basename; a basename 'FooReceiver' will
|
||||
result in FooReceiver and BoundFooReceiver.
|
||||
|
||||
If 'is_async' is True, handle_raw_message() will be an async method
|
||||
and the @handler decorator will expect async methods.
|
||||
|
||||
If 'private' is True, class-names will be prefixed with an '_'.
|
||||
|
||||
Note that line lengths are not clipped, so output may need to be
|
||||
run through a formatter to prevent lint warnings about excessive
|
||||
line lengths.
|
||||
"""
|
||||
protocol = _protocol_from_code(
|
||||
build_time_protocol_create_code if build_time_protocol_create_code
|
||||
is not None else protocol_create_code)
|
||||
return protocol.do_create_receiver_module(
|
||||
basename=basename,
|
||||
protocol_create_code=protocol_create_code,
|
||||
is_async=is_async,
|
||||
private=private,
|
||||
protocol_module_level_import_code=protocol_module_level_import_code)
|
||||
|
||||
|
||||
def _protocol_from_code(protocol_create_code: str) -> MessageProtocol:
|
||||
env: dict = {}
|
||||
exec(protocol_create_code, env) # pylint: disable=exec-used
|
||||
protocol = env.get('protocol')
|
||||
if not isinstance(protocol, MessageProtocol):
|
||||
raise RuntimeError(
|
||||
f'protocol_create_code yielded'
|
||||
f' a {type(protocol)}; expected a MessageProtocol instance.')
|
||||
return protocol
|
||||
@ -6,89 +6,21 @@ Supports static typing for message types and possible return types.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, TypeVar, Annotated
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
import inspect
|
||||
from typing import TYPE_CHECKING
|
||||
import traceback
|
||||
import logging
|
||||
import json
|
||||
import traceback
|
||||
|
||||
from efro.error import CleanError, RemoteError
|
||||
from efro.dataclassio import (ioprepped, is_ioprepped_dataclass, IOAttrs,
|
||||
dataclass_to_dict, dataclass_from_dict)
|
||||
from efro.dataclassio import (is_ioprepped_dataclass, dataclass_to_dict,
|
||||
dataclass_from_dict)
|
||||
from efro.message._message import (Message, Response, ErrorResponse,
|
||||
EmptyResponse, ErrorType,
|
||||
UnregisteredMessageIDError)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Callable, Optional, Sequence, Union, Awaitable
|
||||
|
||||
TM = TypeVar('TM', bound='MessageSender')
|
||||
|
||||
|
||||
class Message:
|
||||
"""Base class for messages."""
|
||||
|
||||
@classmethod
|
||||
def get_response_types(cls) -> list[type[Response]]:
|
||||
"""Return all message types this Message can result in when sent.
|
||||
|
||||
The default implementation specifies EmptyResponse, so messages with
|
||||
no particular response needs can leave this untouched.
|
||||
Note that ErrorMessage is handled as a special case and does not
|
||||
need to be specified here.
|
||||
"""
|
||||
return [EmptyResponse]
|
||||
|
||||
|
||||
class Response:
|
||||
"""Base class for responses to messages."""
|
||||
|
||||
|
||||
# Some standard response types:
|
||||
|
||||
|
||||
class ErrorType(Enum):
|
||||
"""Type of error that occurred in remote message handling."""
|
||||
OTHER = 0
|
||||
CLEAN = 1
|
||||
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class ErrorResponse(Response):
|
||||
"""Message saying some error has occurred on the other end.
|
||||
|
||||
This type is unique in that it is not returned to the user; it
|
||||
instead results in a local exception being raised.
|
||||
"""
|
||||
error_message: Annotated[str, IOAttrs('m')]
|
||||
error_type: Annotated[ErrorType, IOAttrs('e')] = ErrorType.OTHER
|
||||
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class EmptyResponse(Response):
|
||||
"""The response equivalent of None."""
|
||||
|
||||
|
||||
# TODO: could allow handlers to deal in raw values for these
|
||||
# types similar to how we allow None in place of EmptyResponse.
|
||||
# Though not sure if they are widely used enough to warrant the
|
||||
# extra code complexity.
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class BoolResponse(Response):
|
||||
"""A simple bool value response."""
|
||||
|
||||
value: Annotated[bool, IOAttrs('v')]
|
||||
|
||||
|
||||
@ioprepped
|
||||
@dataclass
|
||||
class StringResponse(Response):
|
||||
"""A simple string value response."""
|
||||
|
||||
value: Annotated[str, IOAttrs('v')]
|
||||
|
||||
|
||||
class MessageProtocol:
|
||||
"""Wrangles a set of message types, formats, and response types.
|
||||
@ -150,8 +82,8 @@ class MessageProtocol:
|
||||
self.response_ids_by_type[r_type] = r_id
|
||||
|
||||
# Go ahead and auto-register a few common response types
|
||||
# if the user has not done so explicitly. Use unique IDs which
|
||||
# will never change or overlap with user ids.
|
||||
# if the user has not done so explicitly. Use unique negative
|
||||
# IDs which will never change or overlap with user ids.
|
||||
def _reg_if_not(reg_tp: type[Response], reg_id: int) -> None:
|
||||
if reg_tp in self.response_ids_by_type:
|
||||
return
|
||||
@ -205,6 +137,22 @@ class MessageProtocol:
|
||||
"""Encode a response to a json string for transport."""
|
||||
return self._encode(response, self.response_ids_by_type, 'response')
|
||||
|
||||
def encode_error_response(self, exc: Exception) -> str:
|
||||
"""Return a raw response for an error that occurred during handling."""
|
||||
if self.log_remote_exceptions:
|
||||
logging.exception('Error handling message.')
|
||||
|
||||
# If anything goes wrong, return a ErrorResponse instead.
|
||||
if isinstance(exc, CleanError) and self.preserve_clean_errors:
|
||||
err_response = ErrorResponse(error_message=str(exc),
|
||||
error_type=ErrorType.CLEAN)
|
||||
else:
|
||||
err_response = ErrorResponse(
|
||||
error_message=(traceback.format_exc() if self.trusted_sender
|
||||
else 'An unknown error has occurred.'),
|
||||
error_type=ErrorType.OTHER)
|
||||
return self.encode_response(err_response)
|
||||
|
||||
def _encode(self, message: Any, ids_by_type: dict[type, int],
|
||||
opname: str) -> str:
|
||||
"""Encode a message to a json string for transport."""
|
||||
@ -260,7 +208,8 @@ class MessageProtocol:
|
||||
# Decode this particular type.
|
||||
msgtype = types_by_id.get(m_id)
|
||||
if msgtype is None:
|
||||
raise TypeError(f'Got unregistered {opname} type id of {m_id}.')
|
||||
raise UnregisteredMessageIDError(
|
||||
f'Got unregistered {opname} id of {m_id}.')
|
||||
out = dataclass_from_dict(msgtype, msgdict)
|
||||
|
||||
# Special case: if we get EmptyResponse, we simply return None.
|
||||
@ -278,7 +227,9 @@ class MessageProtocol:
|
||||
|
||||
return out
|
||||
|
||||
def _get_module_header(self, part: str) -> str:
|
||||
def _get_module_header(self,
|
||||
part: str,
|
||||
extra_import_code: Optional[str] = None) -> str:
|
||||
"""Return common parts of generated modules."""
|
||||
# pylint: disable=too-many-locals, too-many-branches
|
||||
import textwrap
|
||||
@ -336,6 +287,9 @@ class MessageProtocol:
|
||||
' BoundMessageReceiver')
|
||||
tpimport_typing_extras = ', Awaitable'
|
||||
|
||||
if extra_import_code is not None:
|
||||
import_lines += f'\n{extra_import_code}\n'
|
||||
|
||||
ovld = ', overload' if not single_message_type else ''
|
||||
tpimport_lines = textwrap.indent(tpimport_lines, ' ')
|
||||
out = ('# Released under the MIT License. See LICENSE for details.\n'
|
||||
@ -356,12 +310,14 @@ class MessageProtocol:
|
||||
f'\n')
|
||||
return out
|
||||
|
||||
def do_create_sender_module(self,
|
||||
basename: str,
|
||||
protocol_create_code: str,
|
||||
enable_sync_sends: bool,
|
||||
enable_async_sends: bool,
|
||||
private: bool = False) -> str:
|
||||
def do_create_sender_module(
|
||||
self,
|
||||
basename: str,
|
||||
protocol_create_code: str,
|
||||
enable_sync_sends: bool,
|
||||
enable_async_sends: bool,
|
||||
private: bool = False,
|
||||
protocol_module_level_import_code: Optional[str] = None) -> str:
|
||||
"""Used by create_sender_module(); do not call directly."""
|
||||
# pylint: disable=too-many-locals
|
||||
import textwrap
|
||||
@ -369,7 +325,8 @@ class MessageProtocol:
|
||||
msgtypes = list(self.message_ids_by_type.keys())
|
||||
|
||||
ppre = '_' if private else ''
|
||||
out = self._get_module_header('sender')
|
||||
out = self._get_module_header(
|
||||
'sender', extra_import_code=protocol_module_level_import_code)
|
||||
ccind = textwrap.indent(protocol_create_code, ' ')
|
||||
out += (f'class {ppre}{basename}(MessageSender):\n'
|
||||
f' """Protocol-specific sender."""\n'
|
||||
@ -451,11 +408,13 @@ class MessageProtocol:
|
||||
|
||||
return out
|
||||
|
||||
def do_create_receiver_module(self,
|
||||
basename: str,
|
||||
protocol_create_code: str,
|
||||
is_async: bool,
|
||||
private: bool = False) -> str:
|
||||
def do_create_receiver_module(
|
||||
self,
|
||||
basename: str,
|
||||
protocol_create_code: str,
|
||||
is_async: bool,
|
||||
private: bool = False,
|
||||
protocol_module_level_import_code: Optional[str] = None) -> str:
|
||||
"""Used by create_receiver_module(); do not call directly."""
|
||||
# pylint: disable=too-many-locals
|
||||
import textwrap
|
||||
@ -463,7 +422,8 @@ class MessageProtocol:
|
||||
desc = 'asynchronous' if is_async else 'synchronous'
|
||||
ppre = '_' if private else ''
|
||||
msgtypes = list(self.message_ids_by_type.keys())
|
||||
out = self._get_module_header('receiver')
|
||||
out = self._get_module_header(
|
||||
'receiver', extra_import_code=protocol_module_level_import_code)
|
||||
ccind = textwrap.indent(protocol_create_code, ' ')
|
||||
out += (f'class {ppre}{basename}(MessageReceiver):\n'
|
||||
f' """Protocol-specific {desc} receiver."""\n'
|
||||
@ -550,442 +510,26 @@ class MessageProtocol:
|
||||
if is_async:
|
||||
out += (
|
||||
'\n'
|
||||
' async def handle_raw_message(self, message: str)'
|
||||
' -> str:\n'
|
||||
' async def handle_raw_message(self,\n'
|
||||
' message: str,\n'
|
||||
' raise_unregistered: bool ='
|
||||
' False) -> str:\n'
|
||||
' """Asynchronously handle a raw incoming message."""\n'
|
||||
' return await'
|
||||
' self._receiver.handle_raw_message_async(\n'
|
||||
' self._obj, message)\n')
|
||||
' return await self._receiver.handle_raw_message_async('
|
||||
'\n'
|
||||
' self._obj, message, raise_unregistered)\n')
|
||||
|
||||
else:
|
||||
out += (
|
||||
'\n'
|
||||
' def handle_raw_message(self, message: str) -> str:\n'
|
||||
' def handle_raw_message(self,\n'
|
||||
' message: str,\n'
|
||||
' raise_unregistered: bool = False)'
|
||||
' -> str:\n'
|
||||
' """Synchronously handle a raw incoming message."""\n'
|
||||
' return self._receiver.handle_raw_message'
|
||||
'(self._obj, message)\n')
|
||||
' return self._receiver.handle_raw_message('
|
||||
'self._obj, message,\n'
|
||||
' '
|
||||
'raise_unregistered)\n')
|
||||
|
||||
return out
|
||||
|
||||
|
||||
class MessageSender:
|
||||
"""Facilitates sending messages to a target and receiving responses.
|
||||
This is instantiated at the class level and used to register unbound
|
||||
class methods to handle raw message sending.
|
||||
|
||||
Example:
|
||||
|
||||
class MyClass:
|
||||
msg = MyMessageSender(some_protocol)
|
||||
|
||||
@msg.send_method
|
||||
def send_raw_message(self, message: str) -> str:
|
||||
# Actually send the message here.
|
||||
|
||||
# MyMessageSender class should provide overloads for send(), send_bg(),
|
||||
# etc. to ensure all sending happens with valid types.
|
||||
obj = MyClass()
|
||||
obj.msg.send(SomeMessageType())
|
||||
"""
|
||||
|
||||
def __init__(self, protocol: MessageProtocol) -> None:
|
||||
self.protocol = protocol
|
||||
self._send_raw_message_call: Optional[Callable[[Any, str], str]] = None
|
||||
self._send_async_raw_message_call: Optional[Callable[
|
||||
[Any, str], Awaitable[str]]] = None
|
||||
|
||||
def send_method(
|
||||
self, call: Callable[[Any, str],
|
||||
str]) -> Callable[[Any, str], str]:
|
||||
"""Function decorator for setting raw send method."""
|
||||
assert self._send_raw_message_call is None
|
||||
self._send_raw_message_call = call
|
||||
return call
|
||||
|
||||
def send_async_method(
|
||||
self, call: Callable[[Any, str], Awaitable[str]]
|
||||
) -> Callable[[Any, str], Awaitable[str]]:
|
||||
"""Function decorator for setting raw send-async method."""
|
||||
assert self._send_async_raw_message_call is None
|
||||
self._send_async_raw_message_call = call
|
||||
return call
|
||||
|
||||
def send(self, bound_obj: Any, message: Message) -> Optional[Response]:
|
||||
"""Send a message and receive a response.
|
||||
|
||||
Will encode the message for transport and call dispatch_raw_message()
|
||||
"""
|
||||
if self._send_raw_message_call is None:
|
||||
raise RuntimeError('send() is unimplemented for this type.')
|
||||
|
||||
msg_encoded = self.protocol.encode_message(message)
|
||||
response_encoded = self._send_raw_message_call(bound_obj, msg_encoded)
|
||||
response = self.protocol.decode_response(response_encoded)
|
||||
assert isinstance(response, (Response, type(None)))
|
||||
assert (response is None
|
||||
or type(response) in type(message).get_response_types())
|
||||
return response
|
||||
|
||||
async def send_async(self, bound_obj: Any,
|
||||
message: Message) -> Optional[Response]:
|
||||
"""Send a message asynchronously using asyncio.
|
||||
|
||||
The message will be encoded for transport and passed to
|
||||
dispatch_raw_message_async.
|
||||
"""
|
||||
if self._send_async_raw_message_call is None:
|
||||
raise RuntimeError('send_async() is unimplemented for this type.')
|
||||
|
||||
msg_encoded = self.protocol.encode_message(message)
|
||||
response_encoded = await self._send_async_raw_message_call(
|
||||
bound_obj, msg_encoded)
|
||||
response = self.protocol.decode_response(response_encoded)
|
||||
assert isinstance(response, (Response, type(None)))
|
||||
assert (response is None
|
||||
or type(response) in type(message).get_response_types())
|
||||
return response
|
||||
|
||||
|
||||
class BoundMessageSender:
|
||||
"""Base class for bound senders."""
|
||||
|
||||
def __init__(self, obj: Any, sender: MessageSender) -> None:
|
||||
assert obj is not None
|
||||
self._obj = obj
|
||||
self._sender = sender
|
||||
|
||||
@property
|
||||
def protocol(self) -> MessageProtocol:
|
||||
"""Protocol associated with this sender."""
|
||||
return self._sender.protocol
|
||||
|
||||
def send_untyped(self, message: Message) -> Optional[Response]:
|
||||
"""Send a message synchronously.
|
||||
|
||||
Whenever possible, use the send() call provided by generated
|
||||
subclasses instead of this; it will provide better type safety.
|
||||
"""
|
||||
return self._sender.send(self._obj, message)
|
||||
|
||||
async def send_async_untyped(self, message: Message) -> Optional[Response]:
|
||||
"""Send a message asynchronously.
|
||||
|
||||
Whenever possible, use the send_async() call provided by generated
|
||||
subclasses instead of this; it will provide better type safety.
|
||||
"""
|
||||
return await self._sender.send_async(self._obj, message)
|
||||
|
||||
|
||||
class MessageReceiver:
|
||||
"""Facilitates receiving & responding to messages from a remote source.
|
||||
|
||||
This is instantiated at the class level with unbound methods registered
|
||||
as handlers for different message types in the protocol.
|
||||
|
||||
Example:
|
||||
|
||||
class MyClass:
|
||||
receiver = MyMessageReceiver()
|
||||
|
||||
# MyMessageReceiver fills out handler() overloads to ensure all
|
||||
# registered handlers have valid types/return-types.
|
||||
@receiver.handler
|
||||
def handle_some_message_type(self, message: SomeMsg) -> SomeResponse:
|
||||
# Deal with this message type here.
|
||||
|
||||
# This will trigger the registered handler being called.
|
||||
obj = MyClass()
|
||||
obj.receiver.handle_raw_message(some_raw_data)
|
||||
|
||||
Any unhandled Exception occurring during message handling will result in
|
||||
an Exception being raised on the sending end.
|
||||
"""
|
||||
|
||||
is_async = False
|
||||
|
||||
def __init__(self, protocol: MessageProtocol) -> None:
|
||||
self.protocol = protocol
|
||||
self._handlers: dict[type[Message], Callable] = {}
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
def register_handler(
|
||||
self, call: Callable[[Any, Message], Optional[Response]]) -> None:
|
||||
"""Register a handler call.
|
||||
|
||||
The message type handled by the call is determined by its
|
||||
type annotation.
|
||||
"""
|
||||
# TODO: can use types.GenericAlias in 3.9.
|
||||
from typing import _GenericAlias # type: ignore
|
||||
from typing import get_type_hints, get_args
|
||||
|
||||
sig = inspect.getfullargspec(call)
|
||||
|
||||
# The provided callable should be a method taking one 'msg' arg.
|
||||
expectedsig = ['self', 'msg']
|
||||
if sig.args != expectedsig:
|
||||
raise ValueError(f'Expected callable signature of {expectedsig};'
|
||||
f' got {sig.args}')
|
||||
|
||||
# Make sure we are only given async methods if we are an async handler
|
||||
# and sync ones otherwise.
|
||||
is_async = inspect.iscoroutinefunction(call)
|
||||
if self.is_async != is_async:
|
||||
msg = ('Expected a sync method; found an async one.' if is_async
|
||||
else 'Expected an async method; found a sync one.')
|
||||
raise ValueError(msg)
|
||||
|
||||
# Check annotation types to determine what message types we handle.
|
||||
# Return-type annotation can be a Union, but we probably don't
|
||||
# have it available at runtime. Explicitly pull it in.
|
||||
# UPDATE: we've updated our pylint filter to where we should
|
||||
# have all annotations available.
|
||||
# anns = get_type_hints(call, localns={'Union': Union})
|
||||
anns = get_type_hints(call)
|
||||
|
||||
msgtype = anns.get('msg')
|
||||
if not isinstance(msgtype, type):
|
||||
raise TypeError(
|
||||
f'expected a type for "msg" annotation; got {type(msgtype)}.')
|
||||
assert issubclass(msgtype, Message)
|
||||
|
||||
ret = anns.get('return')
|
||||
responsetypes: tuple[Union[type[Any], type[None]], ...]
|
||||
|
||||
# Return types can be a single type or a union of types.
|
||||
if isinstance(ret, _GenericAlias):
|
||||
targs = get_args(ret)
|
||||
if not all(isinstance(a, type) for a in targs):
|
||||
raise TypeError(f'expected only types for "return" annotation;'
|
||||
f' got {targs}.')
|
||||
responsetypes = targs
|
||||
else:
|
||||
if not isinstance(ret, type):
|
||||
raise TypeError(f'expected one or more types for'
|
||||
f' "return" annotation; got a {type(ret)}.')
|
||||
responsetypes = (ret, )
|
||||
|
||||
# Return type of None translates to EmptyResponse.
|
||||
responsetypes = tuple(EmptyResponse if r is type(None) else r
|
||||
for r in responsetypes) # noqa
|
||||
|
||||
# Make sure our protocol has this message type registered and our
|
||||
# return types exactly match. (Technically we could return a subset
|
||||
# of the supported types; can allow this in the future if it makes
|
||||
# sense).
|
||||
registered_types = self.protocol.message_ids_by_type.keys()
|
||||
|
||||
if msgtype not in registered_types:
|
||||
raise TypeError(f'Message type {msgtype} is not registered'
|
||||
f' in this Protocol.')
|
||||
|
||||
if msgtype in self._handlers:
|
||||
raise TypeError(f'Message type {msgtype} already has a registered'
|
||||
f' handler.')
|
||||
|
||||
# Make sure the responses exactly matches what the message expects.
|
||||
if set(responsetypes) != set(msgtype.get_response_types()):
|
||||
raise TypeError(
|
||||
f'Provided response types {responsetypes} do not'
|
||||
f' match the set expected by message type {msgtype}: '
|
||||
f'({msgtype.get_response_types()})')
|
||||
|
||||
# Ok; we're good!
|
||||
self._handlers[msgtype] = call
|
||||
|
||||
def validate(self, warn_only: bool = False) -> None:
|
||||
"""Check for handler completeness, valid types, etc."""
|
||||
for msgtype in self.protocol.message_ids_by_type.keys():
|
||||
if issubclass(msgtype, Response):
|
||||
continue
|
||||
if msgtype not in self._handlers:
|
||||
msg = (f'Protocol message type {msgtype} is not handled'
|
||||
f' by receiver type {type(self)}.')
|
||||
if warn_only:
|
||||
logging.warning(msg)
|
||||
else:
|
||||
raise TypeError(msg)
|
||||
|
||||
def _decode_incoming_message(self,
|
||||
msg: str) -> tuple[Message, type[Message]]:
|
||||
# Decode the incoming message.
|
||||
msg_decoded = self.protocol.decode_message(msg)
|
||||
msgtype = type(msg_decoded)
|
||||
assert issubclass(msgtype, Message)
|
||||
return msg_decoded, msgtype
|
||||
|
||||
def _encode_response(self, response: Optional[Response],
|
||||
msgtype: type[Message]) -> str:
|
||||
|
||||
# A return value of None equals EmptyResponse.
|
||||
if response is None:
|
||||
response = EmptyResponse()
|
||||
|
||||
# Re-encode the response.
|
||||
assert isinstance(response, Response)
|
||||
# (user should never explicitly return these)
|
||||
assert not isinstance(response, ErrorResponse)
|
||||
assert type(response) in msgtype.get_response_types()
|
||||
return self.protocol.encode_response(response)
|
||||
|
||||
def raw_response_for_error(self, exc: Exception) -> str:
|
||||
"""Return a raw response for an error that occurred during handling."""
|
||||
if self.protocol.log_remote_exceptions:
|
||||
logging.exception('Error handling message.')
|
||||
|
||||
# If anything goes wrong, return a ErrorResponse instead.
|
||||
if (isinstance(exc, CleanError)
|
||||
and self.protocol.preserve_clean_errors):
|
||||
err_response = ErrorResponse(error_message=str(exc),
|
||||
error_type=ErrorType.CLEAN)
|
||||
else:
|
||||
err_response = ErrorResponse(
|
||||
error_message=(traceback.format_exc()
|
||||
if self.protocol.trusted_sender else
|
||||
'An unknown error has occurred.'),
|
||||
error_type=ErrorType.OTHER)
|
||||
return self.protocol.encode_response(err_response)
|
||||
|
||||
def handle_raw_message(self, bound_obj: Any, msg: str) -> str:
|
||||
"""Decode, handle, and return an response for a message."""
|
||||
assert not self.is_async, "can't call sync handler on async receiver"
|
||||
try:
|
||||
msg_decoded, msgtype = self._decode_incoming_message(msg)
|
||||
handler = self._handlers.get(msgtype)
|
||||
if handler is None:
|
||||
raise RuntimeError(f'Got unhandled message type: {msgtype}.')
|
||||
result = handler(bound_obj, msg_decoded)
|
||||
return self._encode_response(result, msgtype)
|
||||
|
||||
except Exception as exc:
|
||||
return self.raw_response_for_error(exc)
|
||||
|
||||
async def handle_raw_message_async(self, bound_obj: Any, msg: str) -> str:
|
||||
"""Should be called when the receiver gets a message.
|
||||
|
||||
The return value is the raw response to the message.
|
||||
"""
|
||||
assert self.is_async, "can't call async handler on sync receiver"
|
||||
try:
|
||||
msg_decoded, msgtype = self._decode_incoming_message(msg)
|
||||
handler = self._handlers.get(msgtype)
|
||||
if handler is None:
|
||||
raise RuntimeError(f'Got unhandled message type: {msgtype}.')
|
||||
result = await handler(bound_obj, msg_decoded)
|
||||
return self._encode_response(result, msgtype)
|
||||
|
||||
except Exception as exc:
|
||||
return self.raw_response_for_error(exc)
|
||||
|
||||
|
||||
class BoundMessageReceiver:
|
||||
"""Base bound receiver class."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
obj: Any,
|
||||
receiver: MessageReceiver,
|
||||
) -> None:
|
||||
assert obj is not None
|
||||
self._obj = obj
|
||||
self._receiver = receiver
|
||||
|
||||
@property
|
||||
def protocol(self) -> MessageProtocol:
|
||||
"""Protocol associated with this receiver."""
|
||||
return self._receiver.protocol
|
||||
|
||||
def raw_response_for_error(self, exc: Exception) -> str:
|
||||
"""Return a raw response for an error that occurred during handling.
|
||||
|
||||
This is automatically called from standard handle_raw_message_x()
|
||||
calls but can be manually invoked if errors occur outside of there.
|
||||
This gives clients a better idea of what went wrong vs simply
|
||||
returning invalid data which they might dismiss as a connection
|
||||
related error.
|
||||
"""
|
||||
return self._receiver.raw_response_for_error(exc)
|
||||
|
||||
|
||||
def create_sender_module(basename: str,
|
||||
protocol_create_code: str,
|
||||
enable_sync_sends: bool,
|
||||
enable_async_sends: bool,
|
||||
private: bool = False) -> str:
|
||||
"""Create a Python module defining a MessageSender subclass.
|
||||
|
||||
This class is primarily for type checking and will contain overrides
|
||||
for the varieties of send calls for message/response types defined
|
||||
in the protocol.
|
||||
|
||||
Code passed for 'protocol_create_code' should import necessary
|
||||
modules and assign an instance of the Protocol to a 'protocol'
|
||||
variable.
|
||||
|
||||
Class names are based on basename; a basename 'FooSender' will
|
||||
result in classes FooSender and BoundFooSender.
|
||||
|
||||
If 'private' is True, class-names will be prefixed with an '_'.
|
||||
|
||||
Note that line lengths are not clipped, so output may need to be
|
||||
run through a formatter to prevent lint warnings about excessive
|
||||
line lengths.
|
||||
"""
|
||||
|
||||
# Exec the passed code to get a protocol which we then use to
|
||||
# generate module code. The user could simply call
|
||||
# MessageProtocol.do_create_sender_module() directly, but this allows
|
||||
# us to verify that the create code works and yields the protocol used
|
||||
# to generate the code.
|
||||
protocol = _protocol_from_code(protocol_create_code)
|
||||
return protocol.do_create_sender_module(
|
||||
basename=basename,
|
||||
protocol_create_code=protocol_create_code,
|
||||
enable_sync_sends=enable_sync_sends,
|
||||
enable_async_sends=enable_async_sends,
|
||||
private=private)
|
||||
|
||||
|
||||
def create_receiver_module(basename: str,
|
||||
protocol_create_code: str,
|
||||
is_async: bool,
|
||||
private: bool = False) -> str:
|
||||
""""Create a Python module defining a MessageReceiver subclass.
|
||||
|
||||
This class is primarily for type checking and will contain overrides
|
||||
for the register method for message/response types defined in
|
||||
the protocol.
|
||||
|
||||
Class names are based on basename; a basename 'FooReceiver' will
|
||||
result in FooReceiver and BoundFooReceiver.
|
||||
|
||||
If 'is_async' is True, handle_raw_message() will be an async method
|
||||
and the @handler decorator will expect async methods.
|
||||
|
||||
If 'private' is True, class-names will be prefixed with an '_'.
|
||||
|
||||
Note that line lengths are not clipped, so output may need to be
|
||||
run through a formatter to prevent lint warnings about excessive
|
||||
line lengths.
|
||||
"""
|
||||
# Exec the passed code to get a protocol which we then use to
|
||||
# generate module code. The user could simply call
|
||||
# MessageProtocol.do_create_sender_module() directly, but this allows
|
||||
# us to verify that the create code works and yields the protocol used
|
||||
# to generate the code.
|
||||
protocol = _protocol_from_code(protocol_create_code)
|
||||
return protocol.do_create_receiver_module(
|
||||
basename=basename,
|
||||
protocol_create_code=protocol_create_code,
|
||||
is_async=is_async,
|
||||
private=private)
|
||||
|
||||
|
||||
def _protocol_from_code(protocol_create_code: str) -> MessageProtocol:
|
||||
env: dict = {}
|
||||
exec(protocol_create_code, env) # pylint: disable=exec-used
|
||||
protocol = env.get('protocol')
|
||||
if not isinstance(protocol, MessageProtocol):
|
||||
raise RuntimeError(
|
||||
f'protocol_create_code yielded'
|
||||
f' a {type(protocol)}; expected a MessageProtocol instance.')
|
||||
return protocol
|
||||
239
tools/efro/message/_receiver.py
Normal file
239
tools/efro/message/_receiver.py
Normal file
@ -0,0 +1,239 @@
|
||||
# Released under the MIT License. See LICENSE for details.
|
||||
#
|
||||
"""Functionality for sending and responding to messages.
|
||||
Supports static typing for message types and possible return types.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import inspect
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from efro.message._message import (Message, Response, EmptyResponse,
|
||||
ErrorResponse, UnregisteredMessageIDError)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Callable, Optional, Awaitable, Union
|
||||
|
||||
from efro.message._protocol import MessageProtocol
|
||||
|
||||
|
||||
class MessageReceiver:
|
||||
"""Facilitates receiving & responding to messages from a remote source.
|
||||
|
||||
This is instantiated at the class level with unbound methods registered
|
||||
as handlers for different message types in the protocol.
|
||||
|
||||
Example:
|
||||
|
||||
class MyClass:
|
||||
receiver = MyMessageReceiver()
|
||||
|
||||
# MyMessageReceiver fills out handler() overloads to ensure all
|
||||
# registered handlers have valid types/return-types.
|
||||
@receiver.handler
|
||||
def handle_some_message_type(self, message: SomeMsg) -> SomeResponse:
|
||||
# Deal with this message type here.
|
||||
|
||||
# This will trigger the registered handler being called.
|
||||
obj = MyClass()
|
||||
obj.receiver.handle_raw_message(some_raw_data)
|
||||
|
||||
Any unhandled Exception occurring during message handling will result in
|
||||
an Exception being raised on the sending end.
|
||||
"""
|
||||
|
||||
is_async = False
|
||||
|
||||
def __init__(self, protocol: MessageProtocol) -> None:
|
||||
self.protocol = protocol
|
||||
self._handlers: dict[type[Message], Callable] = {}
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
def register_handler(
|
||||
self, call: Callable[[Any, Message], Optional[Response]]) -> None:
|
||||
"""Register a handler call.
|
||||
|
||||
The message type handled by the call is determined by its
|
||||
type annotation.
|
||||
"""
|
||||
# TODO: can use types.GenericAlias in 3.9.
|
||||
from typing import _GenericAlias # type: ignore
|
||||
from typing import get_type_hints, get_args
|
||||
|
||||
sig = inspect.getfullargspec(call)
|
||||
|
||||
# The provided callable should be a method taking one 'msg' arg.
|
||||
expectedsig = ['self', 'msg']
|
||||
if sig.args != expectedsig:
|
||||
raise ValueError(f'Expected callable signature of {expectedsig};'
|
||||
f' got {sig.args}')
|
||||
|
||||
# Make sure we are only given async methods if we are an async handler
|
||||
# and sync ones otherwise.
|
||||
is_async = inspect.iscoroutinefunction(call)
|
||||
if self.is_async != is_async:
|
||||
msg = ('Expected a sync method; found an async one.' if is_async
|
||||
else 'Expected an async method; found a sync one.')
|
||||
raise ValueError(msg)
|
||||
|
||||
# Check annotation types to determine what message types we handle.
|
||||
# Return-type annotation can be a Union, but we probably don't
|
||||
# have it available at runtime. Explicitly pull it in.
|
||||
# UPDATE: we've updated our pylint filter to where we should
|
||||
# have all annotations available.
|
||||
# anns = get_type_hints(call, localns={'Union': Union})
|
||||
anns = get_type_hints(call)
|
||||
|
||||
msgtype = anns.get('msg')
|
||||
if not isinstance(msgtype, type):
|
||||
raise TypeError(
|
||||
f'expected a type for "msg" annotation; got {type(msgtype)}.')
|
||||
assert issubclass(msgtype, Message)
|
||||
|
||||
ret = anns.get('return')
|
||||
responsetypes: tuple[Union[type[Any], type[None]], ...]
|
||||
|
||||
# Return types can be a single type or a union of types.
|
||||
if isinstance(ret, _GenericAlias):
|
||||
targs = get_args(ret)
|
||||
if not all(isinstance(a, type) for a in targs):
|
||||
raise TypeError(f'expected only types for "return" annotation;'
|
||||
f' got {targs}.')
|
||||
responsetypes = targs
|
||||
else:
|
||||
if not isinstance(ret, type):
|
||||
raise TypeError(f'expected one or more types for'
|
||||
f' "return" annotation; got a {type(ret)}.')
|
||||
responsetypes = (ret, )
|
||||
|
||||
# Return type of None translates to EmptyResponse.
|
||||
responsetypes = tuple(EmptyResponse if r is type(None) else r
|
||||
for r in responsetypes) # noqa
|
||||
|
||||
# Make sure our protocol has this message type registered and our
|
||||
# return types exactly match. (Technically we could return a subset
|
||||
# of the supported types; can allow this in the future if it makes
|
||||
# sense).
|
||||
registered_types = self.protocol.message_ids_by_type.keys()
|
||||
|
||||
if msgtype not in registered_types:
|
||||
raise TypeError(f'Message type {msgtype} is not registered'
|
||||
f' in this Protocol.')
|
||||
|
||||
if msgtype in self._handlers:
|
||||
raise TypeError(f'Message type {msgtype} already has a registered'
|
||||
f' handler.')
|
||||
|
||||
# Make sure the responses exactly matches what the message expects.
|
||||
if set(responsetypes) != set(msgtype.get_response_types()):
|
||||
raise TypeError(
|
||||
f'Provided response types {responsetypes} do not'
|
||||
f' match the set expected by message type {msgtype}: '
|
||||
f'({msgtype.get_response_types()})')
|
||||
|
||||
# Ok; we're good!
|
||||
self._handlers[msgtype] = call
|
||||
|
||||
def validate(self, log_only: bool = False) -> None:
|
||||
"""Check for handler completeness, valid types, etc."""
|
||||
for msgtype in self.protocol.message_ids_by_type.keys():
|
||||
if issubclass(msgtype, Response):
|
||||
continue
|
||||
if msgtype not in self._handlers:
|
||||
msg = (f'Protocol message type {msgtype} is not handled'
|
||||
f' by receiver type {type(self)}.')
|
||||
if log_only:
|
||||
logging.error(msg)
|
||||
else:
|
||||
raise TypeError(msg)
|
||||
|
||||
def _decode_incoming_message(self,
|
||||
msg: str) -> tuple[Message, type[Message]]:
|
||||
# Decode the incoming message.
|
||||
msg_decoded = self.protocol.decode_message(msg)
|
||||
msgtype = type(msg_decoded)
|
||||
assert issubclass(msgtype, Message)
|
||||
return msg_decoded, msgtype
|
||||
|
||||
def _encode_response(self, response: Optional[Response],
|
||||
msgtype: type[Message]) -> str:
|
||||
|
||||
# A return value of None equals EmptyResponse.
|
||||
if response is None:
|
||||
response = EmptyResponse()
|
||||
|
||||
assert isinstance(response, Response)
|
||||
# (user should never explicitly return error-responses)
|
||||
assert not isinstance(response, ErrorResponse)
|
||||
assert type(response) in msgtype.get_response_types()
|
||||
return self.protocol.encode_response(response)
|
||||
|
||||
def handle_raw_message(self,
|
||||
bound_obj: Any,
|
||||
msg: str,
|
||||
raise_unregistered: bool = False) -> str:
|
||||
"""Decode, handle, and return an response for a message.
|
||||
|
||||
if 'raise_unregistered' is True, will raise an
|
||||
efro.message.UnregisteredMessageIDError for messages not handled by
|
||||
the protocol. In all other cases local errors will translate to
|
||||
error responses returned to the sender.
|
||||
"""
|
||||
assert not self.is_async, "can't call sync handler on async receiver"
|
||||
try:
|
||||
msg_decoded, msgtype = self._decode_incoming_message(msg)
|
||||
handler = self._handlers.get(msgtype)
|
||||
if handler is None:
|
||||
raise RuntimeError(f'Got unhandled message type: {msgtype}.')
|
||||
result = handler(bound_obj, msg_decoded)
|
||||
return self._encode_response(result, msgtype)
|
||||
|
||||
except Exception as exc:
|
||||
if (raise_unregistered
|
||||
and isinstance(exc, UnregisteredMessageIDError)):
|
||||
raise
|
||||
return self.protocol.encode_error_response(exc)
|
||||
|
||||
async def handle_raw_message_async(
|
||||
self,
|
||||
bound_obj: Any,
|
||||
msg: str,
|
||||
raise_unregistered: bool = False) -> str:
|
||||
"""Should be called when the receiver gets a message.
|
||||
|
||||
The return value is the raw response to the message.
|
||||
"""
|
||||
assert self.is_async, "can't call async handler on sync receiver"
|
||||
try:
|
||||
msg_decoded, msgtype = self._decode_incoming_message(msg)
|
||||
handler = self._handlers.get(msgtype)
|
||||
if handler is None:
|
||||
raise RuntimeError(f'Got unhandled message type: {msgtype}.')
|
||||
result = await handler(bound_obj, msg_decoded)
|
||||
return self._encode_response(result, msgtype)
|
||||
|
||||
except Exception as exc:
|
||||
if (raise_unregistered
|
||||
and isinstance(exc, UnregisteredMessageIDError)):
|
||||
raise
|
||||
return self.protocol.encode_error_response(exc)
|
||||
|
||||
|
||||
class BoundMessageReceiver:
|
||||
"""Base bound receiver class."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
obj: Any,
|
||||
receiver: MessageReceiver,
|
||||
) -> None:
|
||||
assert obj is not None
|
||||
self._obj = obj
|
||||
self._receiver = receiver
|
||||
|
||||
@property
|
||||
def protocol(self) -> MessageProtocol:
|
||||
"""Protocol associated with this receiver."""
|
||||
return self._receiver.protocol
|
||||
130
tools/efro/message/_sender.py
Normal file
130
tools/efro/message/_sender.py
Normal file
@ -0,0 +1,130 @@
|
||||
# Released under the MIT License. See LICENSE for details.
|
||||
#
|
||||
"""Functionality for sending and responding to messages.
|
||||
Supports static typing for message types and possible return types.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, TypeVar
|
||||
|
||||
from efro.message._message import Response
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Callable, Optional, Awaitable
|
||||
|
||||
from efro.message._message import Message
|
||||
from efro.message._protocol import MessageProtocol
|
||||
|
||||
TM = TypeVar('TM', bound='MessageSender')
|
||||
|
||||
|
||||
class MessageSender:
|
||||
"""Facilitates sending messages to a target and receiving responses.
|
||||
This is instantiated at the class level and used to register unbound
|
||||
class methods to handle raw message sending.
|
||||
|
||||
Example:
|
||||
|
||||
class MyClass:
|
||||
msg = MyMessageSender(some_protocol)
|
||||
|
||||
@msg.send_method
|
||||
def send_raw_message(self, message: str) -> str:
|
||||
# Actually send the message here.
|
||||
|
||||
# MyMessageSender class should provide overloads for send(), send_bg(),
|
||||
# etc. to ensure all sending happens with valid types.
|
||||
obj = MyClass()
|
||||
obj.msg.send(SomeMessageType())
|
||||
"""
|
||||
|
||||
def __init__(self, protocol: MessageProtocol) -> None:
|
||||
self.protocol = protocol
|
||||
self._send_raw_message_call: Optional[Callable[[Any, str], str]] = None
|
||||
self._send_async_raw_message_call: Optional[Callable[
|
||||
[Any, str], Awaitable[str]]] = None
|
||||
|
||||
def send_method(
|
||||
self, call: Callable[[Any, str],
|
||||
str]) -> Callable[[Any, str], str]:
|
||||
"""Function decorator for setting raw send method."""
|
||||
assert self._send_raw_message_call is None
|
||||
self._send_raw_message_call = call
|
||||
return call
|
||||
|
||||
def send_async_method(
|
||||
self, call: Callable[[Any, str], Awaitable[str]]
|
||||
) -> Callable[[Any, str], Awaitable[str]]:
|
||||
"""Function decorator for setting raw send-async method."""
|
||||
assert self._send_async_raw_message_call is None
|
||||
self._send_async_raw_message_call = call
|
||||
return call
|
||||
|
||||
def send(self, bound_obj: Any, message: Message) -> Optional[Response]:
|
||||
"""Send a message and receive a response.
|
||||
|
||||
Will encode the message for transport and call dispatch_raw_message()
|
||||
"""
|
||||
if self._send_raw_message_call is None:
|
||||
raise RuntimeError('send() is unimplemented for this type.')
|
||||
|
||||
msg_encoded = self.protocol.encode_message(message)
|
||||
response_encoded = self._send_raw_message_call(bound_obj, msg_encoded)
|
||||
response = self.protocol.decode_response(response_encoded)
|
||||
assert isinstance(response, (Response, type(None)))
|
||||
assert (response is None
|
||||
or type(response) in type(message).get_response_types())
|
||||
return response
|
||||
|
||||
async def send_async(self, bound_obj: Any,
|
||||
message: Message) -> Optional[Response]:
|
||||
"""Send a message asynchronously using asyncio.
|
||||
|
||||
The message will be encoded for transport and passed to
|
||||
dispatch_raw_message_async.
|
||||
"""
|
||||
if self._send_async_raw_message_call is None:
|
||||
raise RuntimeError('send_async() is unimplemented for this type.')
|
||||
|
||||
msg_encoded = self.protocol.encode_message(message)
|
||||
response_encoded = await self._send_async_raw_message_call(
|
||||
bound_obj, msg_encoded)
|
||||
response = self.protocol.decode_response(response_encoded)
|
||||
assert isinstance(response, (Response, type(None)))
|
||||
assert (response is None
|
||||
or type(response) in type(message).get_response_types())
|
||||
return response
|
||||
|
||||
|
||||
class BoundMessageSender:
    """Base class for bound senders."""

    def __init__(self, obj: Any, sender: MessageSender) -> None:
        # Note: not checking obj here since we want to support
        # at least our protocol property when accessed via type.
        self._obj = obj
        self._sender = sender

    @property
    def protocol(self) -> MessageProtocol:
        """Protocol associated with this sender."""
        return self._sender.protocol

    def send_untyped(self, message: Message) -> Optional[Response]:
        """Synchronously send a message via the underlying sender.

        Whenever possible, prefer the type-safe send() call provided by
        generated subclasses over this one.
        """
        target = self._obj
        assert target is not None
        return self._sender.send(target, message)

    async def send_async_untyped(self, message: Message) -> Optional[Response]:
        """Asynchronously send a message via the underlying sender.

        Whenever possible, prefer the type-safe send_async() call provided
        by generated subclasses over this one.
        """
        target = self._obj
        assert target is not None
        return await self._sender.send_async(target, message)
|
||||
574
tools/efro/rpc.py
Normal file
574
tools/efro/rpc.py
Normal file
@ -0,0 +1,574 @@
|
||||
# Released under the MIT License. See LICENSE for details.
|
||||
#
|
||||
"""Remote procedure call related functionality."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import ssl
|
||||
import time
|
||||
import asyncio
|
||||
import logging
|
||||
import weakref
|
||||
from enum import Enum
|
||||
from dataclasses import dataclass
|
||||
from threading import current_thread
|
||||
from typing import TYPE_CHECKING, Annotated
|
||||
|
||||
from efro.error import CommunicationError
|
||||
from efro.util import assert_never
|
||||
from efro.dataclassio import (dataclass_to_json, dataclass_from_json,
|
||||
ioprepped, IOAttrs)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Literal, Awaitable, Callable, Optional
|
||||
from threading import Thread
|
||||
|
||||
# Terminology:
|
||||
# Packet: A chunk of data consisting of a type and some type-dependent
|
||||
# payload. Even though we use streams we organize our transmission
|
||||
# into 'packets'.
|
||||
# Message: User data which we transmit using one or more packets.
|
||||
|
||||
|
||||
class _PacketType(Enum):
    """Type tag occupying the first byte of each typed packet on the wire."""

    # Peer-info exchange. Note: the actual handshake is sent as a raw
    # length-prefixed blob before any typed packets flow; receiving this
    # tag mid-stream is treated as an error by the read task.
    HANDSHAKE = 0
    # Liveness ping; no payload beyond the type byte.
    KEEPALIVE = 1
    # User message: type (1b) + message-id (2b) + len (2b) + data.
    MESSAGE = 2
    # Response to a message; same layout as MESSAGE.
    RESPONSE = 3
|
||||
|
||||
|
||||
# Byte order used for all integer fields we put on the wire.
_BYTE_ORDER: Literal['big'] = 'big'
|
||||
|
||||
|
||||
@ioprepped
@dataclass
class _PeerInfo:
    """Handshake payload exchanged (as JSON) when a connection starts."""

    # So we can gracefully evolve how we communicate in the future.
    protocol: Annotated[int, IOAttrs('p')]

    # How often we'll be sending out keepalives (in seconds).
    keepalive_interval: Annotated[float, IOAttrs('k')]
|
||||
|
||||
|
||||
# Protocol version we advertise in our handshake _PeerInfo.
OUR_PROTOCOL = 1
|
||||
|
||||
|
||||
class _InFlightMessage:
    """Tracks a single message that has been sent and awaits its reply."""

    def __init__(self) -> None:
        self._data: Optional[bytes] = None
        self._ready = asyncio.Event()
        # Task that callers can await (or cancel) to get the response.
        self.wait_task = asyncio.create_task(self._await_response())

    async def _await_response(self) -> bytes:
        # Block until set_response() has stored the payload for us.
        await self._ready.wait()
        assert self._data is not None
        return self._data

    def set_response(self, data: bytes) -> None:
        """Record the peer's response and wake any waiter."""
        # A response should only ever arrive once per message.
        assert self._data is None
        self._data = data
        self._ready.set()
|
||||
|
||||
|
||||
# Note: treated as an 'expected' connection error by
# RPCEndpoint._is_expected_connection_error(), so hitting it shuts the
# endpoint down cleanly instead of logging a stack trace.
class _KeepaliveTimeoutError(Exception):
    """Raised if we time out due to not receiving keepalives."""
|
||||
|
||||
|
||||
class RPCEndpoint:
    """Facilitates asynchronous multiplexed remote procedure calls.

    Be aware that, while multiple calls can be in flight in either direction
    simultaneously, packets are still sent serially in a single
    stream. So excessively long messages/responses will delay all other
    communication. If/when this becomes an issue we can look into breaking up
    long messages into multiple packets.
    """

    # Set to True on an instance to test keepalive failures.
    test_suppress_keepalives: bool = False

    # How long we should wait before giving up on a message by default.
    # Note this includes processing time on the other end.
    DEFAULT_MESSAGE_TIMEOUT = 60.0

    # How often we send out keepalive packets by default.
    DEFAULT_KEEPALIVE_INTERVAL = 10.73  # (avoid too regular of values)

    # How long we can go without receiving a keepalive packet before we
    # disconnect.
    DEFAULT_KEEPALIVE_TIMEOUT = 30.0

    def __init__(self,
                 handle_raw_message_call: Callable[[bytes], Awaitable[bytes]],
                 reader: asyncio.StreamReader,
                 writer: asyncio.StreamWriter,
                 debug_print: bool,
                 label: str,
                 keepalive_interval: float = DEFAULT_KEEPALIVE_INTERVAL,
                 keepalive_timeout: float = DEFAULT_KEEPALIVE_TIMEOUT) -> None:
        self._handle_raw_message_call = handle_raw_message_call
        self._reader = reader
        self._writer = writer
        self._debug_print = debug_print
        self._label = label
        self._thread = current_thread()
        self._closing = False
        self._did_wait_closed = False
        self._event_loop = asyncio.get_running_loop()
        self._out_packets: list[bytes] = []
        self._have_out_packets = asyncio.Event()
        self._run_called = False
        self._peer_info: Optional[_PeerInfo] = None
        self._keepalive_interval = keepalive_interval
        self._keepalive_timeout = keepalive_timeout

        # Need to hold weak-refs to these otherwise it creates dep-loops
        # which keeps us alive.
        self._tasks: list[weakref.ref[asyncio.Task]] = []

        # When we last got a keepalive or equivalent (time.monotonic value).
        self._last_keepalive_receive_time: Optional[float] = None

        # (Start near the end to make sure our looping logic is sound).
        self._next_message_id = 65530

        self._in_flight_messages: dict[int, _InFlightMessage] = {}

        if self._debug_print:
            peername = self._writer.get_extra_info('peername')
            print(f'{self._label}: connected to {peername} at {self._tm()}.')

    async def run(self) -> None:
        """Run the endpoint until the connection is lost or closed.

        Handles closing the provided reader/writer on close.
        """
        self._check_env()

        if self._run_called:
            raise RuntimeError('Run can be called only once per endpoint.')
        self._run_called = True

        core_tasks = [
            asyncio.create_task(
                self._run_core_task('keepalive', self._run_keepalive_task())),
            asyncio.create_task(
                self._run_core_task('read', self._run_read_task())),
            asyncio.create_task(
                self._run_core_task('write', self._run_write_task()))
        ]
        self._tasks += [weakref.ref(t) for t in core_tasks]

        # Run our core tasks until they all complete.
        results = await asyncio.gather(*core_tasks, return_exceptions=True)

        # Core tasks should handle their own errors; the only ones
        # we expect to bubble up are CancelledError.
        for result in results:
            # We want to know if any errors happened aside from
            # CancelledError (which are BaseExceptions, not Exception).
            # NOTE(review): errors here are only reported when debug_print
            # is set - confirm that is intentional.
            if isinstance(result, Exception):
                if self._debug_print:
                    logging.error('Got unexpected error from %s core task: %s',
                                  self._label, result)

        # Shut ourself down.
        try:
            self.close()
            await self.wait_closed()
        except Exception:
            logging.exception('Error closing %s.', self._label)

        if self._debug_print:
            print(f'{self._label}: finished.')

    async def send_message(self,
                           message: bytes,
                           timeout: Optional[float] = None) -> bytes:
        """Send a message to the peer and return a response.

        If timeout is not provided, the default will be used.
        Raises a CommunicationError if the round trip is not completed
        for any reason.
        """
        self._check_env()
        if len(message) > 65535:
            raise RuntimeError('Message cannot be larger than 65535 bytes')

        if self._closing:
            raise CommunicationError('Endpoint is closed')

        # Go with 16 bit looping value for message_id.
        message_id = self._next_message_id
        self._next_message_id = (self._next_message_id + 1) % 65536

        # Payload consists of type (1b), message_id (2b), len (2b), and data.
        self._enqueue_outgoing_packet(
            _PacketType.MESSAGE.value.to_bytes(1, _BYTE_ORDER) +
            message_id.to_bytes(2, _BYTE_ORDER) +
            len(message).to_bytes(2, _BYTE_ORDER) + message)

        # Make an entry so we know this message is out there.
        assert message_id not in self._in_flight_messages
        msgobj = self._in_flight_messages[message_id] = _InFlightMessage()

        # Also add its task to our list so we properly cancel it if we die.
        self._prune_tasks()  # Keep our list from filling with dead tasks.
        self._tasks.append(weakref.ref(msgobj.wait_task))

        # Note: we always want to incorporate a timeout. Individual
        # messages may hang or error on the other end and this ensures
        # we won't build up lots of zombie tasks waiting around for
        # responses that will never arrive.
        if timeout is None:
            timeout = self.DEFAULT_MESSAGE_TIMEOUT
        assert timeout is not None
        try:
            return await asyncio.wait_for(msgobj.wait_task, timeout=timeout)
        except asyncio.CancelledError as exc:
            if self._debug_print:
                print(f'{self._label}: message {message_id} was cancelled.')
            raise CommunicationError() from exc
        except asyncio.TimeoutError as exc:
            if self._debug_print:
                print(f'{self._label}: message {message_id} timed out.')

            # Stop waiting on the response.
            msgobj.wait_task.cancel()

            # Remove the record of this message.
            del self._in_flight_messages[message_id]

            # Let the user know something went wrong.
            raise CommunicationError() from exc

    def close(self) -> None:
        """Begin a graceful shutdown of the endpoint (idempotent)."""
        self._check_env()

        if self._closing:
            return

        if self._debug_print:
            print(f'{self._label}: closing...')

        self._closing = True

        # Kill all of our in-flight tasks.
        if self._debug_print:
            print(f'{self._label}: cancelling tasks...')
        for task in self._get_live_tasks():
            task.cancel()

        if self._debug_print:
            print(f'{self._label}: closing writer...')
        self._writer.close()

        # We don't need this anymore and it is likely to be creating a
        # dependency loop.
        del self._handle_raw_message_call

    def is_closing(self) -> bool:
        """Have we begun the process of closing?"""
        return self._closing

    async def wait_closed(self) -> None:
        """Wait for shutdown to complete; must be called after close()."""
        self._check_env()

        # Make sure we only *enter* this call once.
        if self._did_wait_closed:
            return
        self._did_wait_closed = True

        if not self._closing:
            raise RuntimeError('Must be called after close()')

        if self._debug_print:
            print(f'{self._label}: waiting for close to complete...')

        # Wait for all of our in-flight tasks to wrap up.
        results = await asyncio.gather(*self._get_live_tasks(),
                                       return_exceptions=True)
        for result in results:
            # We want to know if any errors happened aside from
            # CancelledError (which are BaseExceptions, not Exception).
            if isinstance(result, Exception):
                if self._debug_print:
                    logging.error(
                        'Got unexpected error cleaning up %s task: %s',
                        self._label, result)

        # Now wait for our writer to finish going down.
        # When we close our writer it generally triggers errors
        # in our current blocked read/writes. However that same
        # error is also sometimes returned from _writer.wait_closed().
        # See connection_lost() in asyncio/streams.py to see why.
        # So let's silently ignore it when that happens.
        assert self._writer.is_closing()
        try:
            await self._writer.wait_closed()
        except Exception as exc:
            if not self._is_expected_connection_error(exc):
                logging.exception('Error closing _writer for %s.', self._label)
            else:
                if self._debug_print:
                    print(f'{self._label}: silently ignoring error in'
                          f' _writer.wait_closed(): {exc}.')

    def _tm(self) -> str:
        """Simple readable time value for debugging."""
        tval = time.time() % 100.0
        return f'{tval:.2f}'

    async def _run_read_task(self) -> None:
        """Read from the peer."""
        self._check_env()
        assert self._peer_info is None

        # The first thing they should send us is their handshake; then
        # we'll know if/how we can talk to them.
        mlen = await self._read_int_32()
        message = (await self._reader.readexactly(mlen))
        self._peer_info = dataclass_from_json(_PeerInfo, message.decode())
        self._last_keepalive_receive_time = time.monotonic()
        if self._debug_print:
            print(f'{self._label}: received handshake at {self._tm()}.')

        # Now just sit and handle stuff as it comes in.
        while True:
            assert not self._closing

            # Read message type.
            mtype = _PacketType(await self._read_int_8())
            if mtype is _PacketType.HANDSHAKE:
                raise RuntimeError('Got multiple handshakes')

            if mtype is _PacketType.KEEPALIVE:
                if self._debug_print:
                    print(f'{self._label}: received keepalive'
                          f' at {self._tm()}.')
                self._last_keepalive_receive_time = time.monotonic()

            elif mtype is _PacketType.MESSAGE:
                await self._handle_message_packet()

            elif mtype is _PacketType.RESPONSE:
                await self._handle_response_packet()

            else:
                assert_never(mtype)

    async def _handle_message_packet(self) -> None:
        """Read one incoming message and spin off a task to process it."""
        msgid = await self._read_int_16()
        msglen = await self._read_int_16()
        msg = await self._reader.readexactly(msglen)
        if self._debug_print:
            print(f'{self._label}: received message {msgid}'
                  f' of size {msglen} at {self._tm()}.')

        # Create a message-task to handle this message and return
        # a response (we don't want to block while that happens).
        assert not self._closing
        self._prune_tasks()  # Keep from filling with dead tasks.
        self._tasks.append(
            weakref.ref(
                asyncio.create_task(
                    self._handle_raw_message(message_id=msgid, message=msg))))
        # Fix: this trace previously printed unconditionally; it is debug
        # output so it should respect the flag like all other traces here.
        if self._debug_print:
            print(f'{self._label}: done handling message at {self._tm()}.')

    async def _handle_response_packet(self) -> None:
        """Read one incoming response and deliver it to its waiter."""
        msgid = await self._read_int_16()
        rsplen = await self._read_int_16()
        if self._debug_print:
            print(f'{self._label}: received response {msgid}'
                  f' of size {rsplen} at {self._tm()}.')
        rsp = await self._reader.readexactly(rsplen)
        msgobj = self._in_flight_messages.get(msgid)
        if msgobj is None:
            # It's possible for us to get a response to a message
            # that has timed out. In this case we will have no local
            # record of it.
            if self._debug_print:
                print(f'{self._label}: got response for nonexistent'
                      f' message id {msgid}; perhaps it timed out?')
        else:
            msgobj.set_response(rsp)

    async def _run_write_task(self) -> None:
        """Write to the peer."""

        self._check_env()

        # Introduce ourself so our peer knows how it can talk to us.
        data = dataclass_to_json(
            _PeerInfo(protocol=OUR_PROTOCOL,
                      keepalive_interval=self._keepalive_interval)).encode()
        self._writer.write(len(data).to_bytes(4, _BYTE_ORDER) + data)

        # Now just write out-messages as they come in.
        while True:

            # Wait until some data comes in.
            await self._have_out_packets.wait()

            assert self._out_packets
            data = self._out_packets.pop(0)

            # Important: only clear this once all packets are sent.
            if not self._out_packets:
                self._have_out_packets.clear()

            self._writer.write(data)
            # NOTE(review): writer.drain() is not awaited here, so there
            # is no flow-control backpressure on outgoing data - confirm
            # this is intentional.

    async def _run_keepalive_task(self) -> None:
        """Send periodic keepalive packets."""
        self._check_env()

        # We explicitly send our own keepalive packets so we can stay
        # more on top of the connection state and possibly decide to
        # kill it when contact is lost more quickly than the OS would
        # do itself (or at least keep the user informed that the
        # connection is lagging). It sounds like we could have the TCP
        # layer do this sort of thing itself but that might be
        # OS-specific so gonna go this way for now.
        while True:
            assert not self._closing
            await asyncio.sleep(self._keepalive_interval)
            if not self.test_suppress_keepalives:
                self._enqueue_outgoing_packet(
                    _PacketType.KEEPALIVE.value.to_bytes(1, _BYTE_ORDER))

            # Also go ahead and handle dropping the connection if we
            # haven't heard from the peer in a while.
            # NOTE: perhaps we want to do something more exact than
            # this which only checks once per keepalive-interval?..
            now = time.monotonic()
            assert self._peer_info is not None

            if (self._last_keepalive_receive_time is not None
                    and now - self._last_keepalive_receive_time >
                    self._keepalive_timeout):
                if self._debug_print:
                    since = now - self._last_keepalive_receive_time
                    print(f'{self._label}: reached keepalive time-out'
                          f' ({since:.1f}s).')
                raise _KeepaliveTimeoutError()

    async def _run_core_task(self, tasklabel: str, call: Awaitable) -> None:
        """Run one core task, translating expected errors to clean exits."""
        try:
            await call
        except Exception as exc:
            # We expect connection errors to put us here, but make noise
            # if something else does.
            if not self._is_expected_connection_error(exc):
                logging.exception('Unexpected error in rpc %s %s task.',
                                  self._label, tasklabel)
            else:
                if self._debug_print:
                    print(f'{self._label}: {tasklabel} task will exit cleanly'
                          f' due to {exc!r}.')
        finally:
            # Any core task exiting triggers shutdown.
            if self._debug_print:
                print(f'{self._label}: {tasklabel} task exiting...')
            self.close()

    async def _handle_raw_message(self, message_id: int,
                                  message: bytes) -> None:
        """Feed one message to the user handler and send back its response."""
        try:
            response = await self._handle_raw_message_call(message)
        except Exception:
            # We expect local message handler to always succeed.
            # If that doesn't happen, make a fuss so we know to fix it.
            # The other end will simply never get a response to this
            # message.
            logging.exception('Error handling message')
            return

        # Now send back our response.
        # Payload consists of type (1b), msgid (2b), len (2b), and data.
        self._enqueue_outgoing_packet(
            _PacketType.RESPONSE.value.to_bytes(1, _BYTE_ORDER) +
            message_id.to_bytes(2, _BYTE_ORDER) +
            len(response).to_bytes(2, _BYTE_ORDER) + response)

    async def _read_int_8(self) -> int:
        return int.from_bytes(await self._reader.readexactly(1), _BYTE_ORDER)

    async def _read_int_16(self) -> int:
        return int.from_bytes(await self._reader.readexactly(2), _BYTE_ORDER)

    async def _read_int_32(self) -> int:
        return int.from_bytes(await self._reader.readexactly(4), _BYTE_ORDER)

    @classmethod
    def _is_expected_connection_error(cls, exc: Exception) -> bool:
        """Return whether exc is a 'normal' way for the connection to die."""

        # We expect this stuff to be what ends us.
        if isinstance(exc, (
                ConnectionError,
                EOFError,
                _KeepaliveTimeoutError,
        )):
            return True

        # Am occasionally getting a specific SSL error on shutdown which I
        # believe is harmless (APPLICATION_DATA_AFTER_CLOSE_NOTIFY).
        # It sounds like it may soon be ignored by Python (as of March 2022).
        # Let's still complain, however, if we get any SSL errors besides
        # this one. https://bugs.python.org/issue39951
        if isinstance(exc, ssl.SSLError):
            if 'APPLICATION_DATA_AFTER_CLOSE_NOTIFY' in str(exc):
                return True

        return False

    def _check_env(self) -> None:
        # I was seeing that asyncio stuff wasn't working as expected if
        # created in one thread and used in another, so let's enforce
        # a single thread for all use of an instance.
        if current_thread() is not self._thread:
            raise RuntimeError('This must be called from the same thread'
                               ' that the endpoint was created in.')

        # This should always be the case if thread is the same.
        assert asyncio.get_running_loop() is self._event_loop

    def _enqueue_outgoing_packet(self, data: bytes) -> None:
        """Enqueue a raw packet to be sent. Must be called from our loop."""
        self._check_env()

        # Fix: removed a no-op 'if bool(True):' wrapper that previously
        # enclosed this debug print.
        if self._debug_print:
            print(f'{self._label}: enqueueing outgoing packet'
                  f' {data[:50]!r} at {self._tm()}.')

        # Add the data and let our write task know about it.
        self._out_packets.append(data)
        self._have_out_packets.set()

    def _prune_tasks(self) -> None:
        """Drop weak-refs to tasks that have died or completed."""
        out: list[weakref.ref[asyncio.Task]] = []
        for task_weak_ref in self._tasks:
            task = task_weak_ref()
            if task is not None and not task.done():
                out.append(task_weak_ref)
        self._tasks = out

    def _get_live_tasks(self) -> list[asyncio.Task]:
        """Return the still-running tasks we are tracking."""
        out: list[asyncio.Task] = []
        for task_weak_ref in self._tasks:
            task = task_weak_ref()
            if task is not None and not task.done():
                out.append(task)
        return out
|
||||
@ -663,3 +663,21 @@ def unchanging_hostname() -> str:
|
||||
check=True,
|
||||
capture_output=True).stdout.decode().strip().replace(' ', '-')
|
||||
return os.uname().nodename
|
||||
|
||||
|
||||
def set_canonical_module(module_globals: dict[str, Any],
                         names: list[str]) -> None:
    """Override any __module__ attrs on passed classes/etc.

    This allows classes to present themselves using clean paths such as
    mymodule.MyClass instead of possibly ugly internal ones such as
    mymodule._internal._stuff.MyClass.
    """
    modname = module_globals.get('__name__')
    if not isinstance(modname, str):
        raise RuntimeError('Unable to get module name.')
    for attrname in names:
        candidate = module_globals[attrname]
        # Only touch objects that actually carry a differing __module__.
        current = getattr(candidate, '__module__', None)
        if current is not None and current != modname:
            candidate.__module__ = modname
|
||||
|
||||
@ -13,7 +13,6 @@ from __future__ import annotations
|
||||
|
||||
import os
|
||||
import json
|
||||
import subprocess
|
||||
import platform
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
@ -106,11 +105,6 @@ def replace_one(opstr: str, old: str, new: str) -> str:
|
||||
return opstr.replace(old, new)
|
||||
|
||||
|
||||
def run(cmd: str) -> None:
|
||||
"""Run a shell command, checking errors."""
|
||||
subprocess.run(cmd, shell=True, check=True)
|
||||
|
||||
|
||||
def get_files_hash(filenames: Sequence[Union[str, Path]],
|
||||
extrahash: str = '',
|
||||
int_only: bool = False,
|
||||
|
||||
@ -11,8 +11,8 @@ Cached files are gathered and uploaded as part of the pubsync process.
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import json
|
||||
import subprocess
|
||||
from typing import TYPE_CHECKING
|
||||
from multiprocessing import cpu_count
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
@ -62,7 +62,6 @@ def _project_centric_path(path: str) -> str:
|
||||
def get_target(path: str) -> None:
|
||||
"""Fetch a target path from the cache, downloading if need be."""
|
||||
from efro.error import CleanError
|
||||
from efrotools import run
|
||||
|
||||
path = _project_centric_path(path)
|
||||
|
||||
@ -111,7 +110,9 @@ def get_target(path: str) -> None:
|
||||
if result.returncode != 0:
|
||||
raise CleanError('Download failed; is your internet working?')
|
||||
|
||||
run(f'mv {local_cache_path_dl} {local_cache_path}')
|
||||
subprocess.run(f'mv {local_cache_path_dl} {local_cache_path}',
|
||||
shell=True,
|
||||
check=True)
|
||||
|
||||
# Ok we should have a valid .tar.gz file in our cache dir at this point.
|
||||
# Just expand it and it get placed wherever it belongs.
|
||||
@ -138,7 +139,7 @@ def get_target(path: str) -> None:
|
||||
# The file will wind up with the timestamp it was compressed with,
|
||||
# so let's update its timestamp or else it will still be considered
|
||||
# dirty.
|
||||
run(f'touch {path}')
|
||||
subprocess.run(f'touch {path}', shell=True, check=True)
|
||||
if not os.path.exists(path):
|
||||
raise RuntimeError(f'File {path} did not wind up as expected.')
|
||||
|
||||
@ -240,7 +241,6 @@ def update_cache(makefile_dirs: list[str]) -> None:
|
||||
|
||||
def _upload_cache(fnames1: list[str], fnames2: list[str], hashes_str: str,
|
||||
hashes_existing_str: str) -> None:
|
||||
from efrotools import run
|
||||
|
||||
# First, if we've run before, print the files causing us to re-run:
|
||||
if hashes_existing_str != '':
|
||||
@ -263,8 +263,8 @@ def _upload_cache(fnames1: list[str], fnames2: list[str], hashes_str: str,
|
||||
# Now do the thing.
|
||||
staging_dir = 'build/efrocache'
|
||||
mapping_file = 'build/efrocachemap'
|
||||
run(f'rm -rf {staging_dir}')
|
||||
run(f'mkdir -p {staging_dir}')
|
||||
subprocess.run(f'rm -rf {staging_dir}', shell=True, check=True)
|
||||
subprocess.run(f'mkdir -p {staging_dir}', shell=True, check=True)
|
||||
|
||||
_write_cache_files(fnames1, fnames2, staging_dir, mapping_file)
|
||||
|
||||
@ -273,12 +273,18 @@ def _upload_cache(fnames1: list[str], fnames2: list[str], hashes_str: str,
|
||||
|
||||
# Sync all individual cache files to the staging server.
|
||||
print(f'{Clr.SBLU}Pushing cache to staging...{Clr.RST}', flush=True)
|
||||
run('rsync --progress --recursive --human-readable build/efrocache/'
|
||||
' ubuntu@ballistica.net:files.ballistica.net/cache/ba1/')
|
||||
subprocess.run(
|
||||
'rsync --progress --recursive --human-readable build/efrocache/'
|
||||
' ubuntu@ballistica.net:files.ballistica.net/cache/ba1/',
|
||||
shell=True,
|
||||
check=True)
|
||||
|
||||
# Now generate the starter cache on the server..
|
||||
run('ssh -oBatchMode=yes -oStrictHostKeyChecking=yes ubuntu@ballistica.net'
|
||||
' "cd files.ballistica.net/cache/ba1 && python3 genstartercache.py"')
|
||||
subprocess.run(
|
||||
'ssh -oBatchMode=yes -oStrictHostKeyChecking=yes ubuntu@ballistica.net'
|
||||
' "cd files.ballistica.net/cache/ba1 && python3 genstartercache.py"',
|
||||
shell=True,
|
||||
check=True)
|
||||
|
||||
|
||||
def _gen_hashes(fnames: list[str]) -> str:
|
||||
@ -362,7 +368,6 @@ def _write_cache_files(fnames1: list[str], fnames2: list[str],
|
||||
|
||||
def _write_cache_file(staging_dir: str, fname: str) -> tuple[str, str]:
|
||||
import hashlib
|
||||
from efrotools import run
|
||||
print(f'Caching {fname}')
|
||||
if ' ' in fname:
|
||||
raise RuntimeError('Spaces in paths not supported.')
|
||||
@ -382,7 +387,9 @@ def _write_cache_file(staging_dir: str, fname: str) -> tuple[str, str]:
|
||||
# with no embedded timestamps.
|
||||
# Note: The 'COPYFILE_DISABLE' prevents mac tar from adding
|
||||
# file attributes/resource-forks to the archive as as ._filename.
|
||||
run(f'COPYFILE_DISABLE=1 tar cf - {fname} | gzip -n > {path}')
|
||||
subprocess.run(f'COPYFILE_DISABLE=1 tar cf - {fname} | gzip -n > {path}',
|
||||
shell=True,
|
||||
check=True)
|
||||
return fname, hashpath
|
||||
|
||||
|
||||
@ -409,7 +416,6 @@ def _check_warm_start_entries(entries: list[tuple[str, str]]) -> None:
|
||||
|
||||
def warm_start_cache() -> None:
|
||||
"""Run a pre-pass on the efrocache to improve efficiency."""
|
||||
from efrotools import run
|
||||
|
||||
# We maintain a starter-cache on the staging server, which
|
||||
# is simply the latest set of cache entries compressed into a single
|
||||
@ -420,12 +426,17 @@ def warm_start_cache() -> None:
|
||||
# downloading thousands)
|
||||
if not os.path.exists(CACHE_DIR_NAME):
|
||||
print('Downloading asset starter-cache...', flush=True)
|
||||
run(f'curl --fail {BASE_URL}startercache.tar.xz'
|
||||
f' --output startercache.tar.xz')
|
||||
subprocess.run(
|
||||
f'curl --fail {BASE_URL}startercache.tar.xz'
|
||||
f' --output startercache.tar.xz',
|
||||
shell=True,
|
||||
check=True)
|
||||
print('Decompressing starter-cache...', flush=True)
|
||||
run('tar -xf startercache.tar.xz')
|
||||
run(f'mv efrocache {CACHE_DIR_NAME}')
|
||||
run('rm startercache.tar.xz')
|
||||
subprocess.run('tar -xf startercache.tar.xz', shell=True, check=True)
|
||||
subprocess.run(f'mv efrocache {CACHE_DIR_NAME}',
|
||||
shell=True,
|
||||
check=True)
|
||||
subprocess.run('rm startercache.tar.xz', shell=True, check=True)
|
||||
print('Starter-cache fetched successfully!'
|
||||
' (should speed up asset builds)')
|
||||
|
||||
|
||||
@ -11,18 +11,22 @@ from typing import TYPE_CHECKING
|
||||
from efrotools.code import format_yapf_str
|
||||
|
||||
if TYPE_CHECKING:
|
||||
pass
|
||||
from typing import Optional
|
||||
|
||||
from efro.message import MessageProtocol
|
||||
|
||||
|
||||
def standard_message_sender_gen_pcommand(
|
||||
projroot: Path,
|
||||
basename: str,
|
||||
source_module: str,
|
||||
enable_sync_sends: bool,
|
||||
enable_async_sends: bool,
|
||||
get_protocol_call: str = 'get_protocol') -> None:
|
||||
projroot: Path,
|
||||
basename: str,
|
||||
source_module: str,
|
||||
enable_sync_sends: bool,
|
||||
enable_async_sends: bool,
|
||||
get_protocol_call: str = 'get_protocol',
|
||||
embedded: bool = False,
|
||||
) -> None:
|
||||
"""Used by pcommands taking a single filename argument."""
|
||||
|
||||
# pylint: disable=too-many-locals
|
||||
import efro.message
|
||||
from efro.terminal import Clr
|
||||
from efro.error import CleanError
|
||||
@ -35,13 +39,32 @@ def standard_message_sender_gen_pcommand(
|
||||
get_protocol_import = (f'({get_protocol_call})' if
|
||||
len(get_protocol_call) >= 14 else get_protocol_call)
|
||||
|
||||
protocol_create_code = (
|
||||
f'from {source_module} import {get_protocol_import}\n'
|
||||
f'protocol = {get_protocol_call}()')
|
||||
# In embedded situations we have to pass different code to import
|
||||
# the protocol at build time than we do in our runtime code (where
|
||||
# there is only a dummy import for type-checking purposes)
|
||||
protocol_module_level_import_code: Optional[str]
|
||||
build_time_protocol_create_code: Optional[str]
|
||||
if embedded:
|
||||
protocol_module_level_import_code = (
|
||||
f'\n# Dummy import for type-checking purposes.\n'
|
||||
f'if bool(False):\n'
|
||||
f' from {source_module} import {get_protocol_import}')
|
||||
protocol_create_code = f'protocol = {get_protocol_call}()'
|
||||
build_time_protocol_create_code = (
|
||||
f'from {source_module} import {get_protocol_import}\n'
|
||||
f'protocol = {get_protocol_call}()')
|
||||
else:
|
||||
protocol_module_level_import_code = None
|
||||
protocol_create_code = (
|
||||
f'from {source_module} import {get_protocol_import}\n'
|
||||
f'protocol = {get_protocol_call}()')
|
||||
build_time_protocol_create_code = None
|
||||
|
||||
module_code = efro.message.create_sender_module(
|
||||
basename,
|
||||
protocol_create_code=protocol_create_code,
|
||||
protocol_module_level_import_code=protocol_module_level_import_code,
|
||||
build_time_protocol_create_code=build_time_protocol_create_code,
|
||||
enable_sync_sends=enable_sync_sends,
|
||||
enable_async_sends=enable_async_sends,
|
||||
)
|
||||
@ -54,12 +77,15 @@ def standard_message_sender_gen_pcommand(
|
||||
|
||||
|
||||
def standard_message_receiver_gen_pcommand(
|
||||
projroot: Path,
|
||||
basename: str,
|
||||
source_module: str,
|
||||
is_async: bool,
|
||||
get_protocol_call: str = 'get_protocol') -> None:
|
||||
projroot: Path,
|
||||
basename: str,
|
||||
source_module: str,
|
||||
is_async: bool,
|
||||
get_protocol_call: str = 'get_protocol',
|
||||
embedded: bool = False,
|
||||
) -> None:
|
||||
"""Used by pcommands generating efro.message receiver modules."""
|
||||
# pylint: disable=too-many-locals
|
||||
|
||||
import efro.message
|
||||
from efro.terminal import Clr
|
||||
@ -74,13 +100,32 @@ def standard_message_receiver_gen_pcommand(
|
||||
get_protocol_import = (f'({get_protocol_call})' if
|
||||
len(get_protocol_call) >= 14 else get_protocol_call)
|
||||
|
||||
protocol_create_code = (
|
||||
f'from {source_module} import {get_protocol_import}\n'
|
||||
f'protocol = {get_protocol_call}()')
|
||||
# In embedded situations we have to pass different code to import
|
||||
# the protocol at build time than we do in our runtime code (where
|
||||
# there is only a dummy import for type-checking purposes)
|
||||
protocol_module_level_import_code: Optional[str]
|
||||
build_time_protocol_create_code: Optional[str]
|
||||
if embedded:
|
||||
protocol_module_level_import_code = (
|
||||
f'\n# Dummy import for type-checking purposes.\n'
|
||||
f'if bool(False):\n'
|
||||
f' from {source_module} import {get_protocol_import}')
|
||||
protocol_create_code = f'protocol = {get_protocol_call}()'
|
||||
build_time_protocol_create_code = (
|
||||
f'from {source_module} import {get_protocol_import}\n'
|
||||
f'protocol = {get_protocol_call}()')
|
||||
else:
|
||||
protocol_module_level_import_code = None
|
||||
protocol_create_code = (
|
||||
f'from {source_module} import {get_protocol_import}\n'
|
||||
f'protocol = {get_protocol_call}()')
|
||||
build_time_protocol_create_code = None
|
||||
|
||||
module_code = efro.message.create_receiver_module(
|
||||
basename,
|
||||
protocol_create_code=protocol_create_code,
|
||||
protocol_module_level_import_code=protocol_module_level_import_code,
|
||||
build_time_protocol_create_code=build_time_protocol_create_code,
|
||||
is_async=is_async,
|
||||
)
|
||||
|
||||
|
||||
@ -573,6 +573,10 @@ def pytest() -> None:
|
||||
# which can screw up our builds.
|
||||
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
|
||||
|
||||
# Let's flip on dev mode to hopefully be informed on more bad stuff
|
||||
# happening. https://docs.python.org/3/library/devmode.html
|
||||
os.environ['PYTHONDEVMODE'] = '1'
|
||||
|
||||
# Do the thing.
|
||||
results = subprocess.run([PYTHON_BIN, '-m', 'pytest'] + sys.argv[2:],
|
||||
check=False)
|
||||
|
||||
@ -5,9 +5,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from efrotools import PYVER, run, readfile, writefile, replace_one
|
||||
from efrotools import PYVER, readfile, writefile, replace_one
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
@ -33,13 +34,14 @@ PRUNE_DLL_NAMES = ['*.ico']
|
||||
def build_apple(arch: str, debug: bool = False) -> None:
|
||||
"""Run a build for the provided apple arch (mac, ios, or tvos)."""
|
||||
import platform
|
||||
import subprocess
|
||||
from efro.error import CleanError
|
||||
|
||||
# IMPORTANT; seems we currently wind up building against /usr/local gettext
|
||||
# stuff. Hopefully the maintainer fixes this, but for now I need to
|
||||
# remind myself to blow it away while building.
|
||||
# (via brew remove gettext --ignore-dependencies)
|
||||
# NOTE: Should check to see if this is still necessary on Apple silicon
|
||||
# since homebrew stuff is no longer in /usr/local there.
|
||||
if ('MacBook-Fro' in platform.node()
|
||||
and os.environ.get('SKIP_GETTEXT_WARNING') != '1'):
|
||||
if (subprocess.run('which gettext', shell=True,
|
||||
@ -48,11 +50,15 @@ def build_apple(arch: str, debug: bool = False) -> None:
|
||||
'NEED TO TEMP-KILL GETTEXT (or set SKIP_GETTEXT_WARNING=1)')
|
||||
|
||||
builddir = 'build/python_apple_' + arch + ('_debug' if debug else '')
|
||||
run('rm -rf "' + builddir + '"')
|
||||
run('mkdir -p build')
|
||||
run('git clone '
|
||||
'https://github.com/beeware/Python-Apple-support.git "' + builddir +
|
||||
'"')
|
||||
subprocess.run(['rm', '-rf', builddir], check=True)
|
||||
subprocess.run(['mkdir', '-p', 'build'], check=True)
|
||||
subprocess.run(
|
||||
[
|
||||
'git', 'clone',
|
||||
'https://github.com/beeware/Python-Apple-support.git', builddir
|
||||
],
|
||||
check=True,
|
||||
)
|
||||
os.chdir(builddir)
|
||||
|
||||
# TEMP: Check out a particular commit while the branch head is broken.
|
||||
@ -60,7 +66,7 @@ def build_apple(arch: str, debug: bool = False) -> None:
|
||||
# broke in the underlying build even on old commits so keeping it
|
||||
# locked for now...
|
||||
# run('git checkout bf1ed73d0d5ff46862ba69dd5eb2ffaeff6f19b6')
|
||||
run(f'git checkout {PYVER}')
|
||||
subprocess.run(['git', 'checkout', PYVER], check=True)
|
||||
|
||||
txt = readfile('Makefile')
|
||||
|
||||
@ -111,19 +117,32 @@ def build_apple(arch: str, debug: bool = False) -> None:
|
||||
f'../../../../../tools/pcommand python_apple_patch {arch}\n'
|
||||
' # Configure target Python\n',
|
||||
)
|
||||
|
||||
# Use python3 instead of python for libffi setup script
|
||||
txt = replace_one(
|
||||
txt,
|
||||
'cd $$(LIBFFI_DIR-$1) && python generate-darwin-source-and-headers.py'
|
||||
" --only-$(shell echo $1 | tr '[:upper:]' '[:lower:]')",
|
||||
'cd $$(LIBFFI_DIR-$1) && python3 generate-darwin-source-and-headers.py'
|
||||
" --only-$(shell echo $1 | tr '[:upper:]' '[:lower:]')",
|
||||
)
|
||||
|
||||
writefile('Makefile', txt)
|
||||
|
||||
# Ok; let 'er rip.
|
||||
# (we run these in parallel so limit to 1 job a piece;
|
||||
# otherwise they inherit the -j12 or whatever from the top level)
|
||||
# (also this build seems to fail with multiple threads)
|
||||
run(
|
||||
'make -j1 ' + {
|
||||
'mac': 'Python-macOS',
|
||||
# 'mac': 'build/macOS/Python-3.9.6-macOS/Makefile',
|
||||
'ios': 'Python-iOS',
|
||||
'tvos': 'Python-tvOS'
|
||||
}[arch])
|
||||
subprocess.run(
|
||||
[
|
||||
'make', '-j1', {
|
||||
'mac': 'Python-macOS',
|
||||
'ios': 'Python-iOS',
|
||||
'tvos': 'Python-tvOS'
|
||||
}[arch]
|
||||
],
|
||||
check=True,
|
||||
)
|
||||
print('python build complete! (apple/' + arch + ')')
|
||||
|
||||
|
||||
@ -146,13 +165,17 @@ def build_android(rootdir: str, arch: str, debug: bool = False) -> None:
|
||||
|
||||
(can be arm, arm64, x86, or x86_64)
|
||||
"""
|
||||
import subprocess
|
||||
|
||||
builddir = 'build/python_android_' + arch + ('_debug' if debug else '')
|
||||
run('rm -rf "' + builddir + '"')
|
||||
run('mkdir -p build')
|
||||
run('git clone '
|
||||
'https://github.com/yan12125/python3-android.git "' + builddir + '"')
|
||||
subprocess.run(['rm', '-rf', builddir], check=True)
|
||||
subprocess.run(['mkdir', '-p', 'build'], check=True)
|
||||
subprocess.run(
|
||||
[
|
||||
'git', 'clone', 'https://github.com/yan12125/python3-android.git',
|
||||
builddir
|
||||
],
|
||||
check=True,
|
||||
)
|
||||
os.chdir(builddir)
|
||||
|
||||
# These builds require ANDROID_NDK to be set; make sure that's the case.
|
||||
@ -202,7 +225,9 @@ def build_android(rootdir: str, arch: str, debug: bool = False) -> None:
|
||||
|
||||
# Ok, let 'er rip
|
||||
exargs = ' --with-pydebug' if debug else ''
|
||||
run(f'ARCH={arch} ANDROID_API=21 ./build.sh{exargs}')
|
||||
subprocess.run(f'ARCH={arch} ANDROID_API=21 ./build.sh{exargs}',
|
||||
shell=True,
|
||||
check=True)
|
||||
print('python build complete! (android/' + arch + ')')
|
||||
|
||||
|
||||
@ -386,11 +411,17 @@ def winprune() -> None:
|
||||
for libdir in ('assets/src/windows/Win32/Lib',
|
||||
'assets/src/windows/x64/Lib'):
|
||||
assert os.path.isdir(libdir)
|
||||
run('cd "' + libdir + '" && rm -rf ' + ' '.join(PRUNE_LIB_NAMES))
|
||||
subprocess.run('cd "' + libdir + '" && rm -rf ' +
|
||||
' '.join(PRUNE_LIB_NAMES),
|
||||
shell=True,
|
||||
check=True)
|
||||
for dlldir in ('assets/src/windows/Win32/DLLs',
|
||||
'assets/src/windows/x64/DLLs'):
|
||||
assert os.path.isdir(dlldir)
|
||||
run('cd "' + dlldir + '" && rm -rf ' + ' '.join(PRUNE_DLL_NAMES))
|
||||
subprocess.run('cd "' + dlldir + '" && rm -rf ' +
|
||||
' '.join(PRUNE_DLL_NAMES),
|
||||
shell=True,
|
||||
check=True)
|
||||
print('Win-prune successful.')
|
||||
|
||||
|
||||
@ -417,7 +448,7 @@ def gather() -> None:
|
||||
existing_dirs = [d for d in existing_dirs if 'android' not in d]
|
||||
|
||||
for existing_dir in existing_dirs:
|
||||
run('rm -rf "' + existing_dir + '"')
|
||||
subprocess.run(['rm', '-rf', existing_dir], check=True)
|
||||
|
||||
apost2 = f'src/Python-{PY_VER_EXACT_ANDROID}/Android/sysroot'
|
||||
for buildtype in ['debug', 'release']:
|
||||
@ -554,22 +585,30 @@ def gather() -> None:
|
||||
|
||||
# Do some setup only once per group.
|
||||
if not os.path.exists(builddir):
|
||||
run('mkdir -p "' + builddir + '"')
|
||||
run('mkdir -p "' + lib_dst + '"')
|
||||
subprocess.run(['mkdir', '-p', builddir], check=True)
|
||||
subprocess.run(['mkdir', '-p', lib_dst], check=True)
|
||||
|
||||
# Only pull modules into game assets on release pass.
|
||||
if not debug:
|
||||
# Copy system modules into the src assets
|
||||
# dir for this group.
|
||||
run('mkdir -p "' + assets_src_dst + '"')
|
||||
run('rsync --recursive --include "*.py"'
|
||||
' --exclude __pycache__ --include "*/" --exclude "*" "'
|
||||
+ build['pylib'] + '/" "' + assets_src_dst + '"')
|
||||
subprocess.run(['mkdir', '-p', assets_src_dst], check=True)
|
||||
subprocess.run(
|
||||
[
|
||||
'rsync', '--recursive', '--include', '*.py',
|
||||
'--exclude', '__pycache__', '--include', '*/',
|
||||
'--exclude', '*', build['pylib'] + '/',
|
||||
assets_src_dst
|
||||
],
|
||||
check=True,
|
||||
)
|
||||
|
||||
# Prune a bunch of modules we don't need to cut
|
||||
# down on size.
|
||||
run('cd "' + assets_src_dst + '" && rm -rf ' +
|
||||
' '.join(PRUNE_LIB_NAMES))
|
||||
subprocess.run('cd "' + assets_src_dst + '" && rm -rf ' +
|
||||
' '.join(PRUNE_LIB_NAMES),
|
||||
shell=True,
|
||||
check=True)
|
||||
|
||||
# Some minor filtering to system scripts:
|
||||
# on iOS/tvOS, addusersitepackages() leads to a crash
|
||||
@ -587,11 +626,15 @@ def gather() -> None:
|
||||
|
||||
# Copy in a base set of headers (everything in a group should
|
||||
# be using the same headers)
|
||||
run(f'cp -r "{build["headers"]}" "{header_dst}"')
|
||||
subprocess.run(f'cp -r "{build["headers"]}" "{header_dst}"',
|
||||
shell=True,
|
||||
check=True)
|
||||
|
||||
# Clear whatever pyconfigs came across; we'll build our own
|
||||
# universal one below.
|
||||
run('rm ' + header_dst + '/pyconfig*')
|
||||
subprocess.run('rm ' + header_dst + '/pyconfig*',
|
||||
shell=True,
|
||||
check=True)
|
||||
|
||||
# Write a master pyconfig header that reroutes to each
|
||||
# platform's actual header.
|
||||
@ -637,15 +680,17 @@ def gather() -> None:
|
||||
writefile(header_dst + '/' + out, contents)
|
||||
else:
|
||||
# other configs we just rename
|
||||
run('cp "' + build['headers'] + '/' + cfg + '" "' +
|
||||
header_dst + '/' + out + '"')
|
||||
subprocess.run('cp "' + build['headers'] + '/' + cfg +
|
||||
'" "' + header_dst + '/' + out + '"',
|
||||
shell=True,
|
||||
check=True)
|
||||
|
||||
# Copy in libs. If the lib gave a specific install name,
|
||||
# use that; otherwise use name.
|
||||
targetdir = lib_dst + '/' + build.get('libinst', build['name'])
|
||||
run('rm -rf "' + targetdir + '"')
|
||||
run('mkdir -p "' + targetdir + '"')
|
||||
subprocess.run(['rm', '-rf', targetdir], check=True)
|
||||
subprocess.run(['mkdir', '-p', targetdir], check=True)
|
||||
for lib in build['libs']:
|
||||
run('cp "' + lib + '" "' + targetdir + '"')
|
||||
subprocess.run(['cp', lib, targetdir], check=True)
|
||||
|
||||
print('Great success!')
|
||||
|
||||
@ -19,7 +19,7 @@ if TYPE_CHECKING:
|
||||
# in a file we haven't seen yet, we copy it into the temp dir,
|
||||
# filter it a bit to add reveal_type() statements, and run mypy on it.
|
||||
# The temp dir should tear itself down when Python exits.
|
||||
_tempdir: Optional[tempfile.TemporaryDirectory] = None
|
||||
_tempdir: Optional[str] = None
|
||||
_statictestfiles: dict[str, StaticTestFile] = {}
|
||||
_nextfilenum: int = 1
|
||||
|
||||
@ -29,7 +29,6 @@ class StaticTestFile:
|
||||
|
||||
def __init__(self, filename: str):
|
||||
# pylint: disable=global-statement, invalid-name
|
||||
# pylint: disable=consider-using-with
|
||||
global _tempdir, _nextfilenum
|
||||
# pylint: enable=global-statement, invalid-name
|
||||
|
||||
@ -56,20 +55,23 @@ class StaticTestFile:
|
||||
# Create a single shared temp dir
|
||||
# (so that we can recycle our mypy cache).
|
||||
if _tempdir is None:
|
||||
_tempdir = tempfile.TemporaryDirectory()
|
||||
# Eww; not cleaning up this temp dir (though the
|
||||
# OS should eventually). Using TemporaryDirectory() gives us
|
||||
# a warning though because we don't explicitly clean it up.
|
||||
_tempdir = tempfile.mkdtemp()
|
||||
# print(f"Created temp dir at {_tempdir.name}")
|
||||
|
||||
# Copy our file into the temp dir with a unique name, find all
|
||||
# instances of static_type_equals(), and run mypy type checks
|
||||
# in those places to get static types.
|
||||
tempfilepath = os.path.join(_tempdir.name, self.modulename + '.py')
|
||||
tempfilepath = os.path.join(_tempdir, self.modulename + '.py')
|
||||
with open(tempfilepath, 'w', encoding='utf-8') as outfile:
|
||||
outfile.write(self.filter_file_contents(fdata))
|
||||
results = subprocess.run(
|
||||
[
|
||||
PYTHON_BIN, '-m', 'mypy', '--no-error-summary',
|
||||
'--config-file', '.mypy.ini', '--cache-dir',
|
||||
os.path.join(_tempdir.name, '.mypy_cache'), tempfilepath
|
||||
os.path.join(_tempdir, '.mypy_cache'), tempfilepath
|
||||
],
|
||||
capture_output=True,
|
||||
check=False,
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user