work on pcommandbatch

This commit is contained in:
Eric 2023-08-16 15:39:18 -07:00
parent 131d213579
commit 29fd1c2f2f
No known key found for this signature in database
GPG Key ID: 89C93F0F8D6D5A98
27 changed files with 6108 additions and 1144 deletions

88
.efrocachemap generated
View File

@ -4068,50 +4068,50 @@
"build/assets/windows/Win32/ucrtbased.dll": "2def5335207d41b21b9823f6805997f1",
"build/assets/windows/Win32/vc_redist.x86.exe": "b08a55e2e77623fe657bea24f223a3ae",
"build/assets/windows/Win32/vcruntime140d.dll": "865b2af4d1e26a1a8073c89acb06e599",
"build/prefab/full/linux_arm64_gui/debug/ballisticakit": "1f6d9028dd5007bdb1ea940f3b1f9ffb",
"build/prefab/full/linux_arm64_gui/release/ballisticakit": "64da0286b58456e4e1dbac45e7ba77e7",
"build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "0e2e107d97f6256e971ef5f5391c7641",
"build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "06c59df009b3f8ef7a6bc7f3dd02fa27",
"build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "12612fae133533d2155f5094b44b8876",
"build/prefab/full/linux_x86_64_gui/release/ballisticakit": "1ff6dee3bcdeccea98c43cefe38dbd59",
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "c761e7132d753091db6f7621a4d82507",
"build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "fa79d2d5fe20dde12d8631b681b8bc02",
"build/prefab/full/mac_arm64_gui/debug/ballisticakit": "28981340fb5acd51c068ce649c22019f",
"build/prefab/full/mac_arm64_gui/release/ballisticakit": "0c0b3c9951c97b85aa6ee4e0874b5131",
"build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "25e4319520528b66b8ce9b75cde4667b",
"build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "522750cf436f7f7dd004af239d3e5c9c",
"build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "13b5e8621b3f681af55c8f6d809e8cdf",
"build/prefab/full/mac_x86_64_gui/release/ballisticakit": "bd8f13259ea0de5831f94bfabd6662fc",
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "39f7be43e2d4a9d992a69c09d070f7c0",
"build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "23e005d825c4899b04e1df8275e63366",
"build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "48f6a92f679aaf6a96c3dd86a930fec1",
"build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "9aa5991d4222207b7d2ec057af6ac7d2",
"build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "27f080c17d30ad819005b36a3f529d9b",
"build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "bec0ebf67c7eac9cf93a8ca50fc894e8",
"build/prefab/lib/linux_arm64_gui/debug/libballisticaplus.a": "85ba4e81a1f7ae2cff4b1355eb49904f",
"build/prefab/lib/linux_arm64_gui/release/libballisticaplus.a": "498921f7eb2afd327d4b900cb70e31f9",
"build/prefab/lib/linux_arm64_server/debug/libballisticaplus.a": "85ba4e81a1f7ae2cff4b1355eb49904f",
"build/prefab/lib/linux_arm64_server/release/libballisticaplus.a": "498921f7eb2afd327d4b900cb70e31f9",
"build/prefab/lib/linux_x86_64_gui/debug/libballisticaplus.a": "ded5f785236bf64e644ee20041ac8342",
"build/prefab/lib/linux_x86_64_gui/release/libballisticaplus.a": "c436a058b7204fa39f22eafc7ca7855f",
"build/prefab/lib/linux_x86_64_server/debug/libballisticaplus.a": "ded5f785236bf64e644ee20041ac8342",
"build/prefab/lib/linux_x86_64_server/release/libballisticaplus.a": "c436a058b7204fa39f22eafc7ca7855f",
"build/prefab/lib/mac_arm64_gui/debug/libballisticaplus.a": "fe0ba4b21528a557c5a434b8f2eeda41",
"build/prefab/lib/mac_arm64_gui/release/libballisticaplus.a": "7950a02c3d9a1088e9acd4c29bd3cb72",
"build/prefab/lib/mac_arm64_server/debug/libballisticaplus.a": "fe0ba4b21528a557c5a434b8f2eeda41",
"build/prefab/lib/mac_arm64_server/release/libballisticaplus.a": "7950a02c3d9a1088e9acd4c29bd3cb72",
"build/prefab/lib/mac_x86_64_gui/debug/libballisticaplus.a": "870d11d339fd1b3acf66cc601ff29c83",
"build/prefab/lib/mac_x86_64_gui/release/libballisticaplus.a": "0ab638b6602610bdaf432e3cc2464080",
"build/prefab/lib/mac_x86_64_server/debug/libballisticaplus.a": "92394eb19387c363471ce134ac9e6a1b",
"build/prefab/lib/mac_x86_64_server/release/libballisticaplus.a": "0ab638b6602610bdaf432e3cc2464080",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "4c932459a387f75168a8a1eb32523300",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "41a00a3d9ea038fcde6bf43f7f88e6a2",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "3fecaabf37fdbaef3d8e7a4de4582d9e",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "95d54d70c4b9ff1ab788fd46eb0b73c4",
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "abbd93b2c28fa0bdffa2f72d3bf516f5",
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "bd9fe5e01ca4ee7c48d0f56158f2252d",
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "e0150f022655778773c6f954e257b113",
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "3c58883c79cbf4d8c66ddbeb1de935a5",
"build/prefab/full/linux_arm64_gui/debug/ballisticakit": "c26b65a24311880c74d69c4983b22ece",
"build/prefab/full/linux_arm64_gui/release/ballisticakit": "b5f0703ed12ca1a25200b5d4114909df",
"build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "02f6cf0e2fe78cc1ac9c8e3094f60079",
"build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "1468af4af839e714c2d622caee6b9181",
"build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "1464adfba201f6fcf79d54068a915409",
"build/prefab/full/linux_x86_64_gui/release/ballisticakit": "60fdad12226023caa043da4685831c8a",
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "ac01bff1450ed6b66bb77d8b0af6e84f",
"build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "58cf449ec84b211b0bb38d1d1358c974",
"build/prefab/full/mac_arm64_gui/debug/ballisticakit": "6da4ad354507711c5857c81e3bed4e33",
"build/prefab/full/mac_arm64_gui/release/ballisticakit": "d642aeeaeffdd5ebe07e968be2311da5",
"build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "0800f2ca27c13408afbb75b5bdf76bae",
"build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "e008727c4b62b7ef09c775b505cee886",
"build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "ef184ee79f268744612130743cf8369d",
"build/prefab/full/mac_x86_64_gui/release/ballisticakit": "c1ea1a2c7362b2a47b5f55f8ff112c61",
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "7997bb41bb8db4a2aa1105c498787c41",
"build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "939d32d2010fbcd76398fb9a08ac9152",
"build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "b35c2813cfa23a4d4c58f50b71617f69",
"build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "48eeea81dc9bba2fe9d8afae1c163b69",
"build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "4ae9e07d5d7b61bb5c019badfbef37a5",
"build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "95bbece528dfa908838caf48a496dca6",
"build/prefab/lib/linux_arm64_gui/debug/libballisticaplus.a": "2c39f4296ba083f11168beaa56256909",
"build/prefab/lib/linux_arm64_gui/release/libballisticaplus.a": "02b17ff1ab03fb4a526ef85186baf9b3",
"build/prefab/lib/linux_arm64_server/debug/libballisticaplus.a": "2c39f4296ba083f11168beaa56256909",
"build/prefab/lib/linux_arm64_server/release/libballisticaplus.a": "02b17ff1ab03fb4a526ef85186baf9b3",
"build/prefab/lib/linux_x86_64_gui/debug/libballisticaplus.a": "9a78f6330fea20ba8343b09a339595f1",
"build/prefab/lib/linux_x86_64_gui/release/libballisticaplus.a": "5a3358818ebea17293a1090d295e1047",
"build/prefab/lib/linux_x86_64_server/debug/libballisticaplus.a": "9a78f6330fea20ba8343b09a339595f1",
"build/prefab/lib/linux_x86_64_server/release/libballisticaplus.a": "5a3358818ebea17293a1090d295e1047",
"build/prefab/lib/mac_arm64_gui/debug/libballisticaplus.a": "6cc12ac10a557a546b6a9c3fd0792af0",
"build/prefab/lib/mac_arm64_gui/release/libballisticaplus.a": "e1fbd7e130511cd8690e0da886910d1a",
"build/prefab/lib/mac_arm64_server/debug/libballisticaplus.a": "6cc12ac10a557a546b6a9c3fd0792af0",
"build/prefab/lib/mac_arm64_server/release/libballisticaplus.a": "e1fbd7e130511cd8690e0da886910d1a",
"build/prefab/lib/mac_x86_64_gui/debug/libballisticaplus.a": "758dea018f7a06c611b9cff20e7d064f",
"build/prefab/lib/mac_x86_64_gui/release/libballisticaplus.a": "9355211cad3fae2a29eb8016f7cc062c",
"build/prefab/lib/mac_x86_64_server/debug/libballisticaplus.a": "6d309fba1c355902662343b627b6aa8c",
"build/prefab/lib/mac_x86_64_server/release/libballisticaplus.a": "9355211cad3fae2a29eb8016f7cc062c",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "e3085c83263ccc1c13e1bb344f0a7c8e",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "38a1826608e0829e25ceded2e5a8e50d",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "9b3612f4c807362baf25daed9bd8ab01",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "671c648cb9d8f257033b6c203e33aab8",
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "6b926b48877a0ecef54107be894f5dc2",
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "15d1aec51cf77095399b46b7a5da5880",
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "b5b4cf9234f0f4f8d657f2a98364aba9",
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "8418ee35e7ae3d6564df2c011b8e5838",
"src/assets/ba_data/python/babase/_mgen/__init__.py": "f885fed7f2ed98ff2ba271f9dbe3391c",
"src/assets/ba_data/python/babase/_mgen/enums.py": "f8cd3af311ac63147882590123b78318",
"src/ballistica/base/mgen/pyembed/binding_base.inc": "eeddad968b176000e31c65be6206a2bc",

View File

@ -202,6 +202,7 @@
<w>autodetected</w>
<w>autogenerate</w>
<w>autonoassets</w>
<w>autopep</w>
<w>autopoint</w>
<w>autoremove</w>
<w>autoretain</w>
@ -229,6 +230,7 @@
<w>bacoremeta</w>
<w>badguy</w>
<w>baenv</w>
<w>baenv's</w>
<w>bafoobar</w>
<w>bafoobarmeta</w>
<w>bafoundation</w>
@ -420,6 +422,7 @@
<w>cancelbtn</w>
<w>capb</w>
<w>caplog</w>
<w>capturable</w>
<w>capturetheflag</w>
<w>carentity</w>
<w>casefix</w>
@ -471,6 +474,7 @@
<w>charstr</w>
<w>chatmessage</w>
<w>chdir</w>
<w>chdir'ing</w>
<w>cheadersline</w>
<w>checkarg</w>
<w>checkarglist</w>
@ -875,6 +879,7 @@
<w>efrotoolsinternal</w>
<w>eftools</w>
<w>efxjtp</w>
<w>eglot</w>
<w>eids</w>
<w>elapsedf</w>
<w>elementtree</w>
@ -1035,6 +1040,7 @@
<w>filelist</w>
<w>filelock</w>
<w>filenames</w>
<w>fileno</w>
<w>filepath</w>
<w>fileselector</w>
<w>filesize</w>
@ -2121,7 +2127,10 @@
<w>pcall</w>
<w>pchild</w>
<w>pcommand</w>
<w>pcommandbatch</w>
<w>pcommandbatchbin</w>
<w>pcommands</w>
<w>pcommandserver</w>
<w>pcstr</w>
<w>pdataclass</w>
<w>pdoc</w>
@ -2203,6 +2212,7 @@
<w>popupscale</w>
<w>popupstr</w>
<w>popuptext</w>
<w>portfile</w>
<w>positionadjusted</w>
<w>posixpath</w>
<w>posixshmem</w>
@ -2381,6 +2391,7 @@
<w>pylintscripts</w>
<w>pylintscriptsfast</w>
<w>pylintscriptsfull</w>
<w>pylsp</w>
<w>pymodulenames</w>
<w>pyobjc</w>
<w>pyoffs</w>
@ -2474,6 +2485,7 @@
<w>responsetype</w>
<w>responsetypes</w>
<w>responsetypevar</w>
<w>resultcode</w>
<w>resultstr</w>
<w>retcode</w>
<w>retrysecs</w>
@ -2725,6 +2737,7 @@
<w>spinoffs</w>
<w>spinofftest</w>
<w>spinup</w>
<w>spinups</w>
<w>splayer</w>
<w>splitlen</w>
<w>splitnumstr</w>
@ -2868,6 +2881,7 @@
<w>syncitem</w>
<w>syncitems</w>
<w>synclist</w>
<w>sysargv</w>
<w>syscall</w>
<w>sysconfigdata</w>
<w>sysctl</w>
@ -3138,6 +3152,7 @@
<w>unstrl</w>
<w>unsubscriptable</w>
<w>untracked</w>
<w>unwritable</w>
<w>upcase</w>
<w>updatecheck</w>
<w>updatethencheck</w>

6
.idea/misc.xml generated
View File

@ -1,5 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Black">
<option name="cmdArguments" value="--line-length 80 --skip-string-normalization" />
<option name="enabledOnReformat" value="true" />
<option name="pathToExecutable" value="/opt/homebrew/bin/black" />
<option name="sdkUUID" value="1b270adb-5261-4492-85e8-d79b3894255d" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.11" project-jdk-type="Python SDK" />
<component name="PythonCompatibilityInspectionAdvertiser">
<option name="version" value="3" />

View File

@ -1,10 +1,15 @@
### 1.7.26 (build 21212, api 8, 2023-08-03)
- Various general improvements to the pcommand (project command) system.
- Modules containing pcommand functions are now named with an 's' - so
`pcommands.py` instead of `pcommand.py`. `pcommand.py` in efrotools is now
solely related to the functioning of the pcommand system.
### 1.7.25 (build 21211, api 8, 2023-08-03)
- Fixed an issue where the main thread was holding the Python GIL by default in
monolithic builds with environment-managed event loops. This theoretically
could have lead to stuttery performanace in the Android or Mac builds.
could have lead to stuttery performance in the Android or Mac builds.
- Did a bit of cleanup on `baenv.py` in preparation for some additional setup it
will soon be doing to give users more control over logging.
- `getconfig` and `setconfig` in `efrotools` are now `getprojectconfig` and

View File

@ -35,10 +35,21 @@ ifeq ($(BA_ENABLE_COMPILE_COMMANDS_DB),1)
PREREQ_COMPILE_COMMANDS_DB = .cache/compile_commands_db/compile_commands.json
endif
# Support for running pcommands in 'batch' mode in which a simple local server
# handles command requests from a lightweight client binary. This largely
# takes Python's startup time out of the equation, which can add up when
# running lots of small pcommands in cases such as asset builds.
PCOMMANDBATCHBIN := .cache/pcommandbatch/pcommandbatch
ifeq ($(BA_PCOMMANDBATCH_DISABLE),1)
PCOMMANDBATCH = tools/pcommand
else
PCOMMANDBATCH = $(PCOMMANDBATCHBIN)
endif
# Prereq targets that should be safe to run anytime; even if project-files
# are out of date.
PREREQS_SAFE = .cache/checkenv .dir-locals.el .mypy.ini .pyrightconfig.json \
.pycheckers .pylintrc .style.yapf .clang-format \
PREREQS_SAFE = .cache/checkenv $(PCOMMANDBATCH) .dir-locals.el .mypy.ini \
.pyrightconfig.json .pycheckers .pylintrc .style.yapf .clang-format \
ballisticakit-cmake/.clang-format .editorconfig
# Prereq targets that may break if the project needs updating should go here.
@ -170,11 +181,15 @@ docs:
docs-pdoc:
@tools/pcommand gen_docs_pdoc
pcommandbatch_speed_test: prereqs
@tools/pcommand pcommandbatch_speed_test $(PCOMMANDBATCH)
# Tell make which of these targets don't represent files.
.PHONY: help prereqs prereqs-pre-update prereqs-clean assets assets-cmake \
assets-cmake-scripts assets-windows assets-windows-Win32 assets-windows-x64 \
assets-mac assets-ios assets-android assets-clean resources resources-clean \
meta meta-clean clean clean-list dummymodules docs
.PHONY: help prereqs prereqs-pre-update prereqs-clean assets assets-cmake \
assets-cmake-scripts assets-windows assets-windows-Win32 assets-windows-x64 \
assets-mac assets-ios assets-android assets-clean resources resources-clean \
meta meta-clean clean clean-list dummymodules docs docs-pdoc \
pcommandbatch_speed_test
################################################################################
@ -1207,6 +1222,14 @@ SKIP_ENV_CHECKS ?= 0
tools/pcommand checkenv && mkdir -p .cache && touch .cache/checkenv; \
fi
foof: CHANGELOG.md CONTRIBUTORS.md Makefile
echo OUT IS $@
echo IN IS $^
$(PCOMMANDBATCHBIN): src/tools/pcommandbatch/pcommandbatch.c \
src/tools/pcommandbatch/cJSON.c
@tools/pcommand build_pcommandbatch $^ $@
# CMake build-type lowercase
CM_BT_LC = $(shell echo $(CMAKE_BUILD_TYPE) | tr A-Z a-z)

View File

@ -116,6 +116,7 @@
<w>audiocache</w>
<w>autodetected</w>
<w>automagically</w>
<w>autopep</w>
<w>autoselect</w>
<w>availmins</w>
<w>avel</w>
@ -135,6 +136,7 @@
<w>baclassicmeta</w>
<w>bacoremeta</w>
<w>baenv</w>
<w>baenv's</w>
<w>bafoobar</w>
<w>bafoobarmeta</w>
<w>bainternal</w>
@ -280,6 +282,7 @@
<w>cancelbtn</w>
<w>capitan</w>
<w>caplog</w>
<w>capturable</w>
<w>cargs</w>
<w>casefix</w>
<w>cbegin</w>
@ -308,6 +311,7 @@
<w>charstr</w>
<w>chatmessage</w>
<w>chdir</w>
<w>chdir'ing</w>
<w>checkarglist</w>
<w>checkboxwidget</w>
<w>checkchisel</w>
@ -534,6 +538,7 @@
<w>efrohack</w>
<w>efrohome</w>
<w>efrotoolsinternal</w>
<w>eglot</w>
<w>elapsedf</w>
<w>elems</w>
<w>elevenbase</w>
@ -634,6 +639,7 @@
<w>fifteenbits</w>
<w>filefilter</w>
<w>filelock</w>
<w>fileno</w>
<w>filt</w>
<w>filterdoc</w>
<w>filterstr</w>
@ -1268,7 +1274,10 @@
<w>pbasename</w>
<w>pbxgrp</w>
<w>pbxgrps</w>
<w>pcommandbatch</w>
<w>pcommandbatchbin</w>
<w>pcommands</w>
<w>pcommandserver</w>
<w>pdataclass</w>
<w>pdoc</w>
<w>pdst</w>
@ -1302,6 +1311,7 @@
<w>podcasts</w>
<w>popd</w>
<w>portaudio</w>
<w>portfile</w>
<w>positivex</w>
<w>positivey</w>
<w>positivez</w>
@ -1392,6 +1402,7 @@
<w>pyhome</w>
<w>pylib</w>
<w>pylibpath</w>
<w>pylsp</w>
<w>pymodulenames</w>
<w>pyobj</w>
<w>pyobjs</w>
@ -1467,6 +1478,7 @@
<w>responsecount</w>
<w>responsetypes</w>
<w>responsetypevar</w>
<w>resultcode</w>
<w>resync</w>
<w>retcode</w>
<w>retrysecs</w>
@ -1595,6 +1607,7 @@
<w>spinoffconfig</w>
<w>spinofftest</w>
<w>spinup</w>
<w>spinups</w>
<w>spivak</w>
<w>spwd</w>
<w>srcabs</w>
@ -1682,6 +1695,7 @@
<w>swiftmergegeneratedheaders</w>
<w>symbolification</w>
<w>symlinking</w>
<w>sysargv</w>
<w>syscall</w>
<w>syscalls</w>
<w>sysresponse</w>
@ -1824,6 +1838,7 @@
<w>unsignaled</w>
<w>unstuff</w>
<w>unsynchronized</w>
<w>unwritable</w>
<w>uppercased</w>
<w>userspace</w>
<w>usid</w>

View File

@ -40,7 +40,8 @@
"psutil",
"pbxproj.XcodeProject",
"pbxproj.pbxextensions",
"openstep_parser"
"openstep_parser",
"daemon"
],
"python_paths": [
"src/assets/ba_data/python",

View File

@ -172,7 +172,7 @@ ctx.filter_file_names = {
'assets_phase_xcode',
'ballistica_maya_tools.mel',
'check_python_syntax',
'compile_python_files',
'compile_python_file',
'pcommand',
'vmshell',
'cloudshell',

View File

@ -36,3 +36,6 @@ ignore_missing_imports = True
[mypy-openstep_parser.*]
ignore_missing_imports = True
[mypy-daemon.*]
ignore_missing_imports = True

View File

@ -21,55 +21,68 @@ PROJ_DIR = ../..
TOOLS_DIR = $(PROJ_DIR)/tools
BUILD_DIR = $(PROJ_DIR)/build/assets
PCOMMAND = $(TOOLS_DIR)/pcommand
# Support for running pcommands in 'batch' mode in which a simple local server
# handles command requests from a lightweight client binary. This largely
# takes Python's startup time out of the equation, which can add up when
# running lots of small pcommands in cases such as asset builds.
PCOMMANDBATCHBIN := $(PROJ_DIR)/.cache/pcommandbatch/pcommandbatch
ifeq ($(BA_PCOMMANDBATCH_DISABLE),1)
PCOMMANDBATCH = $(TOOLS_DIR)/pcommand
else
PCOMMANDBATCH = $(PCOMMANDBATCHBIN)
endif
# High level targets: generally these are what should be used here.
# Build everything needed for all platforms.
all:
@$(TOOLS_DIR)/pcommand warm_start_asset_build
@$(PCOMMAND) warm_start_asset_build
@$(MAKE) assets
@$(TOOLS_DIR)/pcommand clean_orphaned_assets
@$(PCOMMAND) clean_orphaned_assets
# Build everything needed for our cmake builds (linux, mac).
cmake:
@$(TOOLS_DIR)/pcommand warm_start_asset_build
@$(PCOMMAND) warm_start_asset_build
@$(MAKE) assets-cmake
@$(TOOLS_DIR)/pcommand clean_orphaned_assets
@$(PCOMMAND) clean_orphaned_assets
# Build everything needed for our server builds.
server:
@echo Note - skipping warm_start_asset_build for server target.
@$(MAKE) assets-server
@$(TOOLS_DIR)/pcommand clean_orphaned_assets
@$(PCOMMAND) clean_orphaned_assets
# Build everything needed for x86 windows builds.
win-Win32:
@$(TOOLS_DIR)/pcommand warm_start_asset_build
@$(PCOMMAND) warm_start_asset_build
@$(MAKE) assets-win-Win32
@$(TOOLS_DIR)/pcommand clean_orphaned_assets
@$(PCOMMAND) clean_orphaned_assets
# Build everything needed for x86-64 windows builds.
win-x64:
@$(TOOLS_DIR)/pcommand warm_start_asset_build
@$(PCOMMAND) warm_start_asset_build
@$(MAKE) assets-win-x64
@$(TOOLS_DIR)/pcommand clean_orphaned_assets
@$(PCOMMAND) clean_orphaned_assets
# Build everything needed for our mac xcode builds.
mac:
@$(TOOLS_DIR)/pcommand warm_start_asset_build
@$(PCOMMAND) warm_start_asset_build
@$(MAKE) assets-mac
@$(TOOLS_DIR)/pcommand clean_orphaned_assets
@$(PCOMMAND) clean_orphaned_assets
# Build everything needed for our ios/tvos builds.
ios:
@$(TOOLS_DIR)/pcommand warm_start_asset_build
@$(PCOMMAND) warm_start_asset_build
@$(MAKE) assets-ios
@$(TOOLS_DIR)/pcommand clean_orphaned_assets
@$(PCOMMAND) clean_orphaned_assets
# Build everything needed for android.
android:
@$(TOOLS_DIR)/pcommand warm_start_asset_build
@$(PCOMMAND) warm_start_asset_build
@$(MAKE) assets-android
@$(TOOLS_DIR)/pcommand clean_orphaned_assets
@$(PCOMMAND) clean_orphaned_assets
MAKE_AUDIO = 1
MAKE_TEXTURES = 1
@ -137,9 +150,8 @@ ASSET_TARGETS_WIN_X64 += $(EXTRAS_TARGETS_WIN_X64)
# Note: Code below needs updating when Python version changes (currently 3.11)
define make-opt-pyc-target
$1: $$(subst /__pycache__,,$$(subst .cpython-311.opt-1.pyc,.py,$1))
@echo Compiling script: $$(subst $(BUILD_DIR)/,,$$^)
@rm -rf $$@ && PYTHONHASHSEED=1 \
$$(TOOLS_DIR)/pcommand compile_python_files $$^ && chmod 444 $$@
# @echo Compiling script: $$(subst $(BUILD_DIR)/,,$$^)
@$$(PCOMMANDBATCH) compile_python_file $$^
endef
# This section is generated by batools.assetsmakefile; do not edit by hand.
@ -690,11 +702,8 @@ SCRIPT_TARGETS_PYC_PUBLIC = \
# Rule to copy src asset scripts to dst.
# (and make non-writable so I'm less likely to accidentally edit them there)
$(SCRIPT_TARGETS_PY_PUBLIC) : $(BUILD_DIR)/%.py : %.py
@echo Copying script: $(subst $(BUILD_DIR)/,,$@)
@mkdir -p $(dir $@)
@rm -f $@
@cp $^ $@
@chmod 444 $@
# @echo Copying script: $(subst $(BUILD_DIR)/,,$@)
@$(PCOMMANDBATCH) copy_python_file $^ $@
# These are too complex to define in a pattern rule;
# Instead we generate individual targets in a loop.
@ -772,11 +781,8 @@ SCRIPT_TARGETS_PYC_PUBLIC_TOOLS = \
# Rule to copy src asset scripts to dst.
# (and make non-writable so I'm less likely to accidentally edit them there)
$(SCRIPT_TARGETS_PY_PUBLIC_TOOLS) : $(BUILD_DIR)/ba_data/python/%.py : $(TOOLS_DIR)/%.py
@echo Copying script: $(subst $(BUILD_DIR)/,,$@)
@mkdir -p $(dir $@)
@rm -f $@
@cp $^ $@
@chmod 444 $@
# @echo Copying script: $(subst $(BUILD_DIR)/,,$@)
@$(PCOMMANDBATCH) copy_python_file $^ $@
# These are too complex to define in a pattern rule;
# Instead we generate individual targets in a loop.

View File

@ -263,7 +263,6 @@ class PluginWindow(bui.Window):
def _show_plugins(self) -> None:
# pylint: disable=too-many-locals
# pylint: disable=too-many-branches
# pylint: disable=too-many-statements
plugspecs = bui.app.plugins.plugin_specs
plugstates: dict[str, dict] = bui.app.config.setdefault('Plugins', {})
assert isinstance(plugstates, dict)
@ -301,7 +300,6 @@ class PluginWindow(bui.Window):
else:
# Make sure we handle all cases.
assert_never(self._category)
sub_height = 0
num_shown = 0
for classpath, plugspec in plugspecs_sorted:
@ -316,7 +314,7 @@ class PluginWindow(bui.Window):
show = not enabled
else:
assert_never(self._category)
show = False
# show = False
if not show:
continue

View File

@ -61,7 +61,7 @@ $(PROJ_SRC_DIR)/ballistica/template_fs/mgen/pyembed/binding_template_fs.inc : ba
$(PROJ_SRC_DIR)/ballistica/ui_v1/mgen/pyembed/binding_ui_v1.inc : bauiv1meta/pyembed/binding_ui_v1.py
@$(PCOMMAND) gen_binding_code $< $@
$(PROJ_SRC_DIR)/assets/ba_data/python/babase/_mgen/__init__.py : $(TOOLS_DIR)/batools/pcommand.py
$(PROJ_SRC_DIR)/assets/ba_data/python/babase/_mgen/__init__.py : $(TOOLS_DIR)/batools/pcommands.py
@$(PCOMMAND) gen_python_init_module $@
$(PROJ_SRC_DIR)/assets/ba_data/python/babase/_mgen/enums.py : $(PROJ_DIR)/src/ballistica/shared/foundation/types.h $(TOOLS_DIR)/batools/pythonenumsmodule.py

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,300 @@
/*
Copyright (c) 2009-2017 Dave Gamble and cJSON contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#ifndef cJSON__h
#define cJSON__h
#ifdef __cplusplus
extern "C"
{
#endif
#if !defined(__WINDOWS__) && (defined(WIN32) || defined(WIN64) || defined(_MSC_VER) || defined(_WIN32))
#define __WINDOWS__
#endif
#ifdef __WINDOWS__
/* When compiling for windows, we specify a specific calling convention to avoid issues where we are being called from a project with a different default calling convention. For windows you have 3 define options:
CJSON_HIDE_SYMBOLS - Define this in the case where you don't want to ever dllexport symbols
CJSON_EXPORT_SYMBOLS - Define this on library build when you want to dllexport symbols (default)
CJSON_IMPORT_SYMBOLS - Define this if you want to dllimport symbol
For *nix builds that support visibility attribute, you can define similar behavior by
setting default visibility to hidden by adding
-fvisibility=hidden (for gcc)
or
-xldscope=hidden (for sun cc)
to CFLAGS
then using the CJSON_API_VISIBILITY flag to "export" the same symbols the way CJSON_EXPORT_SYMBOLS does
*/
#define CJSON_CDECL __cdecl
#define CJSON_STDCALL __stdcall
/* export symbols by default, this is necessary for copy pasting the C and header file */
#if !defined(CJSON_HIDE_SYMBOLS) && !defined(CJSON_IMPORT_SYMBOLS) && !defined(CJSON_EXPORT_SYMBOLS)
#define CJSON_EXPORT_SYMBOLS
#endif
#if defined(CJSON_HIDE_SYMBOLS)
#define CJSON_PUBLIC(type) type CJSON_STDCALL
#elif defined(CJSON_EXPORT_SYMBOLS)
#define CJSON_PUBLIC(type) __declspec(dllexport) type CJSON_STDCALL
#elif defined(CJSON_IMPORT_SYMBOLS)
#define CJSON_PUBLIC(type) __declspec(dllimport) type CJSON_STDCALL
#endif
#else /* !__WINDOWS__ */
#define CJSON_CDECL
#define CJSON_STDCALL
#if (defined(__GNUC__) || defined(__SUNPRO_CC) || defined (__SUNPRO_C)) && defined(CJSON_API_VISIBILITY)
#define CJSON_PUBLIC(type) __attribute__((visibility("default"))) type
#else
#define CJSON_PUBLIC(type) type
#endif
#endif
/* project version */
#define CJSON_VERSION_MAJOR 1
#define CJSON_VERSION_MINOR 7
#define CJSON_VERSION_PATCH 16
#include <stddef.h>
/* cJSON Types: */
#define cJSON_Invalid (0)
#define cJSON_False (1 << 0)
#define cJSON_True (1 << 1)
#define cJSON_NULL (1 << 2)
#define cJSON_Number (1 << 3)
#define cJSON_String (1 << 4)
#define cJSON_Array (1 << 5)
#define cJSON_Object (1 << 6)
#define cJSON_Raw (1 << 7) /* raw json */
#define cJSON_IsReference 256
#define cJSON_StringIsConst 512
/* The cJSON structure: */
typedef struct cJSON
{
/* next/prev allow you to walk array/object chains. Alternatively, use GetArraySize/GetArrayItem/GetObjectItem */
struct cJSON *next;
struct cJSON *prev;
/* An array or object item will have a child pointer pointing to a chain of the items in the array/object. */
struct cJSON *child;
/* The type of the item, as above. */
int type;
/* The item's string, if type==cJSON_String and type == cJSON_Raw */
char *valuestring;
/* writing to valueint is DEPRECATED, use cJSON_SetNumberValue instead */
int valueint;
/* The item's number, if type==cJSON_Number */
double valuedouble;
/* The item's name string, if this item is the child of, or is in the list of subitems of an object. */
char *string;
} cJSON;
typedef struct cJSON_Hooks
{
/* malloc/free are CDECL on Windows regardless of the default calling convention of the compiler, so ensure the hooks allow passing those functions directly. */
void *(CJSON_CDECL *malloc_fn)(size_t sz);
void (CJSON_CDECL *free_fn)(void *ptr);
} cJSON_Hooks;
typedef int cJSON_bool;
/* Limits how deeply nested arrays/objects can be before cJSON rejects to parse them.
* This is to prevent stack overflows. */
#ifndef CJSON_NESTING_LIMIT
#define CJSON_NESTING_LIMIT 1000
#endif
/* returns the version of cJSON as a string */
CJSON_PUBLIC(const char*) cJSON_Version(void);
/* Supply malloc, realloc and free functions to cJSON */
CJSON_PUBLIC(void) cJSON_InitHooks(cJSON_Hooks* hooks);
/* Memory Management: the caller is always responsible to free the results from all variants of cJSON_Parse (with cJSON_Delete) and cJSON_Print (with stdlib free, cJSON_Hooks.free_fn, or cJSON_free as appropriate). The exception is cJSON_PrintPreallocated, where the caller has full responsibility of the buffer. */
/* Supply a block of JSON, and this returns a cJSON object you can interrogate. */
CJSON_PUBLIC(cJSON *) cJSON_Parse(const char *value);
CJSON_PUBLIC(cJSON *) cJSON_ParseWithLength(const char *value, size_t buffer_length);
/* ParseWithOpts allows you to require (and check) that the JSON is null terminated, and to retrieve the pointer to the final byte parsed. */
/* If you supply a ptr in return_parse_end and parsing fails, then return_parse_end will contain a pointer to the error so will match cJSON_GetErrorPtr(). */
CJSON_PUBLIC(cJSON *) cJSON_ParseWithOpts(const char *value, const char **return_parse_end, cJSON_bool require_null_terminated);
CJSON_PUBLIC(cJSON *) cJSON_ParseWithLengthOpts(const char *value, size_t buffer_length, const char **return_parse_end, cJSON_bool require_null_terminated);
/* Render a cJSON entity to text for transfer/storage. */
CJSON_PUBLIC(char *) cJSON_Print(const cJSON *item);
/* Render a cJSON entity to text for transfer/storage without any formatting. */
CJSON_PUBLIC(char *) cJSON_PrintUnformatted(const cJSON *item);
/* Render a cJSON entity to text using a buffered strategy. prebuffer is a guess at the final size. guessing well reduces reallocation. fmt=0 gives unformatted, =1 gives formatted */
CJSON_PUBLIC(char *) cJSON_PrintBuffered(const cJSON *item, int prebuffer, cJSON_bool fmt);
/* Render a cJSON entity to text using a buffer already allocated in memory with given length. Returns 1 on success and 0 on failure. */
/* NOTE: cJSON is not always 100% accurate in estimating how much memory it will use, so to be safe allocate 5 bytes more than you actually need */
CJSON_PUBLIC(cJSON_bool) cJSON_PrintPreallocated(cJSON *item, char *buffer, const int length, const cJSON_bool format);
/* Delete a cJSON entity and all subentities. */
CJSON_PUBLIC(void) cJSON_Delete(cJSON *item);
/* Returns the number of items in an array (or object). */
CJSON_PUBLIC(int) cJSON_GetArraySize(const cJSON *array);
/* Retrieve item number "index" from array "array". Returns NULL if unsuccessful. */
CJSON_PUBLIC(cJSON *) cJSON_GetArrayItem(const cJSON *array, int index);
/* Get item "string" from object. Case insensitive. */
CJSON_PUBLIC(cJSON *) cJSON_GetObjectItem(const cJSON * const object, const char * const string);
CJSON_PUBLIC(cJSON *) cJSON_GetObjectItemCaseSensitive(const cJSON * const object, const char * const string);
CJSON_PUBLIC(cJSON_bool) cJSON_HasObjectItem(const cJSON *object, const char *string);
/* For analysing failed parses. This returns a pointer to the parse error. You'll probably need to look a few chars back to make sense of it. Defined when cJSON_Parse() returns 0. 0 when cJSON_Parse() succeeds. */
CJSON_PUBLIC(const char *) cJSON_GetErrorPtr(void);
/* Check item type and return its value */
CJSON_PUBLIC(char *) cJSON_GetStringValue(const cJSON * const item);
CJSON_PUBLIC(double) cJSON_GetNumberValue(const cJSON * const item);
/* These functions check the type of an item */
CJSON_PUBLIC(cJSON_bool) cJSON_IsInvalid(const cJSON * const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsFalse(const cJSON * const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsTrue(const cJSON * const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsBool(const cJSON * const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsNull(const cJSON * const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsNumber(const cJSON * const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsString(const cJSON * const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsArray(const cJSON * const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsObject(const cJSON * const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsRaw(const cJSON * const item);
/* These calls create a cJSON item of the appropriate type. */
CJSON_PUBLIC(cJSON *) cJSON_CreateNull(void);
CJSON_PUBLIC(cJSON *) cJSON_CreateTrue(void);
CJSON_PUBLIC(cJSON *) cJSON_CreateFalse(void);
CJSON_PUBLIC(cJSON *) cJSON_CreateBool(cJSON_bool boolean);
CJSON_PUBLIC(cJSON *) cJSON_CreateNumber(double num);
CJSON_PUBLIC(cJSON *) cJSON_CreateString(const char *string);
/* raw json */
CJSON_PUBLIC(cJSON *) cJSON_CreateRaw(const char *raw);
CJSON_PUBLIC(cJSON *) cJSON_CreateArray(void);
CJSON_PUBLIC(cJSON *) cJSON_CreateObject(void);
/* Create a string where valuestring references a string so
* it will not be freed by cJSON_Delete */
CJSON_PUBLIC(cJSON *) cJSON_CreateStringReference(const char *string);
/* Create an object/array that only references its elements so
 * they will not be freed by cJSON_Delete */
CJSON_PUBLIC(cJSON *) cJSON_CreateObjectReference(const cJSON *child);
CJSON_PUBLIC(cJSON *) cJSON_CreateArrayReference(const cJSON *child);
/* These utilities create an Array of count items.
* The parameter count cannot be greater than the number of elements in the number array, otherwise array access will be out of bounds.*/
CJSON_PUBLIC(cJSON *) cJSON_CreateIntArray(const int *numbers, int count);
CJSON_PUBLIC(cJSON *) cJSON_CreateFloatArray(const float *numbers, int count);
CJSON_PUBLIC(cJSON *) cJSON_CreateDoubleArray(const double *numbers, int count);
CJSON_PUBLIC(cJSON *) cJSON_CreateStringArray(const char *const *strings, int count);
/* Append item to the specified array/object. */
CJSON_PUBLIC(cJSON_bool) cJSON_AddItemToArray(cJSON *array, cJSON *item);
CJSON_PUBLIC(cJSON_bool) cJSON_AddItemToObject(cJSON *object, const char *string, cJSON *item);
/* Use this when string is definitely const (i.e. a literal, or as good as), and will definitely survive the cJSON object.
 * WARNING: When this function is used, make sure to always check that (item->type & cJSON_StringIsConst) is zero before
 * writing to `item->string` */
CJSON_PUBLIC(cJSON_bool) cJSON_AddItemToObjectCS(cJSON *object, const char *string, cJSON *item);
/* Append reference to item to the specified array/object. Use this when you want to add an existing cJSON to a new cJSON, but don't want to corrupt your existing cJSON. */
CJSON_PUBLIC(cJSON_bool) cJSON_AddItemReferenceToArray(cJSON *array, cJSON *item);
CJSON_PUBLIC(cJSON_bool) cJSON_AddItemReferenceToObject(cJSON *object, const char *string, cJSON *item);
/* Remove/Detach items from Arrays/Objects. */
CJSON_PUBLIC(cJSON *) cJSON_DetachItemViaPointer(cJSON *parent, cJSON * const item);
CJSON_PUBLIC(cJSON *) cJSON_DetachItemFromArray(cJSON *array, int which);
CJSON_PUBLIC(void) cJSON_DeleteItemFromArray(cJSON *array, int which);
CJSON_PUBLIC(cJSON *) cJSON_DetachItemFromObject(cJSON *object, const char *string);
CJSON_PUBLIC(cJSON *) cJSON_DetachItemFromObjectCaseSensitive(cJSON *object, const char *string);
CJSON_PUBLIC(void) cJSON_DeleteItemFromObject(cJSON *object, const char *string);
CJSON_PUBLIC(void) cJSON_DeleteItemFromObjectCaseSensitive(cJSON *object, const char *string);
/* Update array items. */
CJSON_PUBLIC(cJSON_bool) cJSON_InsertItemInArray(cJSON *array, int which, cJSON *newitem); /* Shifts pre-existing items to the right. */
CJSON_PUBLIC(cJSON_bool) cJSON_ReplaceItemViaPointer(cJSON * const parent, cJSON * const item, cJSON * replacement);
CJSON_PUBLIC(cJSON_bool) cJSON_ReplaceItemInArray(cJSON *array, int which, cJSON *newitem);
CJSON_PUBLIC(cJSON_bool) cJSON_ReplaceItemInObject(cJSON *object,const char *string,cJSON *newitem);
CJSON_PUBLIC(cJSON_bool) cJSON_ReplaceItemInObjectCaseSensitive(cJSON *object,const char *string,cJSON *newitem);
/* Duplicate a cJSON item */
CJSON_PUBLIC(cJSON *) cJSON_Duplicate(const cJSON *item, cJSON_bool recurse);
/* Duplicate will create a new, identical cJSON item to the one you pass, in new memory that will
* need to be released. With recurse!=0, it will duplicate any children connected to the item.
* The item->next and ->prev pointers are always zero on return from Duplicate. */
/* Recursively compare two cJSON items for equality. If either a or b is NULL or invalid, they will be considered unequal.
* case_sensitive determines if object keys are treated case sensitive (1) or case insensitive (0) */
CJSON_PUBLIC(cJSON_bool) cJSON_Compare(const cJSON * const a, const cJSON * const b, const cJSON_bool case_sensitive);
/* Minify a string: remove whitespace characters (such as ' ', '\t', '\r', '\n') from it.
 * The input pointer json cannot point to a read-only address area, such as a string constant,
 * but should point to a readable and writable address area. */
CJSON_PUBLIC(void) cJSON_Minify(char *json);
/* Helper functions for creating and adding items to an object at the same time.
* They return the added item or NULL on failure. */
CJSON_PUBLIC(cJSON*) cJSON_AddNullToObject(cJSON * const object, const char * const name);
CJSON_PUBLIC(cJSON*) cJSON_AddTrueToObject(cJSON * const object, const char * const name);
CJSON_PUBLIC(cJSON*) cJSON_AddFalseToObject(cJSON * const object, const char * const name);
CJSON_PUBLIC(cJSON*) cJSON_AddBoolToObject(cJSON * const object, const char * const name, const cJSON_bool boolean);
CJSON_PUBLIC(cJSON*) cJSON_AddNumberToObject(cJSON * const object, const char * const name, const double number);
CJSON_PUBLIC(cJSON*) cJSON_AddStringToObject(cJSON * const object, const char * const name, const char * const string);
CJSON_PUBLIC(cJSON*) cJSON_AddRawToObject(cJSON * const object, const char * const name, const char * const raw);
CJSON_PUBLIC(cJSON*) cJSON_AddObjectToObject(cJSON * const object, const char * const name);
CJSON_PUBLIC(cJSON*) cJSON_AddArrayToObject(cJSON * const object, const char * const name);
/* When assigning an integer value, it needs to be propagated to valuedouble too. */
#define cJSON_SetIntValue(object, number) ((object) ? (object)->valueint = (object)->valuedouble = (number) : (number))
/* helper for the cJSON_SetNumberValue macro */
CJSON_PUBLIC(double) cJSON_SetNumberHelper(cJSON *object, double number);
/* Both macro arguments are fully parenthesized so that expression
 * arguments (e.g. `n + 1`) are cast and tested as a whole; the previous
 * `(double)number` form would cast only the first token of the
 * expression. */
#define cJSON_SetNumberValue(object, number) (((object) != NULL) ? cJSON_SetNumberHelper(object, (double)(number)) : (number))
/* Change the valuestring of a cJSON_String object, only takes effect when type of object is cJSON_String */
CJSON_PUBLIC(char*) cJSON_SetValuestring(cJSON *object, const char *valuestring);
/* If the object is not a boolean type this does nothing and returns cJSON_Invalid else it returns the new type*/
#define cJSON_SetBoolValue(object, boolValue) ( \
    ((object) != NULL && ((object)->type & (cJSON_False|cJSON_True))) ? \
    (object)->type=((object)->type &(~(cJSON_False|cJSON_True)))|((boolValue)?cJSON_True:cJSON_False) : \
    cJSON_Invalid\
)
/* Macro for iterating over an array or object */
#define cJSON_ArrayForEach(element, array) for(element = ((array) != NULL) ? (array)->child : NULL; element != NULL; element = element->next)
/* malloc/free objects using the malloc/free functions that have been set with cJSON_InitHooks */
CJSON_PUBLIC(void *) cJSON_malloc(size_t size);
CJSON_PUBLIC(void) cJSON_free(void *object);
#ifdef __cplusplus
}
#endif
#endif

View File

@ -0,0 +1,464 @@
// Released under the MIT License. See LICENSE for details.
// An ultra-simple client app to forward commands to a pcommand server. This
// lets us run *lots* of small pcommands very fast. Normally the limiting
// factor in such cases is the startup time of Python which this mostly
// eliminates. See tools/efrotools/pcommandbatch.py for more info.
#include <arpa/inet.h>
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <netinet/in.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/param.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <time.h>
#include <unistd.h>
#include "cJSON.h"
// Everything a single client invocation needs to talk to a batch server.
struct Context_ {
  // Directory holding server state/log files (set by calc_paths_).
  const char* state_dir_path;
  // Identifies the launch location; "root" or "assets" (set by calc_paths_).
  const char* instance_prefix;
  // Random instance index (0-5) used to spread load across servers.
  int instance_num;
  // Our process id; included in log output to tell clients apart.
  int pid;
  // Nonzero enables extra client-side logging.
  int verbose;
  // Nonzero enables debug mode (implies verbose; server output goes to
  // stderr instead of a log file).
  int debug;
  // Idle-timeout passed to the server; state files older than this are
  // considered stale.
  int server_idle_seconds;
  // Path to the pcommand script used to spin up servers.
  const char* pcommandpath;
  // Connected socket fd to the batch server (set in main).
  int sockfd;
};
// Forward declarations; see each definition for its return convention.
int path_exists_(const char* path);
int establish_connection_(const struct Context_* ctx);
int calc_paths_(struct Context_* ctx);
int send_command_(struct Context_* ctx, int argc, char** argv);
int handle_response_(const struct Context_* ctx);
int get_running_server_port_(const struct Context_* ctx,
                             const char* state_file_path_full);
// Entry point: forward our full arg list to a pcommand batch server and
// relay its result. Exits 0 on success, 1 on any failure.
int main(int argc, char** argv) {
  struct Context_ ctx;
  memset(&ctx, 0, sizeof(ctx));
  ctx.state_dir_path = NULL;
  ctx.instance_prefix = NULL;
  ctx.pcommandpath = NULL;
  ctx.server_idle_seconds = 5;
  ctx.pid = getpid();

  // Verbose mode enables more printing here. Debug mode enables that plus
  // extra stuff. The extra stuff is mostly the server side though.
  const char* debug_env = getenv("BA_PCOMMANDBATCH_DEBUG");
  const char* verbose_env = getenv("BA_PCOMMANDBATCH_VERBOSE");
  ctx.debug = debug_env && !strcmp(debug_env, "1");
  ctx.verbose = ctx.debug || (verbose_env && !strcmp(verbose_env, "1"));

  // Seed rand() using the current time in microseconds.
  struct timeval now;
  gettimeofday(&now, NULL);
  srand((unsigned int)now.tv_usec);

  // Figure out which file path we'll use to get server state.
  if (calc_paths_(&ctx) != 0) {
    return 1;
  }

  // Establish communication with said server (spinning it up if needed).
  ctx.sockfd = establish_connection_(&ctx);
  if (ctx.sockfd == -1) {
    return 1;
  }

  // Ship our args over and relay the response; any nonzero result code
  // from the batched command maps to exit code 1.
  if (send_command_(&ctx, argc, argv) != 0) {
    return 1;
  }
  if (handle_response_(&ctx) != 0) {
    return 1;
  }

  if (close(ctx.sockfd) != 0) {
    fprintf(
        stderr,
        "Error: pcommandbatch client %s_%d (pid %d): error on socket close.\n",
        ctx.instance_prefix, ctx.instance_num, ctx.pid);
    return 1;
  }
  return 0;
}
// If a valid state file is present at the provided path and not older than
// server_idle_seconds, return the server port it contains as an int.
// Otherwise return -1.
int get_running_server_port_(const struct Context_* ctx,
                             const char* state_file_path_full) {
  struct stat file_stat;
  time_t current_time = time(NULL);
  if (current_time == (time_t)-1) {
    perror("time");
    return -1;
  }
  int fd = open(state_file_path_full, O_RDONLY);
  if (fd < 0) {
    // No state file; no running server.
    return -1;
  }
  if (fstat(fd, &file_stat) == -1) {
    close(fd);
    return -1;
  }
  int age_seconds = current_time - file_stat.st_mtime;
  if (ctx->verbose) {
    if (age_seconds <= ctx->server_idle_seconds) {
      fprintf(
          stderr,
          "pcommandbatch client %s_%d (pid %d) found state file with age %d at "
          "time %ld.\n",
          ctx->instance_prefix, ctx->instance_num, ctx->pid, age_seconds,
          time(NULL));
    }
  }
  if (age_seconds > ctx->server_idle_seconds) {
    // Stale; server has presumably hit its idle timeout and exited.
    close(fd);
    return -1;
  } else if (age_seconds < 0) {
    // Clock skew or mtime in the future; log it (was missing a newline).
    fprintf(stderr, "pcommandbatch got negative age; unexpected.\n");
  }
  char buf[256];
  ssize_t amt = read(fd, buf, sizeof(buf) - 1);
  close(fd);
  if (amt == -1 || amt == sizeof(buf) - 1) {
    // Read error, or file suspiciously filled our buffer; treat as invalid.
    return -1;
  }
  buf[amt] = 0;  // Null-terminate it.
  cJSON* state_dict = cJSON_Parse(buf);
  if (!state_dict) {
    fprintf(stderr,
            "Error: pcommandbatch client %s_%d (pid %d): failed to parse state "
            "value.\n",
            ctx->instance_prefix, ctx->instance_num, ctx->pid);
    return -1;
  }
  // Pull the server's port ("p") out of the state dict.
  cJSON* port_obj = cJSON_GetObjectItem(state_dict, "p");
  if (!port_obj || !cJSON_IsNumber(port_obj)) {
    fprintf(stderr,
            "Error: pcommandbatch client %s_%d (pid %d): failed to get port "
            "value from state.\n",
            ctx->instance_prefix, ctx->instance_num, ctx->pid);
    cJSON_Delete(state_dict);
    return -1;
  }
  int port = cJSON_GetNumberValue(port_obj);
  cJSON_Delete(state_dict);
  return port;
}
// Return 1 if anything (file, dir, etc.) exists at `path`, else 0.
int path_exists_(const char* path) {
  struct stat info;
  return stat(path, &info) == 0;
}
// Connect to the batch server for our instance, spinning one up if
// necessary. Returns a connected socket fd, or -1 on failure.
int establish_connection_(const struct Context_* ctx) {
  char state_file_path_full[256];
  snprintf(state_file_path_full, sizeof(state_file_path_full),
           "%s/worker_state_%s_%d", ctx->state_dir_path, ctx->instance_prefix,
           ctx->instance_num);
  int sockfd = 0;
  if ((sockfd = socket(AF_INET, SOCK_STREAM, 0)) < 0) {
    fprintf(stderr,
            "Error: pcommandbatch client %s_%d (pid %d): could not create "
            "socket.\n",
            ctx->instance_prefix, ctx->instance_num, ctx->pid);
    return -1;
  }
  // On Mac I'm running into EADDRNOTAVAIL errors if I spit out too many
  // requests in a short enough period of time. I'm guessing its exhausting
  // free ports when cooldown time is taken into account. Sleeping and
  // trying again in a moment seems to work.
  int retry_attempt = 0;
  int retry_sleep_secs = 1;
  while (1) {
    // First look for an already-running batch server.
    int port = get_running_server_port_(ctx, state_file_path_full);
    if (port == -1) {
      // Ok; no running server. Spin one up.
      if (ctx->verbose) {
        fprintf(stderr,
                "pcommandbatch client %s_%d (pid %d) requesting batch server "
                "spinup...\n",
                ctx->instance_prefix, ctx->instance_num, ctx->pid);
      }
      // In non-debug-mode, route to a log file.
      char endbuf[512];
      if (ctx->debug) {
        snprintf(endbuf, sizeof(endbuf), " &");
      } else {
        snprintf(endbuf, sizeof(endbuf), " >>%s/worker_log_%s_%d 2>&1 &",
                 ctx->state_dir_path, ctx->instance_prefix, ctx->instance_num);
      }
      char buf[512];
      snprintf(buf, sizeof(buf),
               "%s run_pcommandbatch_server --timeout %d --state-dir %s "
               "--instance %s_%d %s",
               ctx->pcommandpath, ctx->server_idle_seconds, ctx->state_dir_path,
               ctx->instance_prefix, ctx->instance_num, endbuf);
      // The command backgrounds itself, so a nonzero status here means the
      // shell itself failed to launch it (previously this was ignored).
      if (system(buf) != 0 && ctx->verbose) {
        fprintf(stderr,
                "pcommandbatch client %s_%d (pid %d): server spinup command "
                "returned nonzero status.\n",
                ctx->instance_prefix, ctx->instance_num, ctx->pid);
      }
      // Spin and wait up to a few seconds for the file to appear.
      time_t start_time = time(NULL);
      int cycles = 0;
      while (time(NULL) - start_time < 5) {
        port = get_running_server_port_(ctx, state_file_path_full);
        if (port != -1) {
          break;
        }
        usleep(10000);
        cycles += 1;
      }
      if (ctx->verbose) {
        fprintf(stderr,
                "pcommandbatch client %s_%d (pid %d) waited %d"
                " cycles for state file to appear at '%s'.\n",
                ctx->instance_prefix, ctx->instance_num, ctx->pid, cycles,
                state_file_path_full);
      }
      if (port == -1) {
        // We failed but we can retry.
        if (ctx->verbose) {
          fprintf(stderr,
                  "Error: pcommandbatch client %s_%d (pid %d): failed to open "
                  "server on attempt %d.\n",
                  ctx->instance_prefix, ctx->instance_num, ctx->pid,
                  retry_attempt);
        }
      }
    }
    // Ok we got a port; now try to connect to it.
    if (port != -1) {
      if (ctx->verbose) {
        fprintf(
            stderr,
            "pcommandbatch client %s_%d (pid %d) will use server on port %d at "
            "time %ld.\n",
            ctx->instance_prefix, ctx->instance_num, ctx->pid, port,
            time(NULL));
      }
      struct sockaddr_in serv_addr;
      // Zero-fill the address (note: plain 0, not the character '0' the
      // original passed; that only worked because every meaningful field
      // is assigned below).
      memset(&serv_addr, 0, sizeof(serv_addr));
      serv_addr.sin_family = AF_INET;
      serv_addr.sin_port = htons(port);
      serv_addr.sin_addr.s_addr = inet_addr("127.0.0.1");
      int cresult =
          connect(sockfd, (struct sockaddr*)&serv_addr, sizeof(serv_addr));
      if (cresult == 0) {
        break;
      } else if (errno == EADDRNOTAVAIL) {
        if (ctx->verbose) {
          fprintf(stderr,
                  "pcommandbatch client %s_%d (pid %d): got EADDRNOTAVAIL"
                  " on connect attempt %d.\n",
                  ctx->instance_prefix, ctx->instance_num, ctx->pid,
                  retry_attempt + 1);
        }
      } else {
        // Currently not retrying on other errors.
        fprintf(
            stderr,
            "Error: pcommandbatch client %s_%d (pid %d): connect failed (errno "
            "%d).\n",
            ctx->instance_prefix, ctx->instance_num, ctx->pid, errno);
        close(sockfd);
        return -1;
      }
    }
    if (retry_attempt >= 10) {
      fprintf(stderr,
              "Error: pcommandbatch client %s_%d (pid %d): too many "
              "retry attempts; giving up.\n",
              ctx->instance_prefix, ctx->instance_num, ctx->pid);
      close(sockfd);
      return -1;
    }
    if (ctx->verbose) {
      fprintf(
          stderr,
          "pcommandbatch client %s_%d (pid %d) connection attempt %d failed;"
          " will sleep %d secs and try again.\n",
          ctx->instance_prefix, ctx->instance_num, ctx->pid, retry_attempt + 1,
          retry_sleep_secs);
    }
    sleep(retry_sleep_secs);
    retry_attempt += 1;
    retry_sleep_secs *= 2;
  }
  return sockfd;
}
// Fill in state-dir/instance/pcommand paths based on the cwd we were run
// from. Returns 0 on success, -1 if the cwd is unsupported.
int calc_paths_(struct Context_* ctx) {
  // Because the server needs to be in the same cwd as we are for things to
  // work, we only support a specific few locations to run from. Currently
  // this is project-root and src/assets.
  if (path_exists_("config/projectconfig.json")) {
    // Looks like we're in project root.
    ctx->state_dir_path = ".cache/pcommandbatch";
    ctx->instance_prefix = "root";
    ctx->pcommandpath = "tools/pcommand";
  } else if (path_exists_("ba_data")
             && path_exists_("../../config/projectconfig.json")) {
    // Looks like we're in src/assets.
    ctx->state_dir_path = "../../.cache/pcommandbatch";
    ctx->instance_prefix = "assets";
    ctx->pcommandpath = "../../tools/pcommand";
  }
  if (ctx->state_dir_path == NULL) {
    // Unsupported cwd. Note that instance_prefix is still NULL here, so it
    // must not be passed to %s (that is undefined behavior); these
    // messages identify the client by pid only.
    char cwdbuf[MAXPATHLEN];
    // getcwd() returns a pointer (NULL on failure); the previous `< 0`
    // pointer comparison was never a valid error test.
    if (getcwd(cwdbuf, sizeof(cwdbuf)) == NULL) {
      fprintf(stderr,
              "Error: pcommandbatch client (pid %d): unable to get cwd.\n",
              ctx->pid);
      return -1;
    }
    fprintf(stderr,
            "Error: pcommandbatch client (pid %d): pcommandbatch from cwd "
            "'%s' is not supported.\n",
            ctx->pid, cwdbuf);
    return -1;
  }
  assert(ctx->pcommandpath != NULL);
  assert(ctx->instance_prefix != NULL);
  // Spread requests for each location out randomly across a few instances.
  // This greatly increases scalability though is probably wasteful when
  // running just a few commands. Maybe there's some way to smartly scale
  // this. The best setup might be to have a single 'controller' server
  // instance that spins up worker instances as needed. Though such a fancy
  // setup might be overkill.
  ctx->instance_num = rand() % 6;
  return 0;
}
// Serialize argv as a JSON array and send it to the server, then shut down
// our write side so the server sees EOF. Returns 0 on success, -1 on error.
int send_command_(struct Context_* ctx, int argc, char** argv) {
  // Build a json array of our args.
  cJSON* array = cJSON_CreateArray();
  for (int i = 0; i < argc; ++i) {
    cJSON_AddItemToArray(array, cJSON_CreateString(argv[i]));
  }
  char* json_out = cJSON_Print(array);
  // The tree is fully rendered; free it now so no error path below can
  // leak it (previously both it and json_out leaked on errors).
  cJSON_Delete(array);
  if (json_out == NULL) {
    fprintf(stderr,
            "Error: pcommandbatch client %s_%d (pid %d): failed to serialize "
            "args.\n",
            ctx->instance_prefix, ctx->instance_num, ctx->pid);
    return -1;
  }
  // Send our command.
  ssize_t msglen = (ssize_t)strlen(json_out);
  if (write(ctx->sockfd, json_out, msglen) != msglen) {
    fprintf(stderr,
            "Error: pcommandbatch client %s_%d (pid %d): write failed.\n",
            ctx->instance_prefix, ctx->instance_num, ctx->pid);
    free(json_out);
    return -1;
  }
  free(json_out);
  // Issue a write shutdown so they get EOF on the other end.
  if (shutdown(ctx->sockfd, SHUT_WR) < 0) {
    fprintf(
        stderr,
        "Error: pcommandbatch client %s_%d (pid %d): write shutdown failed.\n",
        ctx->instance_prefix, ctx->instance_num, ctx->pid);
    return -1;
  }
  return 0;
}
// Read and process the server's JSON response: print any returned output
// ("o") and return the command's result code ("r"), or -1 on error.
int handle_response_(const struct Context_* ctx) {
  // The server closes its side when done, so read to EOF; a single read()
  // could return a partial response. Currently expecting short-ish
  // responses only; will have to revisit this if/when they get long.
  char inbuf[512];
  size_t total = 0;
  for (;;) {
    if (total >= sizeof(inbuf) - 1) {
      // Response filled our buffer; treat as an error (same policy as the
      // original single-read version).
      fprintf(stderr,
              "Error: pcommandbatch client %s_%d (pid %d): failed to read "
              "result (errno %d).\n",
              ctx->instance_prefix, ctx->instance_num, ctx->pid, errno);
      close(ctx->sockfd);
      return -1;
    }
    ssize_t amt = read(ctx->sockfd, inbuf + total, sizeof(inbuf) - 1 - total);
    if (amt < 0) {
      fprintf(stderr,
              "Error: pcommandbatch client %s_%d (pid %d): failed to read "
              "result (errno %d).\n",
              ctx->instance_prefix, ctx->instance_num, ctx->pid, errno);
      close(ctx->sockfd);
      return -1;
    }
    if (amt == 0) {
      break;  // EOF; we have the full response.
    }
    total += (size_t)amt;
  }
  if (ctx->verbose) {
    fprintf(stderr,
            "pcommandbatch client %s_%d (pid %d) read %zu byte response.\n",
            ctx->instance_prefix, ctx->instance_num, ctx->pid, total);
  }
  inbuf[total] = 0;  // null terminate result str.
  cJSON* result_dict = cJSON_Parse(inbuf);
  if (!result_dict) {
    fprintf(
        stderr,
        "Error: pcommandbatch client %s_%d (pid %d): failed to parse result "
        "value.\n",
        ctx->instance_prefix, ctx->instance_num, ctx->pid);
    return -1;
  }
  // If results included output, print it.
  cJSON* result_output = cJSON_GetObjectItem(result_dict, "o");
  if (!result_output || !cJSON_IsString(result_output)) {
    fprintf(
        stderr,
        "Error: pcommandbatch client %s_%d (pid %d): failed to parse result "
        "output value.\n",
        ctx->instance_prefix, ctx->instance_num, ctx->pid);
    cJSON_Delete(result_dict);  // Was leaked on this path.
    return -1;
  }
  char* output_str = cJSON_GetStringValue(result_output);
  assert(output_str);
  if (output_str[0] != 0) {
    printf("%s", output_str);
  }
  cJSON* result_code = cJSON_GetObjectItem(result_dict, "r");
  if (!result_code || !cJSON_IsNumber(result_code)) {
    fprintf(
        stderr,
        "Error: pcommandbatch client %s_%d (pid %d): failed to parse result "
        "code value.\n",
        ctx->instance_prefix, ctx->instance_num, ctx->pid);
    cJSON_Delete(result_dict);  // Was leaked on this path.
    return -1;
  }
  int result_val = cJSON_GetNumberValue(result_code);
  if (ctx->verbose) {
    fprintf(stderr, "pcommandbatch client %s_%d (pid %d) final result is %d.\n",
            ctx->instance_prefix, ctx->instance_num, ctx->pid, result_val);
  }
  cJSON_Delete(result_dict);
  return result_val;
}

View File

@ -265,13 +265,22 @@ def _get_py_targets_subset(
'# (and make non-writable so I\'m less likely to '
'accidentally edit them there)\n'
f'{efc}$(SCRIPT_TARGETS_PY{suffix}) : {copyrule}\n'
'\t@echo Copying script: $(subst $(BUILD_DIR)/,,$@)\n'
'\t@mkdir -p $(dir $@)\n'
'\t@rm -f $@\n'
'\t@cp $^ $@\n'
'\t@chmod 444 $@\n'
'#\t@echo Copying script: $(subst $(BUILD_DIR)/,,$@)\n'
'\t@$(PCOMMANDBATCH) copy_python_file $^ $@\n'
)
# out += (
# '\n# Rule to copy src asset scripts to dst.\n'
# '# (and make non-writable so I\'m less likely to '
# 'accidentally edit them there)\n'
# f'{efc}$(SCRIPT_TARGETS_PY{suffix}) : {copyrule}\n'
# '\t@echo Copying script: $(subst $(BUILD_DIR)/,,$@)\n'
# '\t@mkdir -p $(dir $@)\n'
# '\t@rm -f $@\n'
# '\t@cp $^ $@\n'
# '\t@chmod 444 $@\n'
# )
# Fancy new simple loop-based target generation.
out += (
f'\n# These are too complex to define in a pattern rule;\n'
@ -301,7 +310,7 @@ def _get_py_targets_subset(
+ py_targets[i]
+ '\n\t@echo Compiling script: $(subst $(BUILD_DIR),,$^)\n'
'\t@rm -rf $@ && PYTHONHASHSEED=1 $(TOOLS_DIR)/pcommand'
' compile_python_files $^'
' compile_python_file $^'
' && chmod 444 $@\n'
)

View File

@ -32,32 +32,37 @@ class PyRequirement:
# entries; this accounts for manual installations or other nonstandard
# setups.
# Note 2: That is probably overkill. We can probably just replace this
# with a simple requirements.txt file, can't we? Feels like we're mostly
# reinventing the wheel here. We just need a clean way to check/list
# missing stuff without necessarily installing it. And as far as
# manually-installed bits, pip itself must have some way to allow for
# that, right?...
# Note 2: That is probably unnecessary. I'm certainly not using it. We
# can probably just replace this with a simple requirements.txt file,
# can't we? Feels like we're mostly reinventing the wheel here. We just
# need a clean way to check/list missing stuff without necessarily
# installing it. And as far as manually-installed bits, pip itself must
# have some way to allow for that, right?...
# Note 3: Have transitioned all these to pipname only; can at least
# remove our custom module based stuff soon if nobody complains, which
# would free us to theoretically move to a requirements.txt based setup.
PY_REQUIREMENTS = [
PyRequirement(modulename='pylint', minversion=[2, 17, 3]),
PyRequirement(modulename='mypy', minversion=[1, 2, 0]),
PyRequirement(modulename='cpplint', minversion=[1, 6, 1]),
PyRequirement(modulename='pytest', minversion=[7, 3, 1]),
PyRequirement(modulename='pytz'),
PyRequirement(modulename='ansiwrap'),
PyRequirement(modulename='yaml', pipname='PyYAML'),
PyRequirement(modulename='requests'),
PyRequirement(modulename='pdoc'),
PyRequirement(pipname='black', minversion=[23, 3, 0]),
PyRequirement(pipname='typing_extensions', minversion=[4, 5, 0]),
PyRequirement(pipname='pylint', minversion=[2, 17, 5]),
PyRequirement(pipname='mypy', minversion=[1, 4, 1]),
PyRequirement(pipname='cpplint', minversion=[1, 6, 1]),
PyRequirement(pipname='pytest', minversion=[7, 4, 0]),
PyRequirement(pipname='pytz', minversion=[2023, 3]),
PyRequirement(pipname='ansiwrap', minversion=[0, 8, 4]),
PyRequirement(pipname='requests', minversion=[2, 31, 0]),
PyRequirement(pipname='pdoc', minversion=[14, 0, 0]),
PyRequirement(pipname='PyYAML', minversion=[6, 0, 1]),
PyRequirement(pipname='black', minversion=[23, 7, 0]),
PyRequirement(pipname='typing_extensions', minversion=[4, 7, 1]),
PyRequirement(pipname='types-filelock', minversion=[3, 2, 7]),
PyRequirement(pipname='types-requests', minversion=[2, 28, 11, 17]),
PyRequirement(pipname='types-requests', minversion=[2, 31, 0, 2]),
PyRequirement(pipname='types-pytz', minversion=[2023, 3, 0, 0]),
PyRequirement(pipname='types-PyYAML', minversion=[6, 0, 12, 9]),
PyRequirement(pipname='certifi', minversion=[2022, 12, 7]),
PyRequirement(pipname='types-PyYAML', minversion=[6, 0, 12, 11]),
PyRequirement(pipname='certifi', minversion=[2023, 7, 22]),
PyRequirement(pipname='types-certifi', minversion=[2021, 10, 8, 3]),
PyRequirement(pipname='pbxproj', minversion=[3, 5, 0]),
PyRequirement(pipname='filelock', minversion=[3, 12, 0]),
PyRequirement(pipname='filelock', minversion=[3, 12, 2]),
PyRequirement(pipname='python-daemon', minversion=[3, 0, 1]),
]
@ -207,7 +212,7 @@ def lazybuild(target: str, category: LazyBuildCategory, command: str) -> None:
# Even though this category currently doesn't run any clean
# commands, going to restrict to one use at a time for now
# in case we want to add that.
buildlockname=category.value,
# buildlockname=category.value,
srcpaths=[
'Makefile',
'tools',

View File

@ -242,7 +242,7 @@ class MetaMakefileGenerator:
) -> None:
targets.append(
Target(
src=['$(TOOLS_DIR)/batools/pcommand.py'],
src=['$(TOOLS_DIR)/batools/pcommands.py'],
dst=os.path.join(moduledir, '__init__.py'),
cmd='$(PCOMMAND) gen_python_init_module $@',
)

View File

@ -7,13 +7,15 @@ from __future__ import annotations
# keep launch times fast for small snippets.
import sys
from efrotools.pcommand import PROJROOT
from efrotools import pcommand
def prune_includes() -> None:
"""Check for unnecessary includes in C++ files."""
from batools.pruneincludes import Pruner
pcommand.disallow_in_batch()
args = sys.argv.copy()[2:]
commit = False
if '--commit' in args:
@ -32,6 +34,8 @@ def resize_image() -> None:
import os
import subprocess
pcommand.disallow_in_batch()
if len(sys.argv) != 6:
raise RuntimeError('Expected 5 args.')
width = int(sys.argv[2])
@ -61,18 +65,21 @@ def check_clean_safety() -> None:
from efro.terminal import Clr
from efro.error import CleanError
import efrotools.pcommand
import efrotools.pcommands
pcommand.disallow_in_batch()
ignorevar = 'BA_IGNORE_CLEAN_SAFETY_CHECK'
if os.environ.get(ignorevar) == '1':
return
try:
# First do standard checks.
efrotools.pcommand.check_clean_safety()
efrotools.pcommands.check_clean_safety()
# Then also make sure there are no untracked changes to core files
# (since we may be blowing core away here).
spinoff_bin = os.path.join(str(PROJROOT), 'tools', 'spinoff')
spinoff_bin = os.path.join(str(pcommand.PROJROOT), 'tools', 'spinoff')
if os.path.exists(spinoff_bin):
result = subprocess.run(
[spinoff_bin, 'cleancheck', '--soft'], check=False
@ -94,6 +101,8 @@ def archive_old_builds() -> None:
"""
import batools.build
pcommand.disallow_in_batch()
if len(sys.argv) < 3:
raise RuntimeError('Invalid arguments.')
ssh_server = sys.argv[2]
@ -117,10 +126,12 @@ def lazy_increment_build() -> None:
from efrotools import get_files_hash
from efrotools.code import get_code_filenames
pcommand.disallow_in_batch()
if sys.argv[2:] not in [[], ['--update-hash-only']]:
raise CleanError('Invalid arguments')
update_hash_only = '--update-hash-only' in sys.argv
codefiles = get_code_filenames(PROJROOT, include_generated=False)
codefiles = get_code_filenames(pcommand.PROJROOT, include_generated=False)
codehash = get_files_hash(codefiles)
hashfilename = '.cache/lazy_increment_build'
try:
@ -151,6 +162,8 @@ def get_master_asset_src_dir() -> None:
import subprocess
import os
pcommand.disallow_in_batch()
master_assets_dir = '/Users/ericf/Documents/ballisticakit_master_assets'
dummy_dir = '/__DUMMY_MASTER_SRC_DISABLED_PATH__'
@ -188,6 +201,8 @@ def androidaddr() -> None:
import batools.android
from efro.error import CleanError
pcommand.disallow_in_batch()
if len(sys.argv) != 5:
raise CleanError(
f'ERROR: expected 3 args; got {len(sys.argv) - 2}\n'
@ -206,13 +221,17 @@ def push_ipa() -> None:
from efrotools import extract_arg
import efrotools.ios
pcommand.disallow_in_batch()
args = sys.argv[2:]
signing_config = extract_arg(args, '--signing-config')
if len(args) != 1:
raise RuntimeError('Expected 1 mode arg (debug or release).')
modename = args[0].lower()
efrotools.ios.push_ipa(PROJROOT, modename, signing_config=signing_config)
efrotools.ios.push_ipa(
pcommand.PROJROOT, modename, signing_config=signing_config
)
def printcolors() -> None:
@ -220,6 +239,8 @@ def printcolors() -> None:
from efro.error import CleanError
from efro.terminal import TerminalColor, Clr
pcommand.disallow_in_batch()
if Clr.RED == '':
raise CleanError('Efro color terminal output is disabled.')
@ -244,6 +265,8 @@ def python_version_android_base() -> None:
"""Print built Python base version."""
from efrotools.pybuild import PY_VER_ANDROID
pcommand.disallow_in_batch()
print(PY_VER_ANDROID, end='')
@ -251,6 +274,8 @@ def python_version_android() -> None:
"""Print Android embedded Python version."""
from efrotools.pybuild import PY_VER_EXACT_ANDROID
pcommand.disallow_in_batch()
print(PY_VER_EXACT_ANDROID, end='')
@ -258,16 +283,24 @@ def python_version_apple() -> None:
"""Print Apple embedded Python version."""
from efrotools.pybuild import PY_VER_EXACT_APPLE
pcommand.disallow_in_batch()
print(PY_VER_EXACT_APPLE, end='')
def python_build_apple() -> None:
    """Build an embeddable python for mac/ios/tvos."""
    # Not batch-safe: the shared implementation chdirs to the project root.
    pcommand.disallow_in_batch()
    # Release variant; see python_build_apple_debug for the debug one.
    _python_build_apple(debug=False)
def python_build_apple_debug() -> None:
    """Build embeddable python for mac/ios/tvos (dbg ver)."""
    # Not batch-safe: the shared implementation chdirs to the project root.
    pcommand.disallow_in_batch()
    # Debug variant of python_build_apple.
    _python_build_apple(debug=True)
@ -277,7 +310,9 @@ def _python_build_apple(debug: bool) -> None:
from efro.error import CleanError
from efrotools import pybuild
os.chdir(PROJROOT)
pcommand.disallow_in_batch()
os.chdir(pcommand.PROJROOT)
archs = ('mac', 'ios', 'tvos')
if len(sys.argv) != 3:
raise CleanError('Error: expected one <ARCH> arg: ' + ', '.join(archs))
@ -291,11 +326,17 @@ def _python_build_apple(debug: bool) -> None:
def python_build_android() -> None:
    """Build an embeddable Python lib for Android."""
    # Not batch-safe: the shared implementation chdirs to the project root.
    pcommand.disallow_in_batch()
    # Release variant; see python_build_android_debug for the debug one.
    _python_build_android(debug=False)
def python_build_android_debug() -> None:
    """Build embeddable Android Python lib (debug ver)."""
    # Not batch-safe: the shared implementation chdirs to the project root.
    pcommand.disallow_in_batch()
    # Debug variant of python_build_android.
    _python_build_android(debug=True)
@ -304,7 +345,9 @@ def _python_build_android(debug: bool) -> None:
from efro.error import CleanError
from efrotools import pybuild
os.chdir(PROJROOT)
pcommand.disallow_in_batch()
os.chdir(pcommand.PROJROOT)
archs = ('arm', 'arm64', 'x86', 'x86_64')
if len(sys.argv) != 3:
raise CleanError('Error: Expected one <ARCH> arg: ' + ', '.join(archs))
@ -313,7 +356,7 @@ def _python_build_android(debug: bool) -> None:
raise CleanError(
'Error: invalid arch. valid values are: ' + ', '.join(archs)
)
pybuild.build_android(str(PROJROOT), arch, debug=debug)
pybuild.build_android(str(pcommand.PROJROOT), arch, debug=debug)
def python_android_patch() -> None:
@ -321,6 +364,8 @@ def python_android_patch() -> None:
import os
from efrotools import pybuild
pcommand.disallow_in_batch()
os.chdir(sys.argv[2])
pybuild.android_patch()
@ -329,6 +374,8 @@ def python_android_patch_ssl() -> None:
"""Patches Python ssl to prep for building for Android."""
from efrotools import pybuild
pcommand.disallow_in_batch()
pybuild.android_patch_ssl()
@ -337,6 +384,8 @@ def python_apple_patch() -> None:
from efro.error import CleanError
from efrotools import pybuild
pcommand.disallow_in_batch()
if len(sys.argv) != 3:
raise CleanError('Expected 1 arg.')
@ -357,7 +406,9 @@ def python_gather() -> None:
import os
from efrotools import pybuild
os.chdir(PROJROOT)
pcommand.disallow_in_batch()
os.chdir(pcommand.PROJROOT)
pybuild.gather(do_android=True, do_apple=True)
@ -366,7 +417,9 @@ def python_gather_android() -> None:
import os
from efrotools import pybuild
os.chdir(PROJROOT)
pcommand.disallow_in_batch()
os.chdir(pcommand.PROJROOT)
pybuild.gather(do_android=True, do_apple=False)
@ -375,7 +428,9 @@ def python_gather_apple() -> None:
import os
from efrotools import pybuild
os.chdir(PROJROOT)
pcommand.disallow_in_batch()
os.chdir(pcommand.PROJROOT)
pybuild.gather(do_android=False, do_apple=True)
@ -384,17 +439,25 @@ def python_winprune() -> None:
import os
from efrotools import pybuild
os.chdir(PROJROOT)
pcommand.disallow_in_batch()
os.chdir(pcommand.PROJROOT)
pybuild.winprune()
def capitalize() -> None:
    """Print args capitalized."""
    pcommand.disallow_in_batch()
    # Capitalize each provided arg and emit them space-separated with
    # no trailing newline (output is consumed by scripts).
    words = [word.capitalize() for word in sys.argv[2:]]
    print(' '.join(words), end='')
def upper() -> None:
    """Print args uppercased."""
    pcommand.disallow_in_batch()
    # Uppercase each provided arg and emit them space-separated with
    # no trailing newline (output is consumed by scripts).
    words = [word.upper() for word in sys.argv[2:]]
    print(' '.join(words), end='')
@ -402,6 +465,8 @@ def efrocache_update() -> None:
"""Build & push files to efrocache for public access."""
from efrotools.efrocache import update_cache
pcommand.disallow_in_batch()
makefile_dirs = ['', 'src/assets', 'src/resources', 'src/meta']
update_cache(makefile_dirs)
@ -410,6 +475,8 @@ def efrocache_get() -> None:
"""Get a file from efrocache."""
from efrotools.efrocache import get_target
pcommand.disallow_in_batch()
if len(sys.argv) != 3:
raise RuntimeError('Expected exactly 1 arg')
get_target(sys.argv[2])
@ -420,6 +487,8 @@ def get_modern_make() -> None:
import platform
import subprocess
pcommand.disallow_in_batch()
# Mac gnu make is outdated (due to newer versions using GPL3 I believe).
# so let's return 'gmake' there which will point to homebrew make which
# should be up to date.
@ -448,19 +517,21 @@ def warm_start_asset_build() -> None:
from pathlib import Path
from efrotools import getprojectconfig
public: bool = getprojectconfig(PROJROOT)['public']
pcommand.disallow_in_batch()
public: bool = getprojectconfig(pcommand.PROJROOT)['public']
if public:
from efrotools.efrocache import warm_start_cache
os.chdir(PROJROOT)
os.chdir(pcommand.PROJROOT)
warm_start_cache()
else:
# For internal builds we don't use efrocache but we do use an
# internal build cache. Download an initial cache/etc. if need be.
subprocess.run(
[
str(Path(PROJROOT, 'tools/pcommand')),
str(Path(pcommand.PROJROOT, 'tools/pcommand')),
'convert_util',
'--init-asset-cache',
],
@ -473,14 +544,18 @@ def gen_docs_pdoc() -> None:
from efro.terminal import Clr
import batools.docs
pcommand.disallow_in_batch()
print(f'{Clr.BLU}Generating documentation...{Clr.RST}')
batools.docs.generate_pdoc(projroot=str(PROJROOT))
batools.docs.generate_pdoc(projroot=str(pcommand.PROJROOT))
def list_pip_reqs() -> None:
"""List Python Pip packages needed for this project."""
from batools.build import get_pip_reqs
pcommand.disallow_in_batch()
print(' '.join(get_pip_reqs()))
@ -491,6 +566,8 @@ def install_pip_reqs() -> None:
from efro.terminal import Clr
from batools.build import get_pip_reqs
pcommand.disallow_in_batch()
# Make sure pip itself is up to date first.
subprocess.run(
[PYTHON_BIN, '-m', 'pip', 'install', '--upgrade', 'pip'], check=True
@ -507,132 +584,11 @@ def checkenv() -> None:
"""Check for tools necessary to build and run the app."""
import batools.build
pcommand.disallow_in_batch()
batools.build.checkenv()
def wsl_build_check_win_drive() -> None:
"""Make sure we're building on a windows drive."""
import os
import subprocess
import textwrap
from efro.error import CleanError
if (
subprocess.run(
['which', 'wslpath'], check=False, capture_output=True
).returncode
!= 0
):
raise CleanError(
'wslpath not found; you must run this from a WSL environment'
)
if os.environ.get('WSL_BUILD_CHECK_WIN_DRIVE_IGNORE') == '1':
return
# Get a windows path to the current dir.
path = (
subprocess.run(
['wslpath', '-w', '-a', os.getcwd()],
capture_output=True,
check=True,
)
.stdout.decode()
.strip()
)
# If we're sitting under the linux filesystem, our path
# will start with \\wsl$; fail in that case and explain why.
if not path.startswith('\\\\wsl$'):
return
def _wrap(txt: str) -> str:
return textwrap.fill(txt, 76)
raise CleanError(
'\n\n'.join(
[
_wrap(
'ERROR: This project appears to live'
' on the Linux filesystem.'
),
_wrap(
'Visual Studio compiles will error here for reasons related'
' to Linux filesystem case-sensitivity, and thus are'
' disallowed.'
' Clone the repo to a location that maps to a native'
' Windows drive such as \'/mnt/c/ballistica\''
' and try again.'
),
_wrap(
'Note that WSL2 filesystem performance'
' is poor when accessing'
' native Windows drives, so if Visual Studio builds are not'
' needed it may be best to keep things'
' on the Linux filesystem.'
' This behavior may differ under WSL1 (untested).'
),
_wrap(
'Set env-var WSL_BUILD_CHECK_WIN_DRIVE_IGNORE=1 to skip'
' this check.'
),
]
)
)
def wsl_path_to_win() -> None:
"""Forward escape slashes in a provided win path arg."""
import subprocess
import logging
import os
from efro.error import CleanError
try:
create = False
escape = False
if len(sys.argv) < 3:
raise CleanError('Expected at least 1 path arg.')
wsl_path: str | None = None
for arg in sys.argv[2:]:
if arg == '--create':
create = True
elif arg == '--escape':
escape = True
else:
if wsl_path is not None:
raise CleanError('More than one path provided.')
wsl_path = arg
if wsl_path is None:
raise CleanError('No path provided.')
# wslpath fails on nonexistent paths; make it clear when that happens.
if create:
os.makedirs(wsl_path, exist_ok=True)
if not os.path.exists(wsl_path):
raise CleanError(f'Path \'{wsl_path}\' does not exist.')
results = subprocess.run(
['wslpath', '-w', '-a', wsl_path], capture_output=True, check=True
)
except Exception:
# This gets used in a makefile so our returncode is ignored;
# let's try to make our failure known in other ways.
logging.exception('wsl_to_escaped_win_path failed.')
print('wsl_to_escaped_win_path_error_occurred', end='')
return
out = results.stdout.decode().strip()
# If our input ended with a slash, match in the output.
if wsl_path.endswith('/') and not out.endswith('\\'):
out += '\\'
if escape:
out = out.replace('\\', '\\\\')
print(out, end='')
def ensure_prefab_platform() -> None:
"""Ensure we are running on a particular prefab platform.
@ -644,6 +600,8 @@ def ensure_prefab_platform() -> None:
import batools.build
from efro.error import CleanError
pcommand.disallow_in_batch()
if len(sys.argv) != 3:
raise CleanError('Expected 1 platform name arg.')
needed = sys.argv[2]
@ -658,6 +616,8 @@ def prefab_run_var() -> None:
"""Print the current platform prefab run target var."""
import batools.build
pcommand.disallow_in_batch()
if len(sys.argv) != 3:
raise RuntimeError('Expected 1 arg.')
base = sys.argv[2].replace('-', '_').upper()
@ -669,6 +629,8 @@ def prefab_binary_path() -> None:
"""Print the current platform prefab binary path."""
import batools.build
pcommand.disallow_in_batch()
if len(sys.argv) != 3:
raise RuntimeError('Expected 1 arg.')
buildtype, buildmode = sys.argv[2].split('-')
@ -690,6 +652,8 @@ def make_prefab() -> None:
import subprocess
import batools.build
pcommand.disallow_in_batch()
if len(sys.argv) != 3:
raise RuntimeError('Expected one argument')
target = batools.build.PrefabTarget(sys.argv[2])
@ -713,6 +677,8 @@ def lazybuild() -> None:
import batools.build
from efro.error import CleanError
pcommand.disallow_in_batch()
if len(sys.argv) < 5:
raise CleanError('Expected at least 3 args')
try:
@ -733,6 +699,8 @@ def logcat() -> None:
from efro.terminal import Clr
from efro.error import CleanError
pcommand.disallow_in_batch()
if len(sys.argv) != 4:
raise CleanError('Expected 2 args')
adb = sys.argv[2]
@ -754,6 +722,8 @@ def logcat() -> None:
def _camel_case_split(string: str) -> list[str]:
pcommand.disallow_in_batch()
words = [[string[0]]]
for char in string[1:]:
if words[-1][-1].islower() and char.isupper():
@ -769,6 +739,8 @@ def efro_gradle() -> None:
from efro.terminal import Clr
from efrotools.android import filter_gradle_file
pcommand.disallow_in_batch()
args = ['./gradlew'] + sys.argv[2:]
print(f'{Clr.BLU}Running gradle with args:{Clr.RST} {args}.', flush=True)
enabled_tags: set[str] = {'true'}
@ -809,8 +781,12 @@ def stage_build() -> None:
import batools.staging
from efro.error import CleanError
pcommand.disallow_in_batch()
try:
batools.staging.stage_build(projroot=str(PROJROOT), args=sys.argv[2:])
batools.staging.stage_build(
projroot=str(pcommand.PROJROOT), args=sys.argv[2:]
)
except CleanError as exc:
exc.pretty_print()
sys.exit(1)
@ -835,6 +811,8 @@ def update_project() -> None:
import os
from batools.project import ProjectUpdater
pcommand.disallow_in_batch()
check = '--check' in sys.argv
fix = '--fix' in sys.argv
@ -856,6 +834,8 @@ def cmake_prep_dir() -> None:
from efro.error import CleanError
import batools.build
pcommand.disallow_in_batch()
if len(sys.argv) != 3:
raise CleanError('Expected 1 arg (dir name)')
dirname = sys.argv[2]
@ -869,11 +849,13 @@ def gen_binding_code() -> None:
from efro.error import CleanError
import batools.metabuild
pcommand.disallow_in_batch()
if len(sys.argv) != 4:
raise CleanError('Expected 2 args (srcfile, dstfile)')
inpath = sys.argv[2]
outpath = sys.argv[3]
batools.metabuild.gen_binding_code(str(PROJROOT), inpath, outpath)
batools.metabuild.gen_binding_code(str(pcommand.PROJROOT), inpath, outpath)
def gen_flat_data_code() -> None:
@ -881,13 +863,15 @@ def gen_flat_data_code() -> None:
from efro.error import CleanError
import batools.metabuild
pcommand.disallow_in_batch()
if len(sys.argv) != 5:
raise CleanError('Expected 3 args (srcfile, dstfile, varname)')
inpath = sys.argv[2]
outpath = sys.argv[3]
varname = sys.argv[4]
batools.metabuild.gen_flat_data_code(
str(PROJROOT), inpath, outpath, varname
str(pcommand.PROJROOT), inpath, outpath, varname
)
@ -895,24 +879,32 @@ def genchangelog() -> None:
"""Gen a pretty html changelog."""
from batools.changelog import generate
generate(projroot=str(PROJROOT))
pcommand.disallow_in_batch()
generate(projroot=str(pcommand.PROJROOT))
def android_sdk_utils() -> None:
"""Wrangle android sdk stuff."""
from batools.androidsdkutils import run
run(projroot=str(PROJROOT), args=sys.argv[2:])
pcommand.disallow_in_batch()
run(projroot=str(pcommand.PROJROOT), args=sys.argv[2:])
def gen_python_enums_module() -> None:
"""Update our procedurally generated python enums."""
from batools.pythonenumsmodule import generate
pcommand.disallow_in_batch()
if len(sys.argv) != 4:
raise RuntimeError('Expected infile and outfile args.')
generate(
projroot=str(PROJROOT), infilename=sys.argv[2], outfilename=sys.argv[3]
projroot=str(pcommand.PROJROOT),
infilename=sys.argv[2],
outfilename=sys.argv[3],
)
@ -921,14 +913,18 @@ def gen_dummy_modules() -> None:
from efro.error import CleanError
from batools.dummymodule import generate_dummy_modules
pcommand.disallow_in_batch()
if len(sys.argv) != 2:
raise CleanError(f'Expected no args; got {len(sys.argv)-2}.')
generate_dummy_modules(projroot=str(PROJROOT))
generate_dummy_modules(projroot=str(pcommand.PROJROOT))
def version() -> None:
"""Check app versions."""
from batools.version import run
run(projroot=str(PROJROOT), args=sys.argv[2:])
pcommand.disallow_in_batch()
run(projroot=str(pcommand.PROJROOT), args=sys.argv[2:])

View File

@ -7,7 +7,7 @@ from __future__ import annotations
# keep launch times fast for small snippets.
import sys
from efrotools.pcommand import PROJROOT
from efrotools import pcommand
def gen_monolithic_register_modules() -> None:
@ -18,11 +18,13 @@ def gen_monolithic_register_modules() -> None:
from efro.error import CleanError
from batools.featureset import FeatureSet
pcommand.disallow_in_batch()
if len(sys.argv) != 3:
raise CleanError('Expected 1 arg.')
outpath = sys.argv[2]
featuresets = FeatureSet.get_all_for_project(str(PROJROOT))
featuresets = FeatureSet.get_all_for_project(str(pcommand.PROJROOT))
# Filter out ones without native modules.
featuresets = [f for f in featuresets if f.has_python_binary_module]
@ -124,6 +126,8 @@ def py_examine() -> None:
from pathlib import Path
import efrotools
pcommand.disallow_in_batch()
if len(sys.argv) != 7:
print('ERROR: expected 7 args')
sys.exit(255)
@ -134,7 +138,7 @@ def py_examine() -> None:
operation = sys.argv[6]
# This stuff assumes it is being run from project root.
os.chdir(PROJROOT)
os.chdir(pcommand.PROJROOT)
# Set up pypaths so our main distro stuff works.
scriptsdir = os.path.abspath(
@ -149,7 +153,9 @@ def py_examine() -> None:
sys.path.append(scriptsdir)
if toolsdir not in sys.path:
sys.path.append(toolsdir)
efrotools.py_examine(PROJROOT, filename, line, column, selection, operation)
efrotools.py_examine(
pcommand.PROJROOT, filename, line, column, selection, operation
)
def clean_orphaned_assets() -> None:
@ -158,8 +164,10 @@ def clean_orphaned_assets() -> None:
import json
import subprocess
pcommand.disallow_in_batch()
# Operate from dist root..
os.chdir(PROJROOT)
os.chdir(pcommand.PROJROOT)
# Our manifest is split into 2 files (public and private)
with open(
@ -191,6 +199,8 @@ def win_ci_install_prereqs() -> None:
import json
from efrotools.efrocache import get_target
pcommand.disallow_in_batch()
# We'll need to pull a handful of things out of efrocache for the
# build to succeed. Normally this would happen through our Makefile
# targets but we can't use them under raw window so we need to just
@ -227,6 +237,8 @@ def win_ci_binary_build() -> None:
"""Simple windows binary build for ci."""
import subprocess
pcommand.disallow_in_batch()
# Do the thing.
subprocess.run(
[
@ -249,6 +261,8 @@ def update_cmake_prefab_lib() -> None:
from efro.error import CleanError
import batools.build
pcommand.disallow_in_batch()
if len(sys.argv) != 5:
raise CleanError(
'Expected 3 args (standard/server, debug/release, build-dir)'
@ -293,6 +307,8 @@ def android_archive_unstripped_libs() -> None:
from efro.error import CleanError
from efro.terminal import Clr
pcommand.disallow_in_batch()
if len(sys.argv) != 4:
raise CleanError('Expected 2 args; src-dir and dst-dir')
src = Path(sys.argv[2])
@ -334,6 +350,8 @@ def spinoff_check_submodule_parent() -> None:
import os
from efro.error import CleanError
pcommand.disallow_in_batch()
# Make sure we're a spinoff dst project. The spinoff command will be
# a symlink if this is the case.
if not os.path.exists('tools/spinoff'):
@ -353,14 +371,21 @@ def spinoff_check_submodule_parent() -> None:
def gen_python_init_module() -> None:
"""Generate a basic __init__.py."""
import os
from efro.error import CleanError
from efro.terminal import Clr
from batools.project import project_centric_path
pcommand.disallow_in_batch()
if len(sys.argv) != 3:
raise RuntimeError('Expected an outfile arg.')
raise CleanError('Expected an outfile arg.')
outfilename = sys.argv[2]
os.makedirs(os.path.dirname(outfilename), exist_ok=True)
prettypath = project_centric_path(projroot=str(PROJROOT), path=outfilename)
prettypath = project_centric_path(
projroot=str(pcommand.PROJROOT), path=outfilename
)
print(f'Meta-building {Clr.BLD}{prettypath}{Clr.RST}')
with open(outfilename, 'w', encoding='utf-8') as outfile:
outfile.write(
@ -379,7 +404,136 @@ def tests_warm_start() -> None:
"""
from batools import apprun
pcommand.disallow_in_batch()
# We do lots of apprun.python_command() within test. Pre-build the
# binary that they need to do their thing.
if not apprun.test_runs_disabled():
apprun.acquire_binary_for_python_command(purpose='running tests')
def wsl_build_check_win_drive() -> None:
    """Make sure we're building on a windows drive.

    Raises a CleanError when the current directory lives on the Linux
    side of the WSL filesystem (path starts with \\wsl$), where Visual
    Studio builds are disallowed. Set env-var
    WSL_BUILD_CHECK_WIN_DRIVE_IGNORE=1 to skip the check.
    """
    import os
    import subprocess
    import textwrap
    from efro.error import CleanError

    # This command is not supported under the batch server.
    pcommand.disallow_in_batch()

    # If 'wslpath' isn't present we're not actually running under WSL.
    if (
        subprocess.run(
            ['which', 'wslpath'], check=False, capture_output=True
        ).returncode
        != 0
    ):
        raise CleanError(
            'wslpath not found; you must run this from a WSL environment'
        )

    # Explicit opt-out via env-var.
    if os.environ.get('WSL_BUILD_CHECK_WIN_DRIVE_IGNORE') == '1':
        return

    # Get a windows path to the current dir.
    path = (
        subprocess.run(
            ['wslpath', '-w', '-a', os.getcwd()],
            capture_output=True,
            check=True,
        )
        .stdout.decode()
        .strip()
    )

    # If we're sitting under the linux filesystem, our path
    # will start with \\wsl$; fail in that case and explain why.
    if not path.startswith('\\\\wsl$'):
        return

    def _wrap(txt: str) -> str:
        # Wrap explanation text for readable terminal output.
        return textwrap.fill(txt, 76)

    raise CleanError(
        '\n\n'.join(
            [
                _wrap(
                    'ERROR: This project appears to live'
                    ' on the Linux filesystem.'
                ),
                _wrap(
                    'Visual Studio compiles will error here for reasons related'
                    ' to Linux filesystem case-sensitivity, and thus are'
                    ' disallowed.'
                    ' Clone the repo to a location that maps to a native'
                    ' Windows drive such as \'/mnt/c/ballistica\''
                    ' and try again.'
                ),
                _wrap(
                    'Note that WSL2 filesystem performance'
                    ' is poor when accessing'
                    ' native Windows drives, so if Visual Studio builds are not'
                    ' needed it may be best to keep things'
                    ' on the Linux filesystem.'
                    ' This behavior may differ under WSL1 (untested).'
                ),
                _wrap(
                    'Set env-var WSL_BUILD_CHECK_WIN_DRIVE_IGNORE=1 to skip'
                    ' this check.'
                ),
            ]
        )
    )
def wsl_path_to_win() -> None:
    """Forward escape slashes in a provided win path arg.

    Accepts one path arg plus optional '--create' (mkdir the path
    first) and '--escape' (double up backslashes in the output) flags.
    Prints the converted Windows path with no trailing newline. On any
    failure, prints a sentinel string instead of raising, since the
    caller (a makefile) ignores our return code.
    """
    import subprocess
    import logging
    import os
    from efro.error import CleanError

    # This command is not supported under the batch server.
    pcommand.disallow_in_batch()
    try:
        create = False
        escape = False
        if len(sys.argv) < 3:
            raise CleanError('Expected at least 1 path arg.')
        wsl_path: str | None = None
        # Args may arrive in any order; exactly one non-flag path allowed.
        for arg in sys.argv[2:]:
            if arg == '--create':
                create = True
            elif arg == '--escape':
                escape = True
            else:
                if wsl_path is not None:
                    raise CleanError('More than one path provided.')
                wsl_path = arg
        if wsl_path is None:
            raise CleanError('No path provided.')
        # wslpath fails on nonexistent paths; make it clear when that happens.
        if create:
            os.makedirs(wsl_path, exist_ok=True)
        if not os.path.exists(wsl_path):
            raise CleanError(f'Path \'{wsl_path}\' does not exist.')
        results = subprocess.run(
            ['wslpath', '-w', '-a', wsl_path], capture_output=True, check=True
        )
    except Exception:
        # This gets used in a makefile so our returncode is ignored;
        # let's try to make our failure known in other ways.
        logging.exception('wsl_to_escaped_win_path failed.')
        print('wsl_to_escaped_win_path_error_occurred', end='')
        return
    # Note: 'results' and 'wsl_path' are guaranteed bound here because
    # any exception before their assignment returns inside the handler.
    out = results.stdout.decode().strip()
    # If our input ended with a slash, match in the output.
    if wsl_path.endswith('/') and not out.endswith('\\'):
        out += '\\'
    if escape:
        out = out.replace('\\', '\\\\')
    print(out, end='')

View File

@ -58,6 +58,51 @@ def spinoff_test(args: list[str]) -> None:
flush=True,
)
# Normally we spin the project off from where we currently
# are, but for cloud builds we may want to use a dedicated
# shared source instead. (since we need a git managed source
# we need to pull *something* fresh from git instead of just
# using the files that were synced up by cloudshell).
# Here we make sure that shared source is up to date.
spinoff_src = '.'
spinoff_path = path
if shared_test_parent:
spinoff_src = 'build/spinoff_shared_test_parent'
# Need an abs target path since we change cwd in this case.
spinoff_path = os.path.abspath(path)
if bool(False):
print('TEMP BLOWING AWAY')
subprocess.run(['rm', '-rf', spinoff_src], check=True)
if os.path.exists(spinoff_src):
print(
'Pulling latest spinoff_shared_test_parent...',
flush=True,
)
subprocess.run(
['git', 'pull', '--ff-only'],
check=True,
cwd=spinoff_src,
)
else:
os.makedirs(spinoff_src, exist_ok=True)
cmd = [
'git',
'clone',
'git@github.com:efroemling/ballistica-internal.git',
spinoff_src,
]
print(
f'{Clr.BLU}Creating spinoff shared test parent'
f" at '{spinoff_src}' with command {cmd}...{Clr.RST}"
)
subprocess.run(
cmd,
check=True,
)
# If the spinoff project already exists and is submodule-based,
# bring the submodule up to date.
if os.path.exists(path):
if bool(False):
subprocess.run(['rm', '-rf', path], check=True)
@ -73,51 +118,8 @@ def spinoff_test(args: list[str]) -> None:
shell=True,
check=True,
)
else:
# Normally we spin the project off from where we currently
# are, but for cloud builds we may want to use a dedicated
# shared source instead. (since we need a git managed source
# we need to pull something fresh from git instead of just
# using the files that were synced up by cloudshell).
spinoff_src = '.'
spinoff_path = path
if shared_test_parent:
spinoff_src = 'build/spinoff_shared_test_parent'
# Need an abs target path since we change cwd in this case.
spinoff_path = os.path.abspath(path)
if bool(False):
print('TEMP BLOWING AWAY')
subprocess.run(['rm', '-rf', spinoff_src], check=True)
if os.path.exists(spinoff_src):
print(
'Pulling latest spinoff_shared_test_parent...',
flush=True,
)
subprocess.run(
['git', 'pull', '--ff-only'],
check=True,
cwd=spinoff_src,
)
else:
os.makedirs(spinoff_src, exist_ok=True)
cmd = [
'git',
'clone',
'git@github.com:efroemling/ballistica-internal.git',
spinoff_src,
]
print(
f'{Clr.BLU}Creating spinoff shared test parent'
f" at '{spinoff_src}' with command {cmd}...{Clr.RST}"
)
subprocess.run(
cmd,
check=True,
)
# raise CleanError('SO FAR SO GOOD5')
# No spinoff project there yet; create it.
cmd = [
'./tools/spinoff',
'create',

View File

@ -15,23 +15,37 @@ from pathlib import Path
from typing import TYPE_CHECKING
if TYPE_CHECKING:
import threading
from typing import Any
# Absolute path of the project root.
PROJROOT = Path(__file__).resolve().parents[2]
# Set of arguments for the currently running command.
# Note that, unlike sys.argv, this will not include the script path or
# the name of the pcommand; only the arguments *to* the command.
_g_thread_local_storage: threading.local | None = None
# Discovered functions for the currently running pcommand instance.
_g_funcs: dict | None = None
# Are we running as a server?
_g_batch_server_mode: bool = False
def pcommand_main(globs: dict[str, Any]) -> None:
"""Run a snippet contained in the pcommand script.
"""Main entry point to pcommand scripts.
We simply look for all public functions and call
the one corresponding to the first passed arg.
"""
import types
from efro.error import CleanError
from efro.terminal import Clr
funcs = dict(
global _g_funcs # pylint: disable=global-statement
assert _g_funcs is None
# Build our list of available funcs.
_g_funcs = dict(
(
(name, obj)
for name, obj in globs.items()
@ -40,42 +54,97 @@ def pcommand_main(globs: dict[str, Any]) -> None:
and isinstance(obj, types.FunctionType)
)
)
show_help = False
# Call the one based on sys args.
sys.exit(_run_pcommand(sys.argv))
def get_args() -> list[str]:
    """Return the args for the current pcommand.

    In normal mode these come straight from sys.argv; under the batch
    server they come from thread-local storage instead.
    """
    # pylint: disable=unsubscriptable-object, not-an-iterable
    if _g_batch_server_mode:
        # Batch mode: args should have been stuffed into thread-local
        # storage before our command func was invoked.
        assert _g_thread_local_storage is not None
        stored_argv: list[str] | None = getattr(
            _g_thread_local_storage, 'argv', None
        )
        if stored_argv is None:
            raise RuntimeError('Thread local args not found where expected.')
        assert isinstance(stored_argv, list)
        assert all(isinstance(i, str) for i in stored_argv)
        return stored_argv[2:]
    return sys.argv[2:]
def set_output(output: str, newline: bool = True) -> None:
    """Set an output string for the current pcommand.

    This will be printed to stdout on the client even in batch mode.
    """
    final_output = f'{output}\n' if newline else output
    if _g_batch_server_mode:
        # Batch mode: stash the output in thread-local storage to be
        # shipped back to the client once the command completes.
        assert _g_thread_local_storage is not None
        if hasattr(_g_thread_local_storage, 'output'):
            raise RuntimeError('Output is already set for this pcommand.')
        _g_thread_local_storage.output = final_output
    else:
        print(final_output, end='')
def _run_pcommand(sysargv: list[str]) -> int:
"""Do the thing."""
from efro.error import CleanError
from efro.terminal import Clr
assert _g_funcs is not None
# If we're in batch mode, stuff these args into our thread-local
# storage.
if _g_batch_server_mode:
assert _g_thread_local_storage is not None
_g_thread_local_storage.argv = sysargv
retval = 0
if len(sys.argv) < 2:
show_help = False
if len(sysargv) < 2:
print(f'{Clr.RED}ERROR: command expected.{Clr.RST}')
show_help = True
retval = 255
retval = 1
else:
if sys.argv[1] == 'help':
if len(sys.argv) == 2:
if sysargv[1] == 'help':
if len(sysargv) == 2:
show_help = True
elif sys.argv[2] not in funcs:
elif sysargv[2] not in _g_funcs:
print('Invalid help command.')
retval = 255
retval = 1
else:
docs = _trim_docstring(
getattr(funcs[sys.argv[2]], '__doc__', '<no docs>')
getattr(_g_funcs[sysargv[2]], '__doc__', '<no docs>')
)
print(
f'\n{Clr.MAG}{Clr.BLD}pcommand {sys.argv[2]}:{Clr.RST}\n'
f'\n{Clr.MAG}{Clr.BLD}pcommand {sysargv[2]}:{Clr.RST}\n'
f'{Clr.MAG}{docs}{Clr.RST}\n'
)
elif sys.argv[1] in funcs:
elif sysargv[1] in _g_funcs:
try:
funcs[sys.argv[1]]()
_g_funcs[sysargv[1]]()
except KeyboardInterrupt as exc:
print(f'{Clr.RED}{exc}{Clr.RST}')
sys.exit(1)
retval = 1
except CleanError as exc:
exc.pretty_print()
sys.exit(1)
retval = 1
else:
print(
f'{Clr.RED}Unknown pcommand: "{sys.argv[1]}"{Clr.RST}',
f'{Clr.RED}Unknown pcommand: "{sysargv[1]}"{Clr.RST}',
file=sys.stderr,
)
retval = 255
retval = 1
if show_help:
print(
@ -91,10 +160,76 @@ def pcommand_main(globs: dict[str, Any]) -> None:
f'{Clr.RST} for full documentation for a command.'
)
print('Available commands:')
for func, obj in sorted(funcs.items()):
for func, obj in sorted(_g_funcs.items()):
doc = getattr(obj, '__doc__', '').splitlines()[0].strip()
print(f'{Clr.MAG}{func}{Clr.BLU} - {doc}{Clr.RST}')
sys.exit(retval)
return retval
def enter_batch_server_mode() -> None:
    """Called by pcommandserver when we start serving."""
    # pylint: disable=global-statement
    global _g_batch_server_mode, _g_thread_local_storage

    # Import threading lazily; non-batch runs never need it and this
    # may shave a bit off launch time.
    import threading

    # This should only ever happen once per process.
    assert not _g_batch_server_mode
    assert _g_thread_local_storage is None
    _g_batch_server_mode = True
    # Spin up our thread-local storage.
    _g_thread_local_storage = threading.local()
def is_batch() -> bool:
    """Is the current pcommand running under a batch server?

    Commands that do things that are unsafe to do in server mode
    such as chdir should assert that this is not true.
    """
    # Simply reflects the module-level flag set by
    # enter_batch_server_mode().
    return _g_batch_server_mode
def run_client_pcommand(args: list[str], log_path: str) -> tuple[int, str]:
    """Call a pcommand function when running as a batch server.

    Returns the command's result code along with any output it set via
    set_output(). On a nonzero result code, a message pointing the user
    at log_path is appended to the output.
    """
    assert _g_batch_server_mode
    assert _g_thread_local_storage is not None

    # Clear any output from previous commands on this thread.
    # (Fix: this clear was accidentally duplicated; doing it once
    # suffices.)
    if hasattr(_g_thread_local_storage, 'output'):
        delattr(_g_thread_local_storage, 'output')

    # Run the command.
    resultcode: int = _run_pcommand(args)

    # Return the result code and any output the command provided.
    output = getattr(_g_thread_local_storage, 'output', '')
    if resultcode != 0:
        if output:
            output += '\n'
        output += (
            f'Error: pcommandbatch command failed: {args}.'
            f" See '{log_path}' for more info.\n"
        )
    assert isinstance(output, str)
    return (resultcode, output)
def disallow_in_batch() -> None:
    """Utility call to raise a clean error if running under batch mode."""
    from efro.error import CleanError

    # No-op in normal (non-server) mode.
    if not _g_batch_server_mode:
        return
    raise CleanError(
        'This pcommand does not support batch mode.\n'
        'See docs in efrotools.pcommand if you want to add it.'
    )
def _trim_docstring(docstring: str) -> str:
@ -130,707 +265,3 @@ def _trim_docstring(docstring: str) -> str:
# Return a single string.
return '\n'.join(trimmed)
def _spelling(words: list[str]) -> None:
    """Add the given words to any JetBrains dictionary files present.

    Words are lowercased, inserted only if not already present, and
    the file is rewritten in sorted JetBrains-dictionary form.
    """
    from efrotools.code import sort_jetbrains_dict
    import os

    num_modded_dictionaries = 0
    # Only dictionaries that actually exist on disk get modified.
    for fname in [
        '.idea/dictionaries/ericf.xml',
        'ballisticakit-cmake/.idea/dictionaries/ericf.xml',
    ]:
        if not os.path.exists(fname):
            continue
        with open(fname, encoding='utf-8') as infile:
            lines = infile.read().splitlines()
        # Sanity-check the expected file layout before editing.
        # NOTE(review): the exact whitespace in these literals matters
        # for matching the dictionary format — confirm against an
        # actual dictionary file if changing.
        if lines[2] != ' <words>':
            raise RuntimeError('Unexpected dictionary format.')
        added_count = 0
        for word in words:
            line = f' <w>{word.lower()}</w>'
            if line not in lines:
                # Insert just after the '<words>' opener; sorting
                # happens at write time.
                lines.insert(3, line)
                added_count += 1
        with open(fname, 'w', encoding='utf-8') as outfile:
            outfile.write(sort_jetbrains_dict('\n'.join(lines)))
        print(f'Added {added_count} words to {fname}.')
        num_modded_dictionaries += 1
    print(f'Modified {num_modded_dictionaries} dictionaries.')
def pur() -> None:
    """Run pur using project's Python version."""
    import subprocess

    # Invoke pur as a module under the same interpreter running us,
    # forwarding all remaining command-line args.
    cmd = [sys.executable, '-m', 'pur'] + sys.argv[2:]
    subprocess.run(cmd, check=True)
def spelling_all() -> None:
    """Add all misspellings from a pycharm run."""
    import subprocess

    print('Running "make pycharm-full"...')
    result = subprocess.run(
        ['make', 'pycharm-full'], check=False, capture_output=True
    )
    # Pull out just the typo-report lines from the build output.
    typo_lines = [
        line
        for line in result.stdout.decode().splitlines()
        if 'Typo: In word' in line
    ]
    # Strip enclosing quotes but not internal ones.
    words: list[str] = []
    for typo_line in typo_lines:
        word = typo_line.split('Typo: In word')[1].strip()
        assert word[0] == "'"
        assert word[-1] == "'"
        words.append(word[1:-1])
    _spelling(words)
def spelling() -> None:
    """Add words to the PyCharm dictionary."""
    # Everything after the pcommand name is a word to add.
    _spelling(list(sys.argv[2:]))
def xcodebuild() -> None:
    """Run xcodebuild with added smarts."""
    from efrotools.xcodebuild import XCodeBuild

    build = XCodeBuild(projroot=str(PROJROOT), args=sys.argv[2:])
    build.run()
def xcoderun() -> None:
    """Run an xcode build in the terminal.

    Expects exactly 3 args: <xcode project path> <scheme> <configuration>.
    """
    import os
    import subprocess
    from efro.error import CleanError
    from efrotools.xcodebuild import project_build_path

    if len(sys.argv) != 5:
        # Fix: previously this message only listed two of the three
        # required args (the <scheme> placeholder was missing).
        raise CleanError(
            'Expected 3 args:'
            ' <xcode project path> <scheme> <configuration name>'
        )
    project_path = os.path.abspath(sys.argv[2])
    scheme = sys.argv[3]
    configuration = sys.argv[4]
    # Resolve the built binary's path for this scheme/config and run it.
    path = project_build_path(
        projroot=str(PROJROOT),
        project_path=project_path,
        scheme=scheme,
        configuration=configuration,
    )
    subprocess.run(path, check=True)
def pyver() -> None:
    """Prints the Python version used by this project."""
    from efrotools import PYVER

    # No trailing newline; output is meant to be captured by scripts.
    sys.stdout.write(PYVER)
def try_repeat() -> None:
    """Run a command with repeat attempts on failure.

    First arg is the number of retries; remaining args are the command.
    """
    import subprocess
    from efro.error import CleanError

    # Need a retry-count plus at least one command token.
    if len(sys.argv) < 4:
        raise CleanError(
            'Expected a retry-count arg and at least one command arg'
        )
    try:
        repeats = int(sys.argv[2])
    except Exception:
        raise CleanError('Expected int as first arg') from None
    if repeats < 0:
        raise CleanError('Retries must be >= 0')
    cmd = sys.argv[3:]
    attempts = repeats + 1
    for attempt in range(1, attempts + 1):
        if subprocess.run(cmd, check=False).returncode == 0:
            return
        print(
            f'try_repeat attempt {attempt} of {attempts} failed for {cmd}.',
            file=sys.stderr,
            flush=True,
        )
    raise CleanError(f'Command failed {attempts} time(s): {cmd}')
def check_clean_safety() -> None:
    """Ensure all files are added to git or covered by gitignore.

    Use to avoid losing work if we accidentally do a clean without
    adding something.
    """
    import os
    import subprocess
    from efro.error import CleanError

    if len(sys.argv) != 2:
        raise CleanError('invalid arguments')
    # Ask git for machine-readable status; '?' lines are untracked files.
    status = subprocess.check_output(
        ['git', 'status', '--porcelain=v2']
    ).decode()
    untracked = [ln for ln in status.splitlines() if ln.startswith('?')]
    if untracked:
        raise CleanError(
            'Untracked file(s) found; aborting.'
            ' (see "git status" from "'
            + os.getcwd()
            + '") Either \'git add\' them, add them to .gitignore,'
            ' or remove them and try again.'
        )
def gen_empty_py_init() -> None:
    """Generate an empty __init__.py for a package dir.

    Used as part of meta builds.
    """
    from efro.terminal import Clr
    from efro.error import CleanError

    if len(sys.argv) != 3:
        raise CleanError('Expected a single path arg.')
    outpath = Path(sys.argv[2])
    # Make sure the package dir exists before writing into it.
    outpath.parent.mkdir(parents=True, exist_ok=True)
    print(f'Meta-building {Clr.BLD}{outpath}{Clr.RST}')
    outpath.write_text(
        '# This file is autogenerated; do not hand-edit.\n',
        encoding='utf-8',
    )
def formatcode() -> None:
    """Format all of our C/C++/etc. code."""
    import efrotools.code

    # '-full' forces formatting everything rather than just changes.
    do_full = '-full' in sys.argv
    efrotools.code.format_project_cpp_files(PROJROOT, do_full)
def formatscripts() -> None:
    """Format all of our Python/etc. code."""
    import efrotools.code

    # '-full' forces formatting everything rather than just changes.
    do_full = '-full' in sys.argv
    efrotools.code.format_project_python_files(PROJROOT, do_full)
def formatmakefile() -> None:
    """Format the main makefile."""
    from efrotools.makefile import Makefile

    with open('Makefile', encoding='utf-8') as infile:
        original = infile.read()
    formatted = Makefile(original).get_output()
    # Skip the write when nothing changed (avoids touching mod-times).
    if formatted == original:
        return
    with open('Makefile', 'w', encoding='utf-8') as outfile:
        outfile.write(formatted)
def cpplint() -> None:
    """Run lint-checking on all code deemed lint-able."""
    import efrotools.code

    # '-full' rechecks everything instead of just changed files.
    do_full = '-full' in sys.argv
    efrotools.code.check_cpplint(PROJROOT, do_full)
def scriptfiles() -> None:
    """List project script files.

    Pass -lines to use newlines as separators. The default is spaces.
    """
    import efrotools.code

    paths = efrotools.code.get_script_filenames(projroot=PROJROOT)
    # Space-separated output requires paths contain no spaces.
    assert not any(' ' in path for path in paths)
    sep = '\n' if '-lines' in sys.argv else ' '
    print(sep.join(paths))
def pylint() -> None:
    """Run pylint checks on our scripts."""
    import efrotools.code

    argset = set(sys.argv)
    efrotools.code.pylint(PROJROOT, '-full' in argset, '-fast' in argset)
def pylint_files() -> None:
    """Run pylint checks on provided filenames."""
    from efro.terminal import Clr
    from efro.error import CleanError
    import efrotools.code

    filenames = sys.argv[2:]
    if not filenames:
        raise CleanError('Expected at least 1 filename arg.')
    efrotools.code.runpylint(PROJROOT, filenames)
    print(f'{Clr.GRN}Pylint Passed.{Clr.RST}')
def mypy() -> None:
    """Run mypy checks on our scripts."""
    import efrotools.code

    # '-full' rechecks everything instead of using incremental state.
    do_full = '-full' in sys.argv
    efrotools.code.mypy(PROJROOT, do_full)
def mypy_files() -> None:
    """Run mypy checks on provided filenames."""
    from efro.terminal import Clr
    from efro.error import CleanError
    import efrotools.code

    filenames = sys.argv[2:]
    if not filenames:
        raise CleanError('Expected at least 1 filename arg.')
    try:
        efrotools.code.mypy_files(PROJROOT, filenames)
    except Exception as exc:
        raise CleanError('Mypy Failed.') from exc
    print(f'{Clr.GRN}Mypy Passed.{Clr.RST}')
def dmypy() -> None:
    """Run mypy checks on our scripts using the mypy daemon."""
    import efrotools.code

    # The daemon keeps state between runs for faster re-checks.
    efrotools.code.dmypy(PROJROOT)
def pycharm() -> None:
    """Run PyCharm checks on our scripts."""
    import efrotools.code

    argset = set(sys.argv)
    efrotools.code.check_pycharm(PROJROOT, '-full' in argset, '-v' in argset)
def clioncode() -> None:
    """Run CLion checks on our code."""
    import efrotools.code

    argset = set(sys.argv)
    efrotools.code.check_clioncode(PROJROOT, '-full' in argset, '-v' in argset)
def androidstudiocode() -> None:
    """Run Android Studio checks on our code."""
    import efrotools.code

    argset = set(sys.argv)
    efrotools.code.check_android_studio(
        PROJROOT, '-full' in argset, '-v' in argset
    )
def tool_config_install() -> None:
    """Install a tool config file (with some filtering)."""
    from efro.error import CleanError
    import efrotools.toolconfig

    if len(sys.argv) != 4:
        raise CleanError('expected 2 args')
    src, dst = (Path(p) for p in sys.argv[2:4])
    efrotools.toolconfig.install_tool_config(PROJROOT, src, dst)
def sync_all() -> None:
    """Runs full syncs between all efrotools projects.

    This list is defined in the EFROTOOLS_SYNC_PROJECTS env var.
    This assumes that there is a 'sync-full' and 'sync-list' Makefile target
    under each project.
    """
    import os
    import subprocess
    import concurrent.futures
    from efro.error import CleanError
    from efro.terminal import Clr

    print(f'{Clr.BLD}Updating formatting for all projects...{Clr.RST}')
    projects_str = os.environ.get('EFROTOOLS_SYNC_PROJECTS')
    if projects_str is None:
        # Fix: this message previously named 'EFROTOOL_SYNC_PROJECTS'
        # (missing the S), which is not the env var actually read above.
        raise CleanError('EFROTOOLS_SYNC_PROJECTS is not defined.')
    projects = projects_str.split(':')

    def _format_project(fproject: str) -> None:
        # Run each project's formatter in its own dir.
        fcmd = f'cd "{fproject}" && make format'
        subprocess.run(fcmd, shell=True, check=True)

    # No matter what we're doing (even if just listing), run formatting
    # in all projects before beginning. Otherwise if we do a sync and then
    # a preflight we'll often wind up getting out-of-sync errors due to
    # formatting changing after the sync.
    with concurrent.futures.ThreadPoolExecutor(
        max_workers=len(projects)
    ) as executor:
        # Converting this to a list will propagate any errors.
        list(executor.map(_format_project, projects))
    if len(sys.argv) > 2 and sys.argv[2] == 'list':
        # List mode: show what would be synced.
        for project in projects:
            cmd = f'cd "{project}" && make sync-list'
            print(cmd)
            subprocess.run(cmd, shell=True, check=True)
    else:
        # Real mode: two passes so changes flow dst->src then src->dst.
        for i in range(2):
            if i == 0:
                print(
                    f'{Clr.BLD}Running sync pass 1'
                    f' (ensures all changes at dsts are pushed to src):'
                    f'{Clr.RST}'
                )
            else:
                print(
                    f'{Clr.BLD}Running sync pass 2'
                    f' (ensures latest src is pulled to all dsts):{Clr.RST}'
                )
            for project in projects:
                cmd = f'cd "{project}" && make sync-full'
                subprocess.run(cmd, shell=True, check=True)
        print(Clr.BLD + 'Sync-all successful!' + Clr.RST)
def sync() -> None:
    """Runs standard syncs between this project and others."""
    from efrotools import getprojectconfig
    from efrotools.sync import Mode, SyncItem, run_standard_syncs

    # An explicit mode can be passed as an arg; default is pulling.
    mode = Mode(sys.argv[2]) if len(sys.argv) > 2 else Mode.PULL
    # Load sync-items from project config and run them.
    itemdicts = getprojectconfig(PROJROOT).get('sync_items', [])
    sync_items = [SyncItem(**itemdict) for itemdict in itemdicts]
    run_standard_syncs(PROJROOT, mode, sync_items)
def compile_python_files() -> None:
    """Compile pyc files for packaging.

    This creates hash-based PYC files in opt level 1 with hash checks
    defaulting to off, so we don't have to worry about timestamps or
    loading speed hits due to hash checks. (see PEP 552).

    We just need to tell modders that they'll need to clear these
    cache files out or turn on debugging mode if they want to tweak
    the built-in scripts directly (or go through the asset build system
    which properly recreates the .pyc files).
    """
    import py_compile

    # Invalidation mode is the same for every file; set it up once.
    mode = py_compile.PycInvalidationMode.UNCHECKED_HASH
    for srcpath in sys.argv[2:]:
        py_compile.compile(
            srcpath,
            doraise=True,
            optimize=1,
            invalidation_mode=mode,
        )
def pytest() -> None:
    """Run pytest with project environment set up properly."""
    import os
    import platform
    import subprocess
    from efrotools import getprojectconfig, PYTHON_BIN
    from efro.error import CleanError

    # Grab our python paths for the project and stuff them in PYTHONPATH.
    pypaths = getprojectconfig(PROJROOT).get('python_paths')
    if pypaths is None:
        raise CleanError('python_paths not found in project config.')
    sep = ';' if platform.system() == 'Windows' else ':'
    env = os.environ
    env['PYTHONPATH'] = sep.join(pypaths)
    # Keep child interpreters from writing __pycache__ dirs everywhere,
    # which can screw up our builds.
    env['PYTHONDONTWRITEBYTECODE'] = '1'
    # Dev-mode surfaces more bad behavior at runtime.
    # https://docs.python.org/3/library/devmode.html
    env['PYTHONDEVMODE'] = '1'
    # Do the thing, passing our exit code through on failure.
    results = subprocess.run(
        [PYTHON_BIN, '-m', 'pytest', *sys.argv[2:]], check=False
    )
    if results.returncode != 0:
        sys.exit(results.returncode)
def makefile_target_list() -> None:
    """Prints targets in a makefile.

    Takes a single argument: a path to a Makefile. Targets are shown
    grouped under any '# ... #' banner-style section titles, each with
    the comment directly above it as its description.
    """
    from dataclasses import dataclass
    from efro.error import CleanError
    from efro.terminal import Clr
    @dataclass
    class _Entry:
        kind: str  # Either 'target' or 'section'.
        line: int  # Line index the entry was found at.
        title: str  # Target name or section title.
    if len(sys.argv) != 3:
        raise CleanError('Expected exactly one filename arg.')
    with open(sys.argv[2], encoding='utf-8') as infile:
        lines = infile.readlines()
    def _docstr(lines2: list[str], linenum: int) -> str:
        # Walk upward through the contiguous comment block directly above
        # a target; the topmost comment line becomes the description.
        doc = ''
        j = linenum - 1
        while j >= 0 and lines2[j].startswith('#'):
            doc = lines2[j][1:].strip()
            j -= 1
        if doc != '':
            return ' - ' + doc
        return doc
    print(
        '----------------------\n'
        'Available Make Targets\n'
        '----------------------'
    )
    entries: list[_Entry] = []
    for i, line in enumerate(lines):
        # Targets: a 'name:' line whose name is alnum plus -/_ and that
        # is not underscore-prefixed (those are considered private).
        if (
            ':' in line
            and line.split(':')[0].replace('-', '').replace('_', '').isalnum()
            and not line.startswith('_')
        ):
            entries.append(
                _Entry(kind='target', line=i, title=line.split(':')[0])
            )
        # Section titles: banner comments of the form '# Title #'.
        if (
            line.startswith('# ')
            and line.endswith(' #\n')
            and len(line.split()) > 2
        ):
            entries.append(
                _Entry(kind='section', line=i, title=line[1:-2].strip())
            )
    for i, entry in enumerate(entries):
        if entry.kind == 'section':
            # Don't print headers for empty sections.
            if i + 1 >= len(entries) or entries[i + 1].kind == 'section':
                continue
            print('\n' + entry.title + '\n' + '-' * len(entry.title))
        elif entry.kind == 'target':
            print(
                Clr.MAG
                + entry.title
                + Clr.BLU
                + _docstr(lines, entry.line)
                + Clr.RST
            )
def echo() -> None:
    """Echo with support for efro.terminal.Clr args (RED, GRN, BLU, etc).

    Prints a Clr.RST at the end so that can be omitted.
    """
    from efro.terminal import Clr

    # All upper-case public attrs on Clr count as color names.
    clrnames = {n for n in dir(Clr) if n.isupper() and not n.startswith('_')}
    bits: list[str] = []
    need_space = False
    for arg in sys.argv[2:]:
        if arg in clrnames:
            # Color codes are emitted inline without spacing.
            bits.append(getattr(Clr, arg))
        else:
            if need_space:
                bits.append(' ')
            need_space = True
            bits.append(arg)
    bits.append(Clr.RST)
    print(''.join(bits))
def urandom_pretty() -> None:
    """Spits out urandom bytes formatted for source files."""
    # Note: not especially efficient; rewrite if ever needed in a
    # performance-sensitive context.
    import os
    from efro.error import CleanError

    if len(sys.argv) not in (3, 4):
        raise CleanError(
            'Expected one arg (count) and possibly two (line len).'
        )
    size = int(sys.argv[2])
    linemax = int(sys.argv[3]) if len(sys.argv) >= 4 else 72
    data = os.urandom(size)
    outlines: list[str] = []
    cur = b''
    for pos in range(len(data)):
        byte = data[pos : pos + 1]
        # Flush the current line before its repr would exceed linemax.
        if len(repr(cur + byte)) > linemax:
            outlines.append(repr(cur))
            cur = b''
        cur += byte
    if cur:
        outlines.append(repr(cur))
    bstr = '\n'.join(outlines)
    print(f'({bstr})')
def tweak_empty_py_files() -> None:
    """Find any zero-length Python files and make them length 1."""
    from efro.error import CleanError
    import efrotools.pybuild

    args = sys.argv[2:]
    if len(args) != 1:
        raise CleanError('Expected exactly 1 path arg.')
    efrotools.pybuild.tweak_empty_py_files(args[0])
def make_ensure() -> None:
    """Make sure a makefile target is up-to-date.

    This can technically be done by simply `make --question`, but this
    has some extra bells and whistles such as printing some of the commands
    that would run.

    Can be useful to run after cloud-builds to ensure the local results
    consider themselves up-to-date.
    """
    # pylint: disable=too-many-locals
    from efro.error import CleanError
    from efro.terminal import Clr
    import subprocess
    dirpath: str | None = None
    args = sys.argv[2:]
    # An optional '--dir PATH' pair selects where make runs.
    if '--dir' in args:
        argindex = args.index('--dir')
        dirpath = args[argindex + 1]
        del args[argindex : argindex + 2]
    if len(args) not in (0, 1):
        raise CleanError('Expected zero or one target args.')
    target = args[0] if args else None
    # Dry-run make; its output tells us whether anything would be done.
    cmd = ['make', '--no-print-directory', '--dry-run']
    if target is not None:
        cmd.append(target)
    results = subprocess.run(cmd, check=False, capture_output=True, cwd=dirpath)
    out = results.stdout.decode()
    err = results.stderr.decode()
    if results.returncode != 0:
        print(f'Failed command stdout:\n{out}\nFailed command stderr:\n{err}')
        raise CleanError(f"Command failed during make_ensure: '{cmd}'.")
    targetname: str = '<default>' if target is None else target
    lines = out.splitlines()
    in_str = '' if dirpath is None else f"in directory '{dirpath}' "
    # Make prints a single 'Nothing to be done' line when up to date.
    if len(lines) == 1 and 'Nothing to be done for ' in lines[0]:
        print(f"make_ensure: '{targetname}' target {in_str}is up to date.")
    else:
        # Out of date: show (a capped number of) the commands make would
        # run, then fail.
        maxlines = 20
        if len(lines) > maxlines:
            outlines = '\n'.join(
                lines[:maxlines] + [f'(plus {len(lines)-maxlines} more lines)']
            )
        else:
            outlines = '\n'.join(lines)
        print(
            f"make_ensure: '{targetname}' target {in_str}"
            f'is out of date; would run:\n\n'
            '-------------------------- MAKE-ENSURE COMMANDS BEGIN '
            f'--------------------------\n{Clr.YLW}'
            f'{outlines}{Clr.RST}\n'
            '--------------------------- MAKE-ENSURE COMMANDS END '
            '---------------------------\n'
        )
        raise CleanError(
            f"make_ensure: '{targetname}' target {in_str}is out of date."
        )
def make_target_debug() -> None:
    """Debug makefile src/target mod times given src and dst path.

    Built to debug stubborn Makefile targets that insist on being
    rebuilt just after being built via a cloud target.
    """
    import os
    import datetime
    from efro.error import CleanError

    args = sys.argv[2:]
    if len(args) != 2:
        raise CleanError('Expected 2 args.')

    def _utc_mod_time(path: str) -> datetime.datetime:
        # File mod-time as a timezone-aware UTC datetime.
        return datetime.datetime.fromtimestamp(
            os.path.getmtime(path), datetime.timezone.utc
        )

    def _timestr(when: datetime.datetime) -> str:
        return (
            f'{when.hour}:{when.minute}:{when.second}:'
            f'{when.microsecond}'
        )

    print(f'SRC modified at {_timestr(_utc_mod_time(args[0]))}.')
    print(f'DST modified at {_timestr(_utc_mod_time(args[1]))}.')

View File

@ -1,86 +0,0 @@
# Released under the MIT License. See LICENSE for details.
#
"""Standard snippets that can be pulled into project pcommand scripts.
A snippet is a mini-program that directly takes input from stdin and does
some focused task. This module is a repository of common snippets that can
be imported into projects' pcommand script for easy reuse.
"""
from __future__ import annotations
import sys
from typing import TYPE_CHECKING
if TYPE_CHECKING:
pass
def with_build_lock() -> None:
    """Run a shell command wrapped in a build-lock."""
    from efro.error import CleanError
    from efrotools.buildlock import BuildLock
    import subprocess

    args = sys.argv[2:]
    if len(args) < 2:
        raise CleanError(
            'Expected one lock-name arg and at least one command arg'
        )
    lockname, cmdargs = args[0], args[1:]
    # Hold the named lock for the duration of the shell command.
    with BuildLock(lockname):
        subprocess.run(' '.join(cmdargs), check=True, shell=True)
def sortlines() -> None:
    """Sort provided lines. For tidying import lists, etc."""
    from efro.error import CleanError

    if len(sys.argv) != 3:
        raise CleanError('Expected 1 arg.')
    # Case-insensitive sort, preserving original case in the output.
    print('\n'.join(sorted(sys.argv[2].splitlines(), key=str.lower)))
def openal_build_android() -> None:
    """Build openalsoft for android."""
    from efro.error import CleanError
    from efrotools.openalbuild import build

    args = sys.argv[2:]
    if len(args) != 2:
        raise CleanError(
            'Expected one <ARCH> arg: arm, arm64, x86, x86_64'
            ' and one <MODE> arg: debug, release'
        )
    arch, buildmode = args
    build(arch, buildmode)
def openal_gather() -> None:
    """Gather built openalsoft libs into src."""
    from efro.error import CleanError
    from efrotools.openalbuild import gather

    if sys.argv[2:]:
        raise CleanError('No args expected.')
    gather()
def pyright() -> None:
    """Run Pyright checks on project Python code."""
    import subprocess
    from efro.terminal import Clr
    from efro.error import CleanError

    print(f'{Clr.BLU}Running Pyright (experimental)...{Clr.RST}')
    cmd = ['pyright', '--project', '.pyrightconfig.json']
    try:
        subprocess.run(cmd, check=True)
    except Exception as exc:
        raise CleanError('Pyright failed.') from exc

View File

@ -0,0 +1,450 @@
# Released under the MIT License. See LICENSE for details.
#
"""Wrangles pcommandbatch; an efficient way to run small pcommands.
The whole purpose of pcommand is to be a lightweight way to run small
snippets of Python to do bits of work in a project. The pcommand script
tries to minimize imports and work done in order to keep runtime as
short as possible. However, even an 'empty' pcommand still takes a
fraction of a second due to the time needed to spin up Python and import
a minimal set of modules. This can add up for large builds where
hundreds or thousands of pcommands are being run.
To help fight that problem, pcommandbatch introduces a way to run
pcommands by submitting requests to a temporary local server daemon.
This allows individual pcommand calls to go through a very lightweight
client binary that simply forwards the command to a running server.
This cuts minimal client runtime down to nearly zero. Building and
managing the server and client are handled automatically, and systems
which are unable to compile a client binary can fall back to using
vanilla pcommand in those cases.
A few considerations must be made when writing pcommands that work
in batch mode. By default, all existing pcommands have been fitted with
a disallow_in_batch() call which triggers an error under batch mode.
These calls should be removed if/when each call is updated to work
cleanly in batch mode. Requirements for batch-friendly pcommands follow:
- Batch mode runs parallel pcommands in different background threads.
Commands must be ok with that.
- Batch-enabled pcommands must not look at sys.argv. They should instead
use pcommand.get_args(). Be aware that this value does not include
the first two values from sys.argv (executable path and pcommand name)
so is generally cleaner to use anyway. Also be aware that args are
thread-local, so only call get_args() from the thread your pcommand
was called in.
- Batch-enabled pcommands should not call os.chdir() or sys.exit() or
anything else having global effects. This should be self-explanatory
considering the shared server model in use.
- Standard print and log calls will wind up in the pcommandbatch server
log and will not be seen by the user or capturable by the calling
process. By default, only a return code is passed back to the client,
where an error messages instructs the user to refer to the log or run
again with batch mode disabled. Commands that should print some output
even in batch mode can use pcommand.set_output() to do so. Note that
this is currently limited to small bits of output (but that can be
changed).
"""
from __future__ import annotations
import os
import sys
import time
import json
import asyncio
import tempfile
import subprocess
from typing import TYPE_CHECKING
import filelock
from efro.error import CleanError
from efro.terminal import Clr
if TYPE_CHECKING:
pass
# Enable debug-mode, in which server commands are *not* spun off into
# daemons. This means some commands will block waiting for background
# servers they launched to exit, but it can make everything easier to
# debug as a whole since all client and server output will go a single
# terminal.
# (Fix: the fallback default was the int 0 compared against the str '1';
# it only worked via the type mismatch. Use a '0' string default.)
DEBUG = os.environ.get('BA_PCOMMANDBATCH_DEBUG', '0') == '1'

# Enable extra logging during server runs/etc. Debug mode implicitly
# enables this as well.
VERBOSE = DEBUG or os.environ.get('BA_PCOMMANDBATCH_VERBOSE', '0') == '1'
def build_pcommandbatch(inpaths: list[str], outpath: str) -> None:
    """Create the binary or link regular pcommand."""

    # Quietly attempt to build the batch binary; if anything at all goes
    # wrong, just symlink plain old pcommand into place instead. That
    # works everywhere; it's simply slower.
    verbose_build = os.environ.get('BA_PCOMMANDBATCH_BUILD_VERBOSE') == '1'
    try:
        # TEMP - clean up old path (our dir used to be just a binary).
        outdir = os.path.dirname(outpath)
        if os.path.isfile(outdir):
            os.remove(outdir)
        if os.path.islink(outpath):
            os.unlink(outpath)
        os.makedirs(outdir, exist_ok=True)
        # Note: this is kinda a project-specific path; perhaps we'd
        # want to specify this in project-config?
        subprocess.run(
            ['cc', *inpaths, '-o', outpath],
            check=True,
            capture_output=not verbose_build,
        )
    except Exception:
        print(
            f'{Clr.YLW}Warning: Unable to build pcommandbatch executable;'
            f' falling back to regular pcommand. Build with env var'
            f' BA_PCOMMANDBATCH_BUILD_VERBOSE=1 to see what went wrong.'
            f'{Clr.RST}',
            file=sys.stderr,
        )
        subprocess.run(
            ['ln', '-sf', '../../tools/pcommand', outpath], check=True
        )
def run_pcommandbatch_server(
    idle_timeout_secs: int, state_dir: str, instance: str
) -> None:
    """Run a server for handling batches of pcommands.

    If a matching instance is already running, is a no-op.
    """
    import daemon

    # Be aware that when running without daemons, various build commands
    # will block waiting for the server processes that they spawned to
    # exit. It can be worth it to debug things with everything spitting
    # output to the same terminal though.
    use_daemon = not DEBUG
    server = Server(
        idle_timeout_secs=idle_timeout_secs,
        state_dir=state_dir,
        instance=instance,
        daemon=use_daemon,
    )
    if not use_daemon:
        server.run()
        return
    # Our stdout/stderr should already be directed to a file so we can
    # just keep the existing ones.
    with daemon.DaemonContext(
        working_directory=os.getcwd(), stdout=sys.stdout, stderr=sys.stderr
    ):
        server.run()
class IdleError(RuntimeError):
    """Raised internally so the server can shut down peacefully."""
class Server:
    """A server that handles requests from pcommandbatch clients.

    Listens on an ephemeral localhost port, advertises that port via a
    state file under state_dir, runs client pcommands in worker threads,
    and exits after idling for roughly idle_timeout_secs.
    """
    def __init__(
        self,
        idle_timeout_secs: int,
        state_dir: str,
        instance: str,
        daemon: bool,
    ) -> None:
        self._daemon = daemon
        self._state_dir = state_dir
        self._idle_timeout_secs = idle_timeout_secs
        # State file holds our port; its mod-time doubles as a freshness
        # stamp for clients and for competing server spinups.
        self._worker_state_file_path = f'{state_dir}/worker_state_{instance}'
        self._worker_log_file_path = f'{self._state_dir}/worker_log_{instance}'
        self._client_count_since_last_check = 0
        self._running_client_count = 0
        self._port: int | None = None
        self._pid = os.getpid()
        self._next_request_id = 0
        self._instance = instance
        # File-lock serializing spinup attempts for this state-dir.
        self._spinup_lock_path = f'{self._state_dir}/lock'
        self._spinup_lock = filelock.FileLock(self._spinup_lock_path)
        # When set, handed back to clients instead of running their command.
        self._server_error: str | None = None
    # def __del__(self) -> None:
    #     if self._spinup_lock.is_locked:
    #         self._spinup_lock.release()
    #     pass
    def run(self) -> None:
        """Do the thing."""
        try:
            self._spinup_lock.acquire(timeout=10)
            if VERBOSE:
                print(
                    f'pcommandbatch server {self._instance}'
                    f' (pid {os.getpid()}) acquired spinup-lock'
                    f' at time {time.time():.3f}.',
                    file=sys.stderr,
                )
        except filelock.Timeout:
            # Attempt to error and inform clients if we weren't able to
            # acquire the file-lock. Unfortunately I can't really test this
            # case because file-lock releases itself in its destructor.
            if VERBOSE:
                print(
                    f'pcommandbatch server {self._instance}'
                    f' (pid {os.getpid()}) timed out acquiring spinup-lock'
                    f' at time {time.time():.3f}; this should not happen.',
                    file=sys.stderr,
                )
            self._server_error = (
                f'Error: pcommandbatch unable to acquire file-lock at'
                f' {self._spinup_lock_path}. Something is probably broken.'
            )
        # In daemon mode we get multiple processes dumping to the same
        # instance log file. We want to try and clear the log whenever a
        # new batch run starts so it doesn't grow infinitely. So let's
        # have any holder of the spinup lock (including aborted spinups)
        # truncate it if it appears to have been idle long enough to
        # have shut down.
        if self._daemon:
            try:
                existing_log_age = int(
                    time.time() - os.path.getmtime(self._worker_log_file_path)
                )
                if existing_log_age > self._idle_timeout_secs:
                    # NOTE(review): assumes our stderr is already redirected
                    # to the worker log file — confirm against the launcher.
                    sys.stderr.truncate(0)
            except FileNotFoundError:
                pass
        # If there's an existing file younger than idle-seconds,
        # consider it still valid and abort our creation.
        try:
            existing_age = int(
                time.time() - os.path.getmtime(self._worker_state_file_path)
            )
            if existing_age <= self._idle_timeout_secs:
                if VERBOSE:
                    print(
                        f'pcommandbatch server {self._instance}'
                        f' (pid {os.getpid()}) found existing batch'
                        f' server (age {existing_age})'
                        f' at time {time.time():.3f};'
                        f' aborting run...',
                        file=sys.stderr,
                    )
                return
        except FileNotFoundError:
            # No state; no problem. Keep spinning up ours.
            if VERBOSE:
                print(
                    f'pcommandbatch server {self._instance}'
                    f' (pid {os.getpid()})'
                    f' found no existing batch server at time'
                    f' {time.time():.3f};'
                    f' proceeding with run...',
                    file=sys.stderr,
                )
        asyncio.run(self._run())
    async def _run(self) -> None:
        """Do the thing."""
        import efrotools.pcommand
        # Tell the running pcommand that we're the captain now.
        efrotools.pcommand.enter_batch_server_mode()
        # Port 0 lets the OS pick a free ephemeral port for us.
        server = await asyncio.start_server(self._handle_client, '127.0.0.1', 0)
        self._port = server.sockets[0].getsockname()[1]
        print(
            f'pcommandbatch server {self._instance} (pid {self._pid})'
            f' running on port {self._port} at time {time.time():.3f}...',
            file=sys.stderr,
        )
        # Write our first state and then unlock the spinup lock. New
        # spinup attempts will now see that we're here and back off.
        self._update_worker_state_file(-1)
        if self._spinup_lock.is_locked:
            self._spinup_lock.release()
        # Now run until our upkeep task kills us.
        try:
            await asyncio.gather(
                asyncio.create_task(
                    self._upkeep_task(), name='pcommandbatch upkeep'
                ),
                server.serve_forever(),
            )
        except IdleError:
            pass
        print(
            f'pcommandbatch server {self._instance} (pid {self._pid})'
            f' exiting at time {time.time():.3f}.',
            file=sys.stderr,
        )
    async def _handle_client(
        self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
    ) -> None:
        """Handle a client."""
        from efrotools.pcommand import run_client_pcommand
        request_id = self._next_request_id
        self._next_request_id += 1
        self._client_count_since_last_check += 1
        self._running_client_count += 1
        try:
            # The client sends its full argv as a json list; an
            # empty/invalid payload raises here.
            argv: list[str] = json.loads((await reader.read()).decode())
            assert isinstance(argv, list)
            assert all(isinstance(i, str) for i in argv)
            print(
                f'pcommandbatch server {self._instance} (pid {self._pid})'
                f' handling request {request_id} at time {time.time():.3f}:'
                f' {argv}.',
                file=sys.stderr,
            )
            try:
                if self._server_error is not None:
                    resultcode = 1
                    output = self._server_error
                else:
                    # Run the pcommand in a worker thread so the event
                    # loop stays responsive.
                    (
                        resultcode,
                        output,
                    ) = await asyncio.get_running_loop().run_in_executor(
                        None,
                        lambda: run_client_pcommand(
                            argv, self._worker_log_file_path
                        ),
                    )
                if VERBOSE:
                    print(
                        f'pcommandbatch server {self._instance}'
                        f' (pid {self._pid})'
                        f' request {request_id} finished with code'
                        f' {resultcode}.',
                        file=sys.stderr,
                    )
            except Exception as exc:
                if VERBOSE:
                    print(
                        f'pcommandbatch server {self._instance}'
                        f' (pid {self._pid}):'
                        f'error on request {request_id}: {exc}.',
                        file=sys.stderr,
                    )
                resultcode = 1
                output = ''
            # Reply with a small json dict: 'r' result code, 'o' output.
            writer.write(json.dumps({'r': resultcode, 'o': output}).encode())
            writer.close()
            await writer.wait_closed()
        finally:
            self._running_client_count -= 1
            assert self._running_client_count >= 0
    async def _upkeep_task(self) -> None:
        """Handle timeouts, updating port file timestamp, etc."""
        start_time = time.monotonic()
        abs_timeout_secs = 60 * 5
        idle_secs = 0
        idle_buffer = 5
        while True:
            await asyncio.sleep(1.0)
            now = time.monotonic()
            since_start = now - start_time
            # Whenever we've run client(s) within the last second, we
            # reset our idle time and freshen our state file so clients
            # know they can still use us.
            # Consider ourself idle if there are no currently running
            # jobs AND nothing has been run since our last check. This
            # covers both long running jobs and super short ones that
            # would otherwise slip between our discrete checks.
            if (
                self._client_count_since_last_check
                or self._running_client_count
            ):
                idle_secs = 0
                self._update_worker_state_file(idle_secs)
            else:
                idle_secs += 1
                if VERBOSE:
                    print(
                        f'pcommandbatch server {self._instance}'
                        f' (pid {self._pid})'
                        f' idle {idle_secs}/'
                        f'{self._idle_timeout_secs + idle_buffer} seconds at'
                        f' time {int(time.time())}.',
                        file=sys.stderr,
                    )
            self._client_count_since_last_check = 0
            # Clients should stop trying to contact us when our state
            # file hits idle_timeout_secs in age, but we actually stay
            # alive for a few extra seconds extra just to make sure we
            # don't spin down right as someone is trying to use us.
            if idle_secs >= self._idle_timeout_secs + idle_buffer:
                # This insta-kills our server so it should never be
                # happening while something is running.
                if self._running_client_count:
                    raise CleanError(
                        f'pcommandbatch server {self._instance}'
                        f' (pid {self._pid}):'
                        f' idle-exiting but have running_client_count'
                        f' {self._running_client_count}; something'
                        f' is probably broken.'
                    )
                raise IdleError()
            if since_start > abs_timeout_secs:
                raise CleanError(
                    f'pcommandbatch server {self._instance}'
                    f' (pid {self._pid}): max'
                    f' run-time of {abs_timeout_secs}s reached.'
                    ' Something is probably broken.'
                )
    def _update_worker_state_file(self, idle_secs: int) -> None:
        # Freshen the state file so clients know we're still alive.
        assert self._port is not None
        # Dump our port to a temp file and move it into place.
        # Hopefully this will be nice and atomic.
        if VERBOSE:
            print(
                f'pcommandbatch server {self._instance} (pid {self._pid})'
                f' refreshing state file {self._worker_state_file_path}'
                f' with port {self._port} and idle-secs {idle_secs}'
                f' at time {time.time():.3f}.',
                file=sys.stderr,
            )
        with tempfile.TemporaryDirectory() as tempdir:
            outpath = os.path.join(tempdir, 'f')
            with open(outpath, 'w', encoding='utf-8') as outfile:
                outfile.write(json.dumps({'p': self._port}))
            subprocess.run(
                ['mv', outpath, self._worker_state_file_path], check=True
            )

View File

@ -0,0 +1,830 @@
# Released under the MIT License. See LICENSE for details.
#
"""A set of lovely pcommands ready for use."""
from __future__ import annotations
import sys
from typing import TYPE_CHECKING
from efrotools import pcommand
if TYPE_CHECKING:
pass
def _spelling(words: list[str]) -> None:
    """Insert the given words into any JetBrains dictionaries present."""
    from efrotools.code import sort_jetbrains_dict
    import os

    pcommand.disallow_in_batch()
    dict_paths = (
        '.idea/dictionaries/ericf.xml',
        'ballisticakit-cmake/.idea/dictionaries/ericf.xml',
    )
    modded_count = 0
    for path in dict_paths:
        if not os.path.exists(path):
            continue
        with open(path, encoding='utf-8') as infile:
            dictlines = infile.read().splitlines()
        # Sanity-check that the file looks like we expect.
        if dictlines[2] != ' <words>':
            raise RuntimeError('Unexpected dictionary format.')
        added_count = 0
        for wrd in words:
            entry = f' <w>{wrd.lower()}</w>'
            if entry in dictlines:
                continue
            dictlines.insert(3, entry)
            added_count += 1
        with open(path, 'w', encoding='utf-8') as outfile:
            outfile.write(sort_jetbrains_dict('\n'.join(dictlines)))
        print(f'Added {added_count} words to {path}.')
        modded_count += 1
    print(f'Modified {modded_count} dictionaries.')
def pur() -> None:
    """Run pur using project's Python version."""
    import subprocess

    pcommand.disallow_in_batch()

    cmd = [sys.executable, '-m', 'pur', *sys.argv[2:]]
    subprocess.run(cmd, check=True)
def spelling_all() -> None:
    """Harvest all misspellings reported by a full pycharm run."""
    import subprocess

    pcommand.disallow_in_batch()

    print('Running "make pycharm-full"...')
    result = subprocess.run(
        ['make', 'pycharm-full'], check=False, capture_output=True
    )
    typo_lines = [
        line
        for line in result.stdout.decode().splitlines()
        if 'Typo: In word' in line
    ]
    words: list[str] = []
    for tline in typo_lines:
        word = tline.split('Typo: In word')[1].strip()
        # Strip enclosing quotes but not internal ones.
        assert word[0] == "'"
        assert word[-1] == "'"
        words.append(word[1:-1])
    _spelling(words)
def spelling() -> None:
    """Add the provided args as words to the PyCharm dictionary."""
    pcommand.disallow_in_batch()

    _spelling(list(sys.argv[2:]))
def xcodebuild() -> None:
    """Run xcodebuild with added smarts."""
    from efrotools.xcodebuild import XCodeBuild

    pcommand.disallow_in_batch()

    build = XCodeBuild(projroot=str(pcommand.PROJROOT), args=sys.argv[2:])
    build.run()
def xcoderun() -> None:
    """Run an xcode build in the terminal.

    Expects three args: the xcode project path, a scheme name, and a
    configuration name.
    """
    import os
    import subprocess

    from efro.error import CleanError
    from efrotools.xcodebuild import project_build_path

    pcommand.disallow_in_batch()

    if len(sys.argv) != 5:
        # Bug fix: previous message claimed 3 args but listed only two
        # of them (the <scheme> placeholder was missing).
        raise CleanError(
            'Expected 3 args:'
            ' <xcode project path> <scheme> <configuration name>'
        )
    project_path = os.path.abspath(sys.argv[2])
    scheme = sys.argv[3]
    configuration = sys.argv[4]

    # Locate the built binary for this project/scheme/config and run it.
    path = project_build_path(
        projroot=str(pcommand.PROJROOT),
        project_path=project_path,
        scheme=scheme,
        configuration=configuration,
    )
    subprocess.run(path, check=True)
def pyver() -> None:
    """Print the Python version used by this project (no newline)."""
    from efrotools import PYVER

    pcommand.disallow_in_batch()

    sys.stdout.write(PYVER)
def try_repeat() -> None:
    """Run a command with repeat attempts on failure.

    First arg is the number of retries; remaining args are the command.
    """
    import subprocess

    from efro.error import CleanError

    pcommand.disallow_in_batch()

    # We require one number arg and at least one command arg.
    if len(sys.argv) < 4:
        raise CleanError(
            'Expected a retry-count arg and at least one command arg'
        )
    try:
        repeats = int(sys.argv[2])
    except Exception:
        raise CleanError('Expected int as first arg') from None
    if repeats < 0:
        raise CleanError('Retries must be >= 0')
    cmd = sys.argv[3:]

    attempts = repeats + 1
    for attempt in range(1, attempts + 1):
        if subprocess.run(cmd, check=False).returncode == 0:
            return
        print(
            f'try_repeat attempt {attempt} of {attempts} failed for {cmd}.',
            file=sys.stderr,
            flush=True,
        )
    raise CleanError(f'Command failed {attempts} time(s): {cmd}')
def check_clean_safety() -> None:
    """Ensure everything is either tracked by git or gitignored.

    Run this before cleans to avoid losing work that was never added.
    """
    import os
    import subprocess

    from efro.error import CleanError

    pcommand.disallow_in_batch()

    if len(sys.argv) != 2:
        raise CleanError('invalid arguments')

    # Refuse to proceed if git sees any untracked (non-ignored) files;
    # a clean would delete them.
    status = subprocess.check_output(
        ['git', 'status', '--porcelain=v2']
    ).decode()
    if any(statusline.startswith('?') for statusline in status.splitlines()):
        raise CleanError(
            'Untracked file(s) found; aborting.'
            f' (see "git status" from "{os.getcwd()}")'
            " Either 'git add' them, add them to .gitignore,"
            ' or remove them and try again.'
        )
def gen_empty_py_init() -> None:
    """Write a (nearly) empty package __init__.py.

    Used as part of meta builds.
    """
    from pathlib import Path

    from efro.terminal import Clr
    from efro.error import CleanError

    pcommand.disallow_in_batch()

    if len(sys.argv) != 3:
        raise CleanError('Expected a single path arg.')
    outpath = Path(sys.argv[2])
    outpath.parent.mkdir(parents=True, exist_ok=True)
    print(f'Meta-building {Clr.BLD}{outpath}{Clr.RST}')
    outpath.write_text(
        '# This file is autogenerated; do not hand-edit.\n', encoding='utf-8'
    )
def formatcode() -> None:
    """Run formatting over the project's C/C++/etc. sources."""
    import efrotools.code

    pcommand.disallow_in_batch()

    efrotools.code.format_project_cpp_files(
        pcommand.PROJROOT, '-full' in sys.argv
    )
def formatscripts() -> None:
    """Run formatting over the project's Python/etc. sources."""
    import efrotools.code

    pcommand.disallow_in_batch()

    efrotools.code.format_project_python_files(
        pcommand.PROJROOT, '-full' in sys.argv
    )
def formatmakefile() -> None:
    """Format the main makefile."""
    from efrotools.makefile import Makefile

    # Consistency fix: check batch compatibility up front before doing
    # any work, matching every other pcommand in this module (it
    # previously ran after the Makefile had already been read).
    pcommand.disallow_in_batch()

    with open('Makefile', encoding='utf-8') as infile:
        original = infile.read()

    formatted = Makefile(original).get_output()

    # Only write if it changed.
    if formatted != original:
        with open('Makefile', 'w', encoding='utf-8') as outfile:
            outfile.write(formatted)
def cpplint() -> None:
    """Run lint-checking on all code deemed lint-able."""
    import efrotools.code

    pcommand.disallow_in_batch()

    efrotools.code.check_cpplint(pcommand.PROJROOT, '-full' in sys.argv)
def scriptfiles() -> None:
    """List project script files.

    Pass -lines to use newlines as separators. The default is spaces.
    """
    import efrotools.code

    pcommand.disallow_in_batch()

    paths = efrotools.code.get_script_filenames(projroot=pcommand.PROJROOT)
    # Space-separated output would be ambiguous if a path had spaces.
    assert all(' ' not in path for path in paths)
    sep = '\n' if '-lines' in sys.argv else ' '
    print(sep.join(paths))
def pylint() -> None:
    """Run pylint checks on our scripts."""
    import efrotools.code

    pcommand.disallow_in_batch()

    efrotools.code.pylint(
        pcommand.PROJROOT, '-full' in sys.argv, '-fast' in sys.argv
    )
def pylint_files() -> None:
    """Run pylint checks on provided filenames."""
    from efro.terminal import Clr
    from efro.error import CleanError

    import efrotools.code

    pcommand.disallow_in_batch()

    fnames = sys.argv[2:]
    if not fnames:
        raise CleanError('Expected at least 1 filename arg.')
    efrotools.code.runpylint(pcommand.PROJROOT, fnames)
    print(f'{Clr.GRN}Pylint Passed.{Clr.RST}')
def mypy() -> None:
    """Run mypy checks on our scripts."""
    import efrotools.code

    pcommand.disallow_in_batch()

    efrotools.code.mypy(pcommand.PROJROOT, '-full' in sys.argv)
def mypy_files() -> None:
    """Run mypy checks on provided filenames."""
    from efro.terminal import Clr
    from efro.error import CleanError

    import efrotools.code

    pcommand.disallow_in_batch()

    fnames = sys.argv[2:]
    if not fnames:
        raise CleanError('Expected at least 1 filename arg.')
    try:
        efrotools.code.mypy_files(pcommand.PROJROOT, fnames)
        print(f'{Clr.GRN}Mypy Passed.{Clr.RST}')
    except Exception as exc:
        raise CleanError('Mypy Failed.') from exc
def dmypy() -> None:
    """Run mypy checks on our scripts via the (faster) mypy daemon."""
    import efrotools.code

    pcommand.disallow_in_batch()

    efrotools.code.dmypy(pcommand.PROJROOT)
def pycharm() -> None:
    """Run PyCharm checks on our scripts."""
    import efrotools.code

    pcommand.disallow_in_batch()

    efrotools.code.check_pycharm(
        pcommand.PROJROOT, '-full' in sys.argv, '-v' in sys.argv
    )
def clioncode() -> None:
    """Run CLion checks on our code."""
    import efrotools.code

    pcommand.disallow_in_batch()

    efrotools.code.check_clioncode(
        pcommand.PROJROOT, '-full' in sys.argv, '-v' in sys.argv
    )
def androidstudiocode() -> None:
    """Run Android Studio checks on our code."""
    import efrotools.code

    pcommand.disallow_in_batch()

    efrotools.code.check_android_studio(
        pcommand.PROJROOT, '-full' in sys.argv, '-v' in sys.argv
    )
def tool_config_install() -> None:
    """Install a tool config file (with some filtering)."""
    from pathlib import Path

    from efro.error import CleanError

    import efrotools.toolconfig

    pcommand.disallow_in_batch()

    if len(sys.argv) != 4:
        raise CleanError('expected 2 args')
    srcpath, dstpath = Path(sys.argv[2]), Path(sys.argv[3])
    efrotools.toolconfig.install_tool_config(
        pcommand.PROJROOT, srcpath, dstpath
    )
def sync_all() -> None:
    """Runs full syncs between all efrotools projects.

    This list is defined in the EFROTOOLS_SYNC_PROJECTS env var.
    This assumes that there is a 'sync-full' and 'sync-list' Makefile target
    under each project.
    """
    import os
    import subprocess
    import concurrent.futures

    from efro.error import CleanError
    from efro.terminal import Clr

    pcommand.disallow_in_batch()

    print(f'{Clr.BLD}Updating formatting for all projects...{Clr.RST}')
    projects_str = os.environ.get('EFROTOOLS_SYNC_PROJECTS')
    if projects_str is None:
        # Bug fix: this message previously named a nonexistent
        # 'EFROTOOL_SYNC_PROJECTS' var (missing the S).
        raise CleanError('EFROTOOLS_SYNC_PROJECTS is not defined.')
    projects = projects_str.split(':')

    def _format_project(fproject: str) -> None:
        # Run each project's own formatting target.
        fcmd = f'cd "{fproject}" && make format'
        subprocess.run(fcmd, shell=True, check=True)

    # No matter what we're doing (even if just listing), run formatting
    # in all projects before beginning. Otherwise if we do a sync and then
    # a preflight we'll often wind up getting out-of-sync errors due to
    # formatting changing after the sync.
    with concurrent.futures.ThreadPoolExecutor(
        max_workers=len(projects)
    ) as executor:
        # Converting this to a list will propagate any errors.
        list(executor.map(_format_project, projects))

    if len(sys.argv) > 2 and sys.argv[2] == 'list':
        # List mode
        for project in projects:
            cmd = f'cd "{project}" && make sync-list'
            print(cmd)
            subprocess.run(cmd, shell=True, check=True)
    else:
        # Real mode
        for i in range(2):
            if i == 0:
                print(
                    f'{Clr.BLD}Running sync pass 1'
                    f' (ensures all changes at dsts are pushed to src):'
                    f'{Clr.RST}'
                )
            else:
                print(
                    f'{Clr.BLD}Running sync pass 2'
                    f' (ensures latest src is pulled to all dsts):{Clr.RST}'
                )
            for project in projects:
                cmd = f'cd "{project}" && make sync-full'
                subprocess.run(cmd, shell=True, check=True)
    print(Clr.BLD + 'Sync-all successful!' + Clr.RST)
def sync() -> None:
    """Runs standard syncs between this project and others."""
    from efrotools import getprojectconfig
    from efrotools.sync import Mode, SyncItem, run_standard_syncs

    pcommand.disallow_in_batch()

    mode = Mode(sys.argv[2]) if len(sys.argv) > 2 else Mode.PULL

    # Sync-item definitions live in the project config.
    itemdicts = getprojectconfig(pcommand.PROJROOT).get('sync_items', [])
    run_standard_syncs(
        pcommand.PROJROOT, mode, [SyncItem(**d) for d in itemdicts]
    )
def compile_python_file() -> None:
    """Compile a pyc file for packaging.

    Creates hash-based PYC files at opt level 1 with hash checks
    defaulting to off, so we avoid timestamp issues and load-time hash
    costs (see PEP 552). Modders tweaking built-in scripts directly
    need to clear these caches or enable debug mode (or go through the
    asset build system, which properly recreates the .pyc files).
    """
    import os
    import py_compile

    from efro.error import CleanError

    args = pcommand.get_args()
    if len(args) != 1:
        raise CleanError('Expected a single arg.')
    fname = args[0]

    # Print project-relative path when possible.
    relpath = os.path.abspath(fname).removeprefix(f'{pcommand.PROJROOT}/')
    pcommand.set_output(f'Compiling script: {relpath}')

    py_compile.compile(
        fname,
        doraise=True,
        optimize=1,
        invalidation_mode=py_compile.PycInvalidationMode.UNCHECKED_HASH,
    )
def copy_python_file() -> None:
    """Copy a Python file into the build, leaving it read-only."""
    import os
    import shutil

    from efro.error import CleanError

    args = pcommand.get_args()
    if len(args) != 2:
        raise CleanError('Expected 2 args.')
    src, dst = args
    relpath = os.path.abspath(dst).removeprefix(f'{pcommand.PROJROOT}/')
    pcommand.set_output(f'Copying script: {relpath}')

    # Built files are made unwritable, so an existing one must be
    # removed before we can replace it.
    if os.path.exists(dst):
        os.unlink(dst)
    os.makedirs(os.path.dirname(dst), exist_ok=True)
    shutil.copyfile(src, dst)

    # Write-protect the result so we don't accidentally edit built
    # copies and then lose that work on the next build.
    os.chmod(dst, 0o444)
    assert os.path.exists(dst)
def pytest() -> None:
    """Run pytest with project environment set up properly."""
    import os
    import platform
    import subprocess

    from efrotools import getprojectconfig, PYTHON_BIN
    from efro.error import CleanError

    pcommand.disallow_in_batch()

    # Stuff the project's python paths into PYTHONPATH.
    pypaths = getprojectconfig(pcommand.PROJROOT).get('python_paths')
    if pypaths is None:
        raise CleanError('python_paths not found in project config.')
    pathsep = ';' if platform.system() == 'Windows' else ':'
    os.environ['PYTHONPATH'] = pathsep.join(pypaths)

    # Keep interpreters from scattering __pycache__ dirs everywhere;
    # those can screw up our builds.
    os.environ['PYTHONDONTWRITEBYTECODE'] = '1'

    # Flip on dev mode to hopefully be informed of more bad stuff
    # happening. https://docs.python.org/3/library/devmode.html
    os.environ['PYTHONDEVMODE'] = '1'

    # Do the thing.
    proc = subprocess.run(
        [PYTHON_BIN, '-m', 'pytest'] + sys.argv[2:], check=False
    )
    if proc.returncode != 0:
        sys.exit(proc.returncode)
def makefile_target_list() -> None:
    """Prints targets in a makefile.

    Takes a single argument: a path to a Makefile.
    """
    from dataclasses import dataclass
    from efro.error import CleanError
    from efro.terminal import Clr
    pcommand.disallow_in_batch()
    # A parsed makefile element: either a buildable 'target' or a
    # 'section' banner comment.
    @dataclass
    class _Entry:
        kind: str
        line: int
        title: str
    if len(sys.argv) != 3:
        raise CleanError('Expected exactly one filename arg.')
    with open(sys.argv[2], encoding='utf-8') as infile:
        lines = infile.readlines()
    # Walk upward through the run of '#' comment lines directly above
    # a target; the topmost comment of that run becomes its doc.
    def _docstr(lines2: list[str], linenum: int) -> str:
        doc = ''
        j = linenum - 1
        while j >= 0 and lines2[j].startswith('#'):
            doc = lines2[j][1:].strip()
            j -= 1
        if doc != '':
            return ' - ' + doc
        return doc
    print(
        '----------------------\n'
        'Available Make Targets\n'
        '----------------------'
    )
    entries: list[_Entry] = []
    for i, line in enumerate(lines):
        # Targets: 'name:' lines whose name is alphanumeric aside
        # from '-'/'_', skipping underscore-prefixed (private) ones.
        if (
            ':' in line
            and line.split(':')[0].replace('-', '').replace('_', '').isalnum()
            and not line.startswith('_')
        ):
            entries.append(
                _Entry(kind='target', line=i, title=line.split(':')[0])
            )
        # Section titles: banner comments shaped like '# Some Title #'.
        if (
            line.startswith('# ')
            and line.endswith(' #\n')
            and len(line.split()) > 2
        ):
            entries.append(
                _Entry(kind='section', line=i, title=line[1:-2].strip())
            )
    for i, entry in enumerate(entries):
        if entry.kind == 'section':
            # Don't print headers for empty sections.
            if i + 1 >= len(entries) or entries[i + 1].kind == 'section':
                continue
            print('\n' + entry.title + '\n' + '-' * len(entry.title))
        elif entry.kind == 'target':
            print(
                Clr.MAG
                + entry.title
                + Clr.BLU
                + _docstr(lines, entry.line)
                + Clr.RST
            )
def echo() -> None:
    """Echo with support for efro.terminal.Clr args (RED, GRN, BLU, etc).

    Prints a Clr.RST at the end so that can be omitted.
    """
    from efro.terminal import Clr

    pcommand.disallow_in_batch()

    clrnames = {n for n in dir(Clr) if n.isupper() and not n.startswith('_')}
    pieces: list[str] = []
    seen_word = False
    for arg in sys.argv[2:]:
        if arg in clrnames:
            # Color codes get injected without affecting word spacing.
            pieces.append(getattr(Clr, arg))
        else:
            if seen_word:
                pieces.append(' ')
            seen_word = True
            pieces.append(arg)
    pieces.append(Clr.RST)
    print(''.join(pieces))
def urandom_pretty() -> None:
    """Spits out urandom bytes formatted for source files.

    First arg is a byte count; optional second arg is a max line
    length (default 72).
    """
    # Note; this is not especially efficient. It should probably be
    # rewritten if ever needed in a performance-sensitive context.
    import os

    from efro.error import CleanError

    pcommand.disallow_in_batch()

    if len(sys.argv) not in (3, 4):
        raise CleanError(
            'Expected one arg (count) and possibly two (line len).'
        )
    size = int(sys.argv[2])
    linemax = 72 if len(sys.argv) < 4 else int(sys.argv[3])

    val = os.urandom(size)
    lines: list[str] = []
    line = b''
    # Greedily pack bytes into each line, breaking whenever the repr
    # of the line would exceed linemax.
    for i in range(len(val)):
        char = val[i : i + 1]
        if len(repr(line + char)) > linemax:
            lines.append(repr(line))
            line = b''
        line += char
    if line:
        lines.append(repr(line))

    # (Entries are already repr() strings; the old per-element str()
    # conversion here was a no-op.)
    bstr = '\n'.join(lines)
    print(f'({bstr})')
def tweak_empty_py_files() -> None:
    """Find any zero-length Python files under a path and pad them to 1 byte."""
    from efro.error import CleanError

    import efrotools.pybuild

    pcommand.disallow_in_batch()

    if len(sys.argv) != 3:
        raise CleanError('Expected exactly 1 path arg.')
    path = sys.argv[2]
    efrotools.pybuild.tweak_empty_py_files(path)
def make_ensure() -> None:
    """Make sure a makefile target is up-to-date.

    This can technically be done by simply `make --question`, but this
    has some extra bells and whistles such as printing some of the
    commands that would run.

    Can be useful to run after cloud-builds to ensure the local results
    consider themselves up-to-date.
    """
    # pylint: disable=too-many-locals
    from efro.error import CleanError
    from efro.terminal import Clr
    import subprocess
    pcommand.disallow_in_batch()
    # Optional '--dir <path>' pair selects the directory to run in.
    dirpath: str | None = None
    args = sys.argv[2:]
    if '--dir' in args:
        argindex = args.index('--dir')
        dirpath = args[argindex + 1]
        del args[argindex : argindex + 2]
    if len(args) not in (0, 1):
        raise CleanError('Expected zero or one target args.')
    target = args[0] if args else None
    # Dry-run make; its output tells us what (if anything) would run.
    cmd = ['make', '--no-print-directory', '--dry-run']
    if target is not None:
        cmd.append(target)
    results = subprocess.run(cmd, check=False, capture_output=True, cwd=dirpath)
    out = results.stdout.decode()
    err = results.stderr.decode()
    if results.returncode != 0:
        print(f'Failed command stdout:\n{out}\nFailed command stderr:\n{err}')
        raise CleanError(f"Command failed during make_ensure: '{cmd}'.")
    targetname: str = '<default>' if target is None else target
    lines = out.splitlines()
    in_str = '' if dirpath is None else f"in directory '{dirpath}' "
    # Make prints this phrase (and nothing else) when nothing would run.
    if len(lines) == 1 and 'Nothing to be done for ' in lines[0]:
        print(f"make_ensure: '{targetname}' target {in_str}is up to date.")
    else:
        # Out of date: show a capped number of the commands that would
        # have run, then fail.
        maxlines = 20
        if len(lines) > maxlines:
            outlines = '\n'.join(
                lines[:maxlines] + [f'(plus {len(lines)-maxlines} more lines)']
            )
        else:
            outlines = '\n'.join(lines)
        print(
            f"make_ensure: '{targetname}' target {in_str}"
            f'is out of date; would run:\n\n'
            '-------------------------- MAKE-ENSURE COMMANDS BEGIN '
            f'--------------------------\n{Clr.YLW}'
            f'{outlines}{Clr.RST}\n'
            '--------------------------- MAKE-ENSURE COMMANDS END '
            '---------------------------\n'
        )
        raise CleanError(
            f"make_ensure: '{targetname}' target {in_str}is out of date."
        )
def make_target_debug() -> None:
    """Debug makefile src/target mod times given src and dst path.

    Built to debug stubborn Makefile targets that insist on being
    rebuilt just after being built via a cloud target.
    """
    import os
    import datetime

    from efro.error import CleanError

    pcommand.disallow_in_batch()

    args = sys.argv[2:]
    if len(args) != 2:
        raise CleanError('Expected 2 args.')

    def _utc_mod_time(path: str) -> datetime.datetime:
        # File mtime as an aware UTC datetime.
        return datetime.datetime.fromtimestamp(
            os.path.getmtime(path), datetime.timezone.utc
        )

    def _timestr(dtime: datetime.datetime) -> str:
        # Hour:minute:second:microsecond, intentionally un-padded to
        # match this command's historical output.
        return (
            f'{dtime.hour}:{dtime.minute}:{dtime.second}:'
            f'{dtime.microsecond}'
        )

    # (Dead commented-out ago-str experiments removed; the shared
    # formatting is now factored into _timestr.)
    print(f'SRC modified at {_timestr(_utc_mod_time(args[0]))}.')
    print(f'DST modified at {_timestr(_utc_mod_time(args[1]))}.')

View File

@ -0,0 +1,216 @@
# Released under the MIT License. See LICENSE for details.
#
"""Standard snippets that can be pulled into project pcommand scripts.
A snippet is a mini-program that directly takes input from stdin and does
some focused task. This module is a repository of common snippets that can
be imported into projects' pcommand script for easy reuse.
"""
from __future__ import annotations
import sys
from typing import TYPE_CHECKING
from efrotools import pcommand
if TYPE_CHECKING:
pass
def with_build_lock() -> None:
    """Run a shell command wrapped in a build-lock."""
    import subprocess

    from efro.error import CleanError
    from efrotools.buildlock import BuildLock

    pcommand.disallow_in_batch()

    args = sys.argv[2:]
    if len(args) < 2:
        raise CleanError(
            'Expected one lock-name arg and at least one command arg'
        )
    lockname, cmdargs = args[0], args[1:]
    with BuildLock(lockname):
        subprocess.run(' '.join(cmdargs), check=True, shell=True)
def sortlines() -> None:
    """Sort provided lines case-insensitively. For tidying import lists, etc."""
    from efro.error import CleanError

    pcommand.disallow_in_batch()

    if len(sys.argv) != 3:
        raise CleanError('Expected 1 arg.')
    print('\n'.join(sorted(sys.argv[2].splitlines(), key=str.lower)))
def openal_build_android() -> None:
    """Build openalsoft for android."""
    from efro.error import CleanError
    from efrotools.openalbuild import build

    pcommand.disallow_in_batch()

    args = sys.argv[2:]
    if len(args) != 2:
        raise CleanError(
            'Expected one <ARCH> arg: arm, arm64, x86, x86_64'
            ' and one <MODE> arg: debug, release'
        )
    arch, mode = args
    build(arch, mode)
def openal_gather() -> None:
    """Gather built openalsoft libs into src."""
    from efro.error import CleanError
    from efrotools.openalbuild import gather

    pcommand.disallow_in_batch()

    if sys.argv[2:]:
        raise CleanError('No args expected.')
    gather()
def pyright() -> None:
    """Run Pyright checks on project Python code."""
    import subprocess

    from efro.terminal import Clr
    from efro.error import CleanError

    pcommand.disallow_in_batch()

    print(f'{Clr.BLU}Running Pyright (experimental)...{Clr.RST}')
    cmd = ['pyright', '--project', '.pyrightconfig.json']
    try:
        subprocess.run(cmd, check=True)
    except Exception as exc:
        raise CleanError('Pyright failed.') from exc
def build_pcommandbatch() -> None:
    """Build a version of pcommand geared for large batches of commands."""
    from efro.error import CleanError
    from efro.terminal import Clr

    import efrotools.pcommandbatch as pcb

    pcommand.disallow_in_batch()

    args = pcommand.get_args()
    if len(args) < 2:
        raise CleanError('Expected at least 2 args.')
    # All args but the last are inputs; the last is the output path.
    *inpaths, outpath = args
    print(f'Creating batch executable: {Clr.BLD}{outpath}{Clr.RST}')
    pcb.build_pcommandbatch(inpaths, outpath)
def run_pcommandbatch_server() -> None:
    """Run a server for handling pcommands."""
    from efro.error import CleanError
    from efrotools import extract_arg

    import efrotools.pcommandbatch as pcb

    pcommand.disallow_in_batch()

    args = pcommand.get_args()
    timeout_secs = int(extract_arg(args, '--timeout', required=True))
    state_dir = extract_arg(args, '--state-dir', required=True)
    instance = extract_arg(args, '--instance', required=True)
    # Anything left over is a caller error.
    if args:
        raise CleanError(f'Unexpected args: {args}.')

    pcb.run_pcommandbatch_server(
        idle_timeout_secs=timeout_secs,
        state_dir=state_dir,
        instance=instance,
    )
def pcommandbatch_speed_test() -> None:
    """Test batch mode speeds.

    Takes one arg: the path to a pcommandbatch binary. Runs the 'null'
    command repeatedly via regular pcommand and via the batch binary
    and reports how many runs each completes in a fixed interval.
    """
    # pylint: disable=too-many-locals
    import time
    import subprocess
    import threading
    from multiprocessing import cpu_count
    from concurrent.futures import ThreadPoolExecutor
    from efro.error import CleanError
    from efro.terminal import Clr
    args = pcommand.get_args()
    if len(args) != 1:
        raise CleanError('Expected one arg.')
    batch_binary_path = args[0]
    thread_count = cpu_count()
    # Shared lock-guarded counters tracking outstanding and completed
    # runs across the worker threads.
    class _Test:
        def __init__(self) -> None:
            self.in_flight = 0
            self.lock = threading.Lock()
            self.total_runs = 0
        def run_standalone(self) -> None:
            """Run an instance of the test in standalone mode."""
            subprocess.run(['tools/pcommand', 'null'], check=True)
            self._finish_run()
        def run_batch(self) -> None:
            """Run an instance of the test in batch mode."""
            subprocess.run([batch_binary_path, 'null'], check=True)
            self._finish_run()
        def _finish_run(self) -> None:
            with self.lock:
                self.in_flight -= 1
                assert self.in_flight >= 0
                self.total_runs += 1
    test_duration = 5.0
    for name, batch in [('regular pcommand', False), ('pcommandbatch', True)]:
        print(f'{Clr.BLU}Testing {name} speed...{Clr.RST}')
        start_time = time.monotonic()
        test = _Test()
        total_runs_at_timeout = 0
        with ThreadPoolExecutor(max_workers=thread_count) as executor:
            # Keep submitting runs until the clock runs out, then
            # record how many completed.
            while True:
                # Try to keep all worker threads busy.
                while test.in_flight < thread_count * 2:
                    with test.lock:
                        test.in_flight += 1
                    executor.submit(
                        test.run_batch if batch else test.run_standalone
                    )
                if time.monotonic() - start_time > test_duration:
                    total_runs_at_timeout = test.total_runs
                    break
                time.sleep(0.0001)
        print(
            f'Total runs in {test_duration:.0f} seconds:'
            f' {Clr.SMAG}{Clr.BLD}{total_runs_at_timeout}{Clr.RST}.'
        )
def null() -> None:
    """No-op command; handy for speed tests and whatnot."""
    return None

View File

@ -4,23 +4,22 @@
"""A collection of commands for use with this project.
All top level functions here can be run by passing them as the first
argument on the command line. (or pass no arguments to get a list of them).
argument on the command line. (or pass no arguments to get a list of
them).
"""
# Note: we import as little as possible here at the module level to
# keep launch times fast; most imports should happen within individual command
# functions.
# Note: we import as little as possible here at the module level to keep
# launch times fast; most imports should happen within individual
# command functions.
from __future__ import annotations
from typing import TYPE_CHECKING
from efrotools import pcommand
# Pull in commands we want to expose. Its more efficient to define them in
# modules rather than inline here because we'll be able to load them via pyc.
# pylint: disable=unused-import
from efrotools.pcommand import (
PROJROOT,
pcommand_main,
# Pull in commands we want to expose. Its more efficient to define them
# in modules rather than inline here because we'll be able to load them
# via pyc. pylint: disable=unused-import
from efrotools.pcommands import (
formatcode,
formatscripts,
formatmakefile,
@ -42,7 +41,8 @@ from efrotools.pcommand import (
spelling_all,
pytest,
echo,
compile_python_files,
compile_python_file,
copy_python_file,
pyver,
try_repeat,
xcodebuild,
@ -51,14 +51,18 @@ from efrotools.pcommand import (
make_ensure,
make_target_debug,
)
from efrotools.pcommand2 import (
from efrotools.pcommands2 import (
with_build_lock,
sortlines,
openal_build_android,
openal_gather,
pyright,
build_pcommandbatch,
run_pcommandbatch_server,
pcommandbatch_speed_test,
null,
)
from batools.pcommand import (
from batools.pcommands import (
resize_image,
check_clean_safety,
archive_old_builds,
@ -103,8 +107,6 @@ from batools.pcommand import (
cmake_prep_dir,
gen_binding_code,
gen_flat_data_code,
wsl_path_to_win,
wsl_build_check_win_drive,
genchangelog,
android_sdk_utils,
logcat,
@ -112,7 +114,7 @@ from batools.pcommand import (
gen_dummy_modules,
version,
)
from batools.pcommand2 import (
from batools.pcommands2 import (
gen_python_init_module,
gen_monolithic_register_modules,
py_examine,
@ -124,12 +126,12 @@ from batools.pcommand2 import (
spinoff_test,
spinoff_check_submodule_parent,
tests_warm_start,
wsl_path_to_win,
wsl_build_check_win_drive,
)
# pylint: enable=unused-import
if TYPE_CHECKING:
pass
if __name__ == '__main__':
pcommand_main(globals())
pcommand.pcommand_main(globals())