Mirror of https://github.com/RYDE-WORK/ballistica.git (synced 2026-02-04 22:43:17 +08:00)

Commit: spinoff cleanup
Parent: c9d4a095bc
Commit hash: c12be782f7

.efrocachemap (generated): 56 lines changed
@@ -4056,26 +4056,26 @@
   "build/assets/windows/Win32/ucrtbased.dll": "2def5335207d41b21b9823f6805997f1",
   "build/assets/windows/Win32/vc_redist.x86.exe": "b08a55e2e77623fe657bea24f223a3ae",
   "build/assets/windows/Win32/vcruntime140d.dll": "865b2af4d1e26a1a8073c89acb06e599",
-  "build/prefab/full/linux_arm64_gui/debug/ballisticakit": "10ca339552ccefcc1cf0458d77ddfc60",
+  "build/prefab/full/linux_arm64_gui/debug/ballisticakit": "279d063ae54801d02d5f9739a8cf2fcb",
-  "build/prefab/full/linux_arm64_gui/release/ballisticakit": "220a8bae9a55c6625972254dd7265029",
+  "build/prefab/full/linux_arm64_gui/release/ballisticakit": "21fb39e8d8aba6d6027f3465ab5fbc6b",
-  "build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "54bde68d1f6c948a0fe1a3844b49fbc9",
+  "build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "c1dc7f237469dd36deffe5414a81a5c2",
-  "build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "d6908e5517980c48204c5d7d38b624b2",
+  "build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "13436c4ecb012fa616ab3b52d765a4a8",
-  "build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "0552bfb41850409763c3eb0824629c4f",
+  "build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "481e76f0d618ea949640779db8089e46",
-  "build/prefab/full/linux_x86_64_gui/release/ballisticakit": "3b92b9b7b6f25ec0b00ebdac6af8f7f7",
+  "build/prefab/full/linux_x86_64_gui/release/ballisticakit": "6452a4a354d36a5618275719046c45c0",
-  "build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "bf943bb02a696a97b8eb13173bd10381",
+  "build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "99ea8e8d810194730a58c1f61baf6bd4",
-  "build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "733c06666e6ac2bd49a645814820ae70",
+  "build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "9206a652925c58d5f54ac3b19202568b",
-  "build/prefab/full/mac_arm64_gui/debug/ballisticakit": "274f31c8955c9af9799edc72eef37184",
+  "build/prefab/full/mac_arm64_gui/debug/ballisticakit": "de160cabd194d8e1a08945e2a990c6e7",
-  "build/prefab/full/mac_arm64_gui/release/ballisticakit": "ced3d5b6e818c2c5901e6373d40583ba",
+  "build/prefab/full/mac_arm64_gui/release/ballisticakit": "45f1d057b67eea2b8937c0987f4600df",
-  "build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "c745ec776c3fa19166fab17b91541c02",
+  "build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "05fd1a0c61f766d01fdb4d8a6f21884a",
-  "build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "cf3751677f46d248d251b566b737c3ca",
+  "build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "652bb06c069ea03d82b20d94a1c0d8b0",
-  "build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "ab74e096ad474d389c4e20cc78f22e43",
+  "build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "90e139a76507c00deff3ea1dcf569935",
-  "build/prefab/full/mac_x86_64_gui/release/ballisticakit": "e54648fc2a899449baa081b55cb52588",
+  "build/prefab/full/mac_x86_64_gui/release/ballisticakit": "7314fa7928a4714dfb2c44bd0e976878",
-  "build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "882935d0e1e2a9a36dfb7e0a3d4a5170",
+  "build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "eb68e8ab4ee7c9bb9748ba72d92424ce",
-  "build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "342b58d91ed50562f9f8d8163f5f4441",
+  "build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "69c56cc9b9a373953008c5a3742c514a",
-  "build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "591cf3434d9ac490d416c9dcc60f051f",
+  "build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "ff043c91883c2c3fb71f64b744774232",
-  "build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "4ef7a5bbfef88bf2174a0001aaf9532d",
+  "build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "014cb8642e6fae3bf542117433bea233",
-  "build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "16253a3ef3ccc96250eb5a311caf14fe",
+  "build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "f640bd3d830548bf42960643f89a7564",
-  "build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "90ab514825fbb1e5c2a88297b19d95d2",
+  "build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "1a340e7924157c2e84b0cd3c3ac84a6b",
   "build/prefab/lib/linux_arm64_gui/debug/libballisticaplus.a": "11668f7191dc8d4e2b03db15ff2a4241",
   "build/prefab/lib/linux_arm64_gui/release/libballisticaplus.a": "b190199ce8b8fe122993d14fb7e8e813",
   "build/prefab/lib/linux_arm64_server/debug/libballisticaplus.a": "11668f7191dc8d4e2b03db15ff2a4241",
@@ -4092,14 +4092,14 @@
   "build/prefab/lib/mac_x86_64_gui/release/libballisticaplus.a": "4fab22abc481a97e884cce9d67b35c6b",
   "build/prefab/lib/mac_x86_64_server/debug/libballisticaplus.a": "7c536b056d78148a302c975bf9238118",
   "build/prefab/lib/mac_x86_64_server/release/libballisticaplus.a": "4fab22abc481a97e884cce9d67b35c6b",
-  "build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "be00cf9781b1c86439452c4ba0999c8d",
+  "build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "cf2978894cde54eed829e1c5b641c656",
-  "build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "5803cfddbaf5e09f513aded1a8fc71f5",
+  "build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "a9a883ae6660b516ab3da8202ecf8afd",
-  "build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "b682b559cef053626ee8ee9da79ad266",
+  "build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "dc18e0fbb1c7c42a1d2ae7dbe8104fc9",
-  "build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "8d467c3b01c4b2fd4977afd0aabb2bd0",
+  "build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "2b20efd81ca876dee30ec64917ae53fb",
-  "build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "748545de892e5ed6556ba0b05f9a0e9d",
+  "build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "093edb4112d70c03032d938d345ff0be",
-  "build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "2671eae8ad8f42eb272ffed0fc8e93df",
+  "build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "286e901c3f7395b790213295586ba619",
-  "build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "23712747790fbd36e33b9e59364accb7",
+  "build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "2aae411134c4eb841f28dc932baba0d3",
-  "build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "1832b257d7cd01ca68b7b99cbb62bfa6",
+  "build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "e9989960860d3371f4b17eec3d6c098a",
   "src/assets/ba_data/python/babase/_mgen/__init__.py": "f885fed7f2ed98ff2ba271f9dbe3391c",
   "src/assets/ba_data/python/babase/_mgen/enums.py": "b611c090513a21e2fe90e56582724e9d",
   "src/ballistica/base/mgen/pyembed/binding_base.inc": "72bfed2cce8ff19741989dec28302f3f",
@@ -1,9 +1,9 @@
-### 1.7.34 (build 21801, api 8, 2024-04-21)
+### 1.7.34 (build 21802, api 8, 2024-04-23)
 - Bumped Python version from 3.11 to 3.12 for all builds and project tools. One
   of the things this means is that we can use `typing.override` instead of the
-  `typing_extensions` version so the annoying workaround of installing
+  `typing_extensions` version so the annoying requirement of installing
   `typing_extensions` first thing when setting up the repo introduced a few
-  versions back is finally no longer needed.
+  versions back is finally no longer a thing.
 - The project now maintains its own Python virtual environment in `.venv` where
   it automatically installs whatever Python packages it needs instead of asking
   the user to do so in their own environment. This should greatly simplify
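For reference, `typing.override` ships in the standard library as of Python 3.12, which is why the separate `typing_extensions` install mentioned above is no longer needed. A minimal sketch (the class and method names here are made up for illustration):

from typing import override


class Base:
    def update(self) -> None:
        print('base update')


class Child(Base):
    @override  # A type checker flags this if Base has no matching update().
    def update(self) -> None:
        print('child update')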
@@ -28,6 +28,7 @@
   does not and you get errors.
 - `_bascenev1.protocol_version()` now properly throws an exception if called
   while scene-v1 is not active.
+- The `efro.dataclassio` system now supports `datetime.timedelta` values.
 
 ### 1.7.33 (build 21795, api 8, 2024-03-24)
 - Stress test input-devices are now a bit smarter; they won't press any buttons
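A minimal sketch of the new `datetime.timedelta` support mentioned in the changelog above, using the `dataclass_to_dict` / `dataclass_from_dict` calls and the `[days, seconds, microseconds]` list format that appear in the test and inputter/outputter hunks later in this commit. The `@ioprepped` decorator is assumed to be the usual `efro.dataclassio` prep step, and the `_Job`/`timeout` names are hypothetical:

import datetime
from dataclasses import dataclass

from efro.dataclassio import dataclass_from_dict, dataclass_to_dict, ioprepped


@ioprepped
@dataclass
class _Job:
    # Hypothetical field; any timedelta-typed dataclass field behaves the same.
    timeout: datetime.timedelta | None = None


job = _Job(timeout=datetime.timedelta(days=1, seconds=30))
out = dataclass_to_dict(job)
print(out)  # -> {'timeout': [1, 30, 0]}
assert dataclass_from_dict(_Job, out) == job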
Makefile: 12 lines changed

@@ -149,13 +149,13 @@ meta-clean:
 
 # Remove ALL files and directories that aren't managed by git (except for a
 # few things such as localconfig.json).
-clean:
+clean: prereqs
 	$(CHECK_CLEAN_SAFETY)
 	rm -rf build # Kill this ourself; can confuse git if contains other repos.
 	git clean -dfx $(ROOT_CLEAN_IGNORES)
 
 # Show what clean would delete without actually deleting it.
-clean-list:
+clean-list: prereqs
 	$(CHECK_CLEAN_SAFETY)
 	@echo Would remove build # Handle this part ourself; can confuse git.
 	git clean -dnx $(ROOT_CLEAN_IGNORES)
@@ -752,12 +752,12 @@ update-check: prereqs-pre-update
 ################################################################################
 
 # Run formatting on all files in the project considered 'dirty'.
-format:
+format: prereqs
 	@$(MAKE) -j$(CPUS) format-code format-scripts format-makefile
 	@$(PCOMMANDBATCH) echo BLD Formatting complete for $(notdir $(CURDIR))!
 
 # Same but always formats; ignores dirty state.
-format-full:
+format-full: prereqs
 	@$(MAKE) -j$(CPUS) format-code-full format-scripts-full format-makefile
 	@$(PCOMMANDBATCH) echo BLD Formatting complete for $(notdir $(CURDIR))!
 
@@ -933,6 +933,7 @@ test-rpc:
 preflight:
 	@$(MAKE) format
 	@$(MAKE) update
+	@$(MAKE) -j$(CPUS) py_check_prereqs # Needs to be done explicitly first.
 	@$(MAKE) -j$(CPUS) cpplint pylint mypy test
 	@$(PCOMMANDBATCH) echo SGRN BLD PREFLIGHT SUCCESSFUL!
 
@@ -940,6 +941,7 @@ preflight:
 preflight-full:
 	@$(MAKE) format-full
 	@$(MAKE) update
+	@$(MAKE) -j$(CPUS) py_check_prereqs # Needs to be done explicitly first.
 	@$(MAKE) -j$(CPUS) cpplint-full pylint-full mypy-full test-full
 	@$(PCOMMANDBATCH) echo SGRN BLD PREFLIGHT SUCCESSFUL!
 
@@ -947,6 +949,7 @@ preflight-full:
 preflight2:
 	@$(MAKE) format
 	@$(MAKE) update
+	@$(MAKE) -j$(CPUS) py_check_prereqs # Needs to be done explicitly first.
 	@$(MAKE) -j$(CPUS) cpplint pylint mypy test
 	@$(PCOMMANDBATCH) echo SGRN BLD PREFLIGHT SUCCESSFUL!
 
@@ -954,6 +957,7 @@ preflight2:
 preflight2-full:
 	@$(MAKE) format-full
 	@$(MAKE) update
+	@$(MAKE) -j$(CPUS) py_check_prereqs # Needs to be done explicitly first.
 	@$(MAKE) -j$(CPUS) cpplint-full pylint-full mypy-full test-full
 	@$(PCOMMANDBATCH) echo SGRN BLD PREFLIGHT SUCCESSFUL!
 
@@ -52,7 +52,7 @@ if TYPE_CHECKING:
 
 # Build number and version of the ballistica binary we expect to be
 # using.
-TARGET_BALLISTICA_BUILD = 21801
+TARGET_BALLISTICA_BUILD = 21802
 TARGET_BALLISTICA_VERSION = '1.7.34'
 
 

@@ -39,7 +39,7 @@ auto main(int argc, char** argv) -> int {
 namespace ballistica {
 
 // These are set automatically via script; don't modify them here.
-const int kEngineBuildNumber = 21801;
+const int kEngineBuildNumber = 21802;
 const char* kEngineVersion = "1.7.34";
 const int kEngineApiVersion = 8;
 
@@ -101,6 +101,7 @@ def test_assign() -> None:
         dictval: dict[int, str] = field(default_factory=dict)
         tupleval: tuple[int, str, bool] = (1, 'foo', False)
         datetimeval: datetime.datetime | None = None
+        timedeltaval: datetime.timedelta | None = None
 
     class _TestClass2:
         pass
@@ -116,10 +117,10 @@ def test_assign() -> None:
         dataclass_from_dict(_TestClass, None) # type: ignore
 
     now = utc_now()
+    tdelta = datetime.timedelta(days=123, seconds=456, microseconds=789)
 
-    # A dict containing *ALL* values should match what we
-    # get when creating a dataclass and then converting back
-    # to a dict.
+    # A dict containing *ALL* values should exactly match what we get
+    # when creating a dataclass and then converting back to a dict.
     dict1 = {
         'ival': 1,
         'sval': 'foo',
@@ -156,6 +157,7 @@ def test_assign() -> None:
             now.second,
             now.microsecond,
         ],
+        'timedeltaval': [tdelta.days, tdelta.seconds, tdelta.microseconds],
     }
     dc1 = dataclass_from_dict(_TestClass, dict1)
     assert dataclass_to_dict(dc1) == dict1
@@ -24,7 +24,8 @@ def generate_app_module(
     # pylint: disable=too-many-statements
     import textwrap
 
-    from efrotools import replace_section, getprojectconfig
+    from efrotools.util import replace_section
+    from efrotools.project import getprojectconfig
 
     out = ''
 
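The hunk above shows the pattern that repeats through the rest of this commit: helpers that used to be importable straight from the `efrotools` package now live in dedicated submodules. A summary sketch of the before/after imports, assembled from the hunks below (not an exhaustive list):

# Before this commit (helpers lived in efrotools/__init__.py):
#   from efrotools import getprojectconfig, getlocalconfig
#   from efrotools import replace_section, replace_exact, readfile
#   from efrotools import get_files_hash, get_string_hash
#   from efrotools import extract_arg, extract_flag
#   import efrotools  # efrotools.py_examine(...)

# After this commit:
from efrotools.project import getlocalconfig, getprojectconfig
from efrotools.util import (
    extract_arg,
    extract_flag,
    get_files_hash,
    get_string_hash,
    readfile,
    replace_exact,
    replace_section,
)
from efrotools.emacs import py_examine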
@@ -415,7 +415,7 @@ def generate_assets_makefile(
 ) -> dict[str, str]:
     """Main script entry point."""
     # pylint: disable=too-many-locals
-    from efrotools import getprojectconfig
+    from efrotools.project import getprojectconfig
     from pathlib import Path
 
     public = getprojectconfig(Path(projroot))['public']

@@ -14,7 +14,7 @@ from typing import TYPE_CHECKING
 from dataclasses import dataclass
 
 from efro.error import CleanError
-from efrotools import getprojectconfig
+from efrotools.project import getprojectconfig
 
 if TYPE_CHECKING:
     pass

@@ -126,7 +126,7 @@ def lazy_increment_build() -> None:
     import subprocess
     from efro.terminal import Clr
     from efro.error import CleanError
-    from efrotools import get_files_hash
+    from efrotools.util import get_files_hash
     from efrotools.code import get_code_filenames
 
     pcommand.disallow_in_batch()
@@ -221,7 +221,7 @@ def androidaddr() -> None:
 def push_ipa() -> None:
     """Construct and push ios IPA for testing."""
 
-    from efrotools import extract_arg
+    from efrotools.util import extract_arg
     import efrotools.ios
 
     pcommand.disallow_in_batch()
@@ -493,7 +493,7 @@ def warm_start_asset_build() -> None:
     import subprocess
     from pathlib import Path
 
-    from efrotools import getprojectconfig
+    from efrotools.project import getprojectconfig
     from efro.error import CleanError
 
     pcommand.disallow_in_batch()

@@ -124,7 +124,7 @@ def py_examine() -> None:
     """Run a python examination at a given point in a given file."""
     import os
     from pathlib import Path
-    import efrotools
+    import efrotools.emacs
 
     pcommand.disallow_in_batch()
 
@@ -153,7 +153,7 @@ def py_examine() -> None:
         sys.path.append(scriptsdir)
     if toolsdir not in sys.path:
         sys.path.append(toolsdir)
-    efrotools.py_examine(
+    efrotools.emacs.py_examine(
         pcommand.PROJROOT, filename, line, column, selection, operation
     )
 

@@ -587,7 +587,7 @@ def check_sync_states(self: ProjectUpdater) -> None:
 
 def check_misc(self: ProjectUpdater) -> None:
     """Check misc project stuff."""
-    from efrotools import readfile, replace_exact
+    from efrotools.util import readfile, replace_exact
 
     # Make sure we're set to prod master server. (but ONLY when
     # checking; still want to be able to run updates).

@@ -9,7 +9,7 @@ from pathlib import Path
 from typing import TYPE_CHECKING
 from dataclasses import dataclass
 
-from efrotools import getprojectconfig, getlocalconfig
+from efrotools.project import getprojectconfig, getlocalconfig
 from efro.error import CleanError
 from efro.terminal import Clr
 

@@ -57,7 +57,7 @@ class ResourcesMakefileGenerator:
         existing_data: str,
         projname: str,
     ) -> None:
-        from efrotools import getprojectconfig
+        from efrotools.project import getprojectconfig
 
         self.public = getprojectconfig(Path(projroot))['public']
         assert isinstance(self.public, bool)

@@ -15,7 +15,8 @@ from pathlib import Path
 from typing import TYPE_CHECKING, assert_never
 
 from efrotools.code import format_python_str, format_cpp_str
-from efrotools import getprojectconfig, replace_exact
+from efrotools.project import getprojectconfig
+from efrotools.util import replace_exact
 from efro.error import CleanError
 from efro.terminal import Clr
 from efro.util import timedelta_str
@@ -693,7 +694,7 @@ class SpinoffContext:
 
     def _generate_env_hash(self) -> None:
         # pylint: disable=cyclic-import
-        from efrotools import get_files_hash
+        from efrotools.util import get_files_hash
 
         # noinspection PyUnresolvedReferences
         import batools.spinoff

@@ -13,7 +13,7 @@ from typing import assert_never, TYPE_CHECKING
 
 from efro.error import CleanError
 from efro.terminal import Clr
-from efrotools import replace_exact
+from efrotools.util import replace_exact
 
 from batools.spinoff._context import SpinoffContext
 
@@ -65,19 +65,8 @@ def _main() -> None:
         _print_available_commands()
         return
 
-    dst_root = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '..'))
-
-    # Determine our src project based on our tools/spinoff symlink.
-    # If its not a link it means we ARE a src project.
-    dst_spinoff_path = os.path.join(dst_root, 'tools', 'spinoff')
-    if os.path.islink(dst_spinoff_path):
-        src_root = os.path.abspath(
-            os.path.join(
-                os.path.dirname(os.path.realpath(dst_spinoff_path)), '..'
-            )
-        )
-    else:
-        src_root = None
+    src_root = os.environ['BA_SPINOFF_SRC_ROOT']
+    dst_root = os.environ.get('BA_SPINOFF_DST_ROOT')
 
     single_run_mode: SpinoffContext.Mode | None = None
 
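With the hunk above, `tools/spinoff` no longer infers the source project by following a `tools/spinoff` symlink; the two project roots are expected to arrive via the `BA_SPINOFF_SRC_ROOT` and `BA_SPINOFF_DST_ROOT` environment variables (the latter optional, hence `os.environ.get`). A hypothetical invocation sketch; the paths and the `update` subcommand are illustrative assumptions, only the variable names come from the hunk:

import os
import subprocess

# Assumed wrapper behaviour: export the two roots, then run the spinoff tool.
env = dict(
    os.environ,
    BA_SPINOFF_SRC_ROOT='/path/to/src-project',
    BA_SPINOFF_DST_ROOT='/path/to/dst-project',
)
subprocess.run(['./tools/spinoff', 'update'], env=env, check=True)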
@@ -102,14 +91,14 @@ def _main() -> None:
     elif cmd is Command.BACKPORT:
         _do_backport(src_root, dst_root)
     elif cmd is Command.FEATURE_SET_LIST:
-        _do_featuresets(dst_root)
+        _do_featuresets(src_root)
     elif cmd is Command.CREATE:
         _do_create(src_root, dst_root)
     elif cmd is Command.ADD_SUBMODULE_PARENT:
-        from efrotools import getprojectconfig
+        from efrotools.project import getprojectconfig
 
-        public = getprojectconfig(Path(dst_root))['public']
-        _do_add_submodule_parent(dst_root, is_new=False, public=public)
+        public = getprojectconfig(Path(src_root))['public']
+        _do_add_submodule_parent(src_root, is_new=False, public=public)
     elif cmd is Command.FEATURE_SET_COPY:
         _do_featureset_copy()
     elif cmd is Command.FEATURE_SET_DELETE:
@@ -118,13 +107,13 @@ def _main() -> None:
         assert_never(cmd)
 
     if single_run_mode is not None:
-        from efrotools import extract_flag
+        from efrotools.util import extract_flag
 
         args = sys.argv[2:]
         force = extract_flag(args, '--force')
         verbose = extract_flag(args, '--verbose')
         print_full_lists = extract_flag(args, '--full')
-        if src_root is None:
+        if dst_root is None:
            if '--soft' in sys.argv:
                return
            raise CleanError(
@@ -155,16 +144,14 @@ def _main() -> None:
        ).run()
 
 
-def _do_create(src_root: str | None, dst_root: str) -> None:
+def _do_create(src_root: str, dst_root: str | None) -> None:
     # pylint: disable=too-many-locals, cyclic-import
-    from efrotools import extract_arg, extract_flag
+    from efrotools.util import extract_arg, extract_flag
     from efrotools.code import format_python_str
-    from efrotools import getprojectconfig
+    from efrotools.project import getprojectconfig
     import batools.spinoff
 
-    # Note: in our case dst_root is actually what becomes the src project
-    # should clean up these var names to make that clearer.
-    if src_root is not None:
+    if dst_root is not None:
         raise CleanError('This only works on src projects.')
 
     args = sys.argv[2:]
@@ -224,7 +211,7 @@ def _do_create(src_root: str | None, dst_root: str) -> None:
         template,
         '# __SRC_FEATURE_SETS__',
         format_python_str(
-            projroot=dst_root, code=f'ctx.src_feature_sets = {featuresets!r}'
+            projroot=src_root, code=f'ctx.src_feature_sets = {featuresets!r}'
         ),
     )
 
@@ -237,7 +224,7 @@ def _do_create(src_root: str | None, dst_root: str) -> None:
     # on git so its best to always do this.
     subprocess.run(['git', 'init'], cwd=path, check=True, capture_output=True)
 
-    public = getprojectconfig(Path(dst_root))['public']
+    public = getprojectconfig(Path(src_root))['public']
 
     if submodule_parent:
         _do_add_submodule_parent(path, is_new=True, public=public)
@@ -246,7 +233,7 @@ def _do_create(src_root: str | None, dst_root: str) -> None:
         [
             'ln',
             '-s',
-            os.path.join(dst_root, 'tools', 'spinoff'),
+            os.path.join(src_root, 'tools', 'spinoff'),
             os.path.join(path, 'tools'),
         ],
         check=True,
@@ -330,7 +317,7 @@ def _do_featureset_delete() -> None:
 
 def _do_featureset_copy() -> None:
     # pylint: disable=too-many-locals
-    from efrotools import extract_flag
+    from efrotools.util import extract_flag
 
     from batools.featureset import FeatureSet
 
@@ -514,9 +501,10 @@ def _do_featureset_copy_dir(
     )
 
 
-def _do_override(src_root: str | None, dst_root: str) -> None:
-    if src_root is None:
+def _do_override(src_root: str, dst_root: str | None) -> None:
+    if dst_root is None:
         raise CleanError('This only works on dst projects.')
 
     override_paths = [os.path.abspath(p) for p in sys.argv[2:]]
     if not override_paths:
         raise RuntimeError('Expected at least one path arg.')
@@ -549,8 +537,8 @@ def _do_override(src_root: str | None, dst_root: str) -> None:
     SpinoffContext(src_root, dst_root, SpinoffContext.Mode.UPDATE).run()
 
 
-def _do_backport(src_root: str | None, dst_root: str) -> None:
-    if src_root is None:
+def _do_backport(src_root: str, dst_root: str | None) -> None:
+    if dst_root is None:
         raise CleanError('This only works on dst projects.')
     args = sys.argv[2:]
     auto = '--auto' in args
@@ -20,7 +20,8 @@ def spinoff_test(args: list[str]) -> None:
     import subprocess
 
     from batools.featureset import FeatureSet
-    from efrotools import extract_flag, getprojectconfig
+    from efrotools.util import extract_flag
+    from efrotools.project import getprojectconfig
     from efro.terminal import Clr
     from efro.error import CleanError
 

@@ -12,7 +12,7 @@ from functools import partial
 from typing import TYPE_CHECKING
 
 from efro.terminal import Clr
-from efrotools import extract_arg, extract_flag
+from efrotools.util import extract_arg, extract_flag
 from efrotools.pyver import PYVER
 
 if TYPE_CHECKING:
@@ -750,7 +750,7 @@ def _stage_server_file(
 ) -> None:
     """Stage files for the server environment with some filtering."""
     import batools.build
-    from efrotools import replace_exact
+    from efrotools.util import replace_exact
 
     if mode not in ('debug', 'release'):
         raise RuntimeError(

@@ -14,7 +14,7 @@ if TYPE_CHECKING:
 
 def generate_top_level_makefile(projroot: str, existing_data: str) -> str:
     """Main script entry point."""
-    from efrotools import getprojectconfig
+    from efrotools.project import getprojectconfig
     from pathlib import Path
 
     public = getprojectconfig(Path(projroot))['public']
@@ -193,6 +193,9 @@ class _Inputter:
         if issubclass(origin, datetime.datetime):
             return self._datetime_from_input(cls, fieldpath, value, ioattrs)
 
+        if issubclass(origin, datetime.timedelta):
+            return self._timedelta_from_input(cls, fieldpath, value, ioattrs)
+
         if origin is bytes:
             return self._bytes_from_input(origin, fieldpath, value)
 
@@ -634,3 +637,23 @@ class _Inputter:
         if ioattrs is not None:
             ioattrs.validate_datetime(out, fieldpath)
         return out
+
+    def _timedelta_from_input(
+        self, cls: type, fieldpath: str, value: Any, ioattrs: IOAttrs | None
+    ) -> Any:
+        del ioattrs # Unused.
+        # We expect a list of 3 ints.
+        if type(value) is not list:
+            raise TypeError(
+                f'Invalid input value for "{fieldpath}" on "{cls.__name__}";'
+                f' expected a list, got a {type(value).__name__}'
+            )
+        if len(value) != 3 or not all(isinstance(x, int) for x in value):
+            raise ValueError(
+                f'Invalid input value for "{fieldpath}" on "{cls.__name__}";'
+                f' expected a list of 3 ints, got {[type(v) for v in value]}.'
+            )
+        out = datetime.timedelta(
+            days=value[0], seconds=value[1], microseconds=value[2]
+        )
+        return out
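Together with the `_Outputter` and `PrepSession` hunks that follow, this defines the wire format for a timedelta as a plain 3-int list of `[days, seconds, microseconds]`. Note that `datetime.timedelta` normalizes all other units into those three fields, so nothing is lost in the round trip; a quick plain-Python sanity check:

import datetime

td = datetime.timedelta(hours=25, milliseconds=1)
# 25 hours -> 1 day + 3600 seconds; 1 millisecond -> 1000 microseconds.
assert [td.days, td.seconds, td.microseconds] == [1, 3600, 1000]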
@@ -454,6 +454,17 @@ class _Outputter:
                 if self._create
                 else None
             )
+        if issubclass(origin, datetime.timedelta):
+            if not isinstance(value, origin):
+                raise TypeError(
+                    f'Expected a {origin} for {fieldpath};'
+                    f' found a {type(value)}.'
+                )
+            return (
+                [value.days, value.seconds, value.microseconds]
+                if self._create
+                else None
+            )
 
         if origin is bytes:
             return self._process_bytes(cls, fieldpath, value)

@@ -420,6 +420,10 @@ class PrepSession:
         if issubclass(origin, datetime.datetime):
             return
 
+        # We support datetime.timedelta.
+        if issubclass(origin, datetime.timedelta):
+            return
+
         if dataclasses.is_dataclass(origin):
             self.prep_dataclass(origin, recursion_level=recursion_level + 1)
             return
@@ -5,336 +5,3 @@
 This stuff can be a bit more sloppy/loosey-goosey since it is not used in
 live client or server code.
 """
-
-# FIXME: should migrate everything here into submodules since this adds
-# overhead to anything importing from any efrotools submodule.
-
-from __future__ import annotations
-
-import os
-import json
-from pathlib import Path
-from typing import TYPE_CHECKING, overload
-
-
-if TYPE_CHECKING:
-    from typing import Sequence, Any, Literal
-
-# Cache these since we may repeatedly fetch these in batch mode.
-_g_project_configs: dict[str, dict[str, Any]] = {}
-_g_local_configs: dict[str, dict[str, Any]] = {}
-
-
-def explicit_bool(value: bool) -> bool:
-    """Simply return input value; can avoid unreachable-code type warnings."""
-    return value
-
-
-def getlocalconfig(projroot: Path | str) -> dict[str, Any]:
-    """Return a project's localconfig contents (or default if missing)."""
-    projrootstr = str(projroot)
-    if projrootstr not in _g_local_configs:
-        localconfig: dict[str, Any]
-
-        # Allow overriding path via env var.
-        path = os.environ.get('EFRO_LOCALCONFIG_PATH')
-        if path is None:
-            path = 'config/localconfig.json'
-
-        try:
-            with open(Path(projroot, path), encoding='utf-8') as infile:
-                localconfig = json.loads(infile.read())
-        except FileNotFoundError:
-            localconfig = {}
-        _g_local_configs[projrootstr] = localconfig
-
-    return _g_local_configs[projrootstr]
-
-
-def getprojectconfig(projroot: Path | str) -> dict[str, Any]:
-    """Return a project's projectconfig contents (or default if missing)."""
-    projrootstr = str(projroot)
-    if projrootstr not in _g_project_configs:
-        config: dict[str, Any]
-        try:
-            with open(
-                Path(projroot, 'config/projectconfig.json'), encoding='utf-8'
-            ) as infile:
-                config = json.loads(infile.read())
-        except FileNotFoundError:
-            config = {}
-        _g_project_configs[projrootstr] = config
-    return _g_project_configs[projrootstr]
-
-
-def setprojectconfig(projroot: Path | str, config: dict[str, Any]) -> None:
-    """Set the project config contents."""
-    projrootstr = str(projroot)
-    _g_project_configs[projrootstr] = config
-    os.makedirs(Path(projroot, 'config'), exist_ok=True)
-    with Path(projroot, 'config/projectconfig.json').open(
-        'w', encoding='utf-8'
-    ) as outfile:
-        outfile.write(json.dumps(config, indent=2))
-
-
-def extract_flag(args: list[str], name: str) -> bool:
-    """Given a list of args and a flag name, returns whether it is present.
-
-    The arg flag, if present, is removed from the arg list.
-    """
-    from efro.error import CleanError
-
-    count = args.count(name)
-    if count > 1:
-        raise CleanError(f'Flag {name} passed multiple times.')
-    if not count:
-        return False
-    args.remove(name)
-    return True
-
-
-@overload
-def extract_arg(
-    args: list[str], name: str, required: Literal[False] = False
-) -> str | None: ...
-
-
-@overload
-def extract_arg(args: list[str], name: str, required: Literal[True]) -> str: ...
-
-
-def extract_arg(
-    args: list[str], name: str, required: bool = False
-) -> str | None:
-    """Given a list of args and an arg name, returns a value.
-
-    The arg flag and value are removed from the arg list.
-    raises CleanErrors on any problems.
-    """
-    from efro.error import CleanError
-
-    count = args.count(name)
-    if not count:
-        if required:
-            raise CleanError(f'Required argument {name} not passed.')
-        return None
-
-    if count > 1:
-        raise CleanError(f'Arg {name} passed multiple times.')
-
-    argindex = args.index(name)
-    if argindex + 1 >= len(args):
-        raise CleanError(f'No value passed after {name} arg.')
-
-    val = args[argindex + 1]
-    del args[argindex : argindex + 2]
-
-    return val
-
-
-def replace_section(
-    text: str,
-    begin_marker: str,
-    end_marker: str,
-    replace_text: str = '',
-    keep_markers: bool = False,
-    error_if_missing: bool = True,
-) -> str:
-    """Replace all text between two marker strings (including the markers)."""
-    if begin_marker not in text:
-        if error_if_missing:
-            raise RuntimeError(f"Marker not found in text: '{begin_marker}'.")
-        return text
-    splits = text.split(begin_marker)
-    if len(splits) != 2:
-        raise RuntimeError(
-            f"Expected one marker '{begin_marker}'"
-            f'; found {text.count(begin_marker)}.'
-        )
-    before_begin, after_begin = splits
-    splits = after_begin.split(end_marker)
-    if len(splits) != 2:
-        raise RuntimeError(
-            f"Expected one marker '{end_marker}'"
-            f'; found {text.count(end_marker)}.'
-        )
-    _before_end, after_end = splits
-    if keep_markers:
-        replace_text = f'{begin_marker}{replace_text}{end_marker}'
-    return f'{before_begin}{replace_text}{after_end}'
-
-
-def readfile(path: str | Path) -> str:
-    """Read a utf-8 text file into a string."""
-    with open(path, encoding='utf-8') as infile:
-        return infile.read()
-
-
-def writefile(path: str | Path, txt: str) -> None:
-    """Write a string to a utf-8 text file."""
-    with open(path, 'w', encoding='utf-8') as outfile:
-        outfile.write(txt)
-
-
-def replace_exact(
-    opstr: str, old: str, new: str, count: int = 1, label: str | None = None
-) -> str:
-    """Replace text ensuring that exactly x occurrences are replaced.
-
-    Useful when filtering data in some predefined way to ensure the original
-    has not changed.
-    """
-    found = opstr.count(old)
-    label_str = f' in {label}' if label is not None else ''
-    if found != count:
-        raise RuntimeError(
-            f'Expected {count} string occurrence(s){label_str};'
-            f' found {found}. String: {repr(old)}'
-        )
-    return opstr.replace(old, new)
-
-
-def get_files_hash(
-    filenames: Sequence[str | Path],
-    extrahash: str = '',
-    int_only: bool = False,
-    hashtype: Literal['md5', 'sha256'] = 'md5',
-) -> str:
-    """Return a hash for the given files."""
-    import hashlib
-
-    if not isinstance(filenames, list):
-        raise RuntimeError(f'Expected a list; got a {type(filenames)}.')
-    if TYPE_CHECKING:
-        # Help Mypy infer the right type for this.
-        hashobj = hashlib.md5()
-    else:
-        hashobj = getattr(hashlib, hashtype)()
-    for fname in filenames:
-        with open(fname, 'rb') as infile:
-            while True:
-                data = infile.read(2**20)
-                if not data:
-                    break
-                hashobj.update(data)
-    hashobj.update(extrahash.encode())
-
-    if int_only:
-        return str(int.from_bytes(hashobj.digest(), byteorder='big'))
-
-    return hashobj.hexdigest()
-
-
-def get_string_hash(
-    value: str,
-    int_only: bool = False,
-    hashtype: Literal['md5', 'sha256'] = 'md5',
-) -> str:
-    """Return a hash for the given files."""
-    import hashlib
-
-    if not isinstance(value, str):
-        raise TypeError('Expected a str.')
-    if TYPE_CHECKING:
-        # Help Mypy infer the right type for this.
-        hashobj = hashlib.md5()
-    else:
-        hashobj = getattr(hashlib, hashtype)()
-    hashobj.update(value.encode())
-
-    if int_only:
-        return str(int.from_bytes(hashobj.digest(), byteorder='big'))
-
-    return hashobj.hexdigest()
-
-
-def _py_symbol_at_column(line: str, col: int) -> str:
-    start = col
-    while start > 0 and line[start - 1] != ' ':
-        start -= 1
-    end = col
-    while end < len(line) and line[end] != ' ':
-        end += 1
-    return line[start:end]
-
-
-def py_examine(
-    projroot: Path,
-    filename: Path,
-    line: int,
-    column: int,
-    selection: str | None,
-    operation: str,
-) -> None:
-    """Given file position info, performs some code inspection."""
-    # pylint: disable=too-many-locals
-    # pylint: disable=cyclic-import
-    import astroid
-    import re
-    from efrotools import code
-
-    # Pull in our pylint plugin which really just adds astroid filters.
-    # That way our introspection here will see the same thing as pylint's does.
-    with open(filename, encoding='utf-8') as infile:
-        fcontents = infile.read()
-    if '#@' in fcontents:
-        raise RuntimeError('#@ marker found in file; this breaks examinations.')
-    flines = fcontents.splitlines()
-
-    if operation == 'pylint_infer':
-        # See what asteroid can infer about the target symbol.
-        symbol = (
-            selection
-            if selection is not None
-            else _py_symbol_at_column(flines[line - 1], column)
-        )
-
-        # Insert a line after the provided one which is just the symbol so
-        # that we can ask for its value alone.
-        match = re.match(r'\s*', flines[line - 1])
-        whitespace = match.group() if match is not None else ''
-        sline = whitespace + symbol + ' #@'
-        flines = flines[:line] + [sline] + flines[line:]
-        node = astroid.extract_node('\n'.join(flines))
-        inferred = list(node.infer())
-        print(symbol + ':', ', '.join([str(i) for i in inferred]))
-    elif operation in ('mypy_infer', 'mypy_locals'):
-        # Ask mypy for the type of the target symbol.
-        symbol = (
-            selection
-            if selection is not None
-            else _py_symbol_at_column(flines[line - 1], column)
-        )
-
-        # Insert a line after the provided one which is just the symbol so
-        # that we can ask for its value alone.
-        match = re.match(r'\s*', flines[line - 1])
-        whitespace = match.group() if match is not None else ''
-        if operation == 'mypy_infer':
-            sline = whitespace + 'reveal_type(' + symbol + ')'
-        else:
-            sline = whitespace + 'reveal_locals()'
-        flines = flines[:line] + [sline] + flines[line:]
-
-        # Write a temp file and run the check on it.
-        # Let's use ' flycheck_*' for the name since pipeline scripts
-        # are already set to ignore those files.
-        tmppath = Path(filename.parent, 'flycheck_mp_' + filename.name)
-        with tmppath.open('w', encoding='utf-8') as outfile:
-            outfile.write('\n'.join(flines))
-        try:
-            code.mypy_files(projroot, [str(tmppath)], check=False)
-        except Exception as exc:
-            print('error running mypy:', exc)
-        tmppath.unlink()
-    elif operation == 'pylint_node':
-        flines[line - 1] += ' #@'
-        node = astroid.extract_node('\n'.join(flines))
-        print(node)
-    elif operation == 'pylint_tree':
-        flines[line - 1] += ' #@'
-        node = astroid.extract_node('\n'.join(flines))
-        print(node.repr_tree())
-    else:
-        print('unknown operation: ' + operation)
@@ -77,7 +77,7 @@ def format_project_cpp_files(projroot: Path, full: bool) -> None:
     import concurrent.futures
     from multiprocessing import cpu_count
 
-    from efrotools import get_files_hash
+    from efrotools.util import get_files_hash
 
     if os.path.abspath(projroot) != os.getcwd():
         raise RuntimeError('We expect to be running from project root.')
@@ -137,7 +137,7 @@ def check_cpplint(projroot: Path, full: bool) -> None:
     from concurrent.futures import ThreadPoolExecutor
     from multiprocessing import cpu_count
 
-    from efrotools import getprojectconfig
+    from efrotools.project import getprojectconfig
     from efro.terminal import Clr
 
     os.chdir(projroot)
@@ -221,7 +221,7 @@ def get_code_filenames(projroot: Path, include_generated: bool) -> list[str]:
     could cause dirty generated files to not get updated properly when
     their sources change).
     """
-    from efrotools import getprojectconfig
+    from efrotools.project import getprojectconfig
 
     exts = ('.h', '.c', '.cc', '.cpp', '.cxx', '.m', '.mm')
     places = getprojectconfig(projroot).get('code_source_dirs', None)
@@ -272,7 +272,7 @@ def black_base_args(projroot: Path) -> list[str]:
 
 def format_project_python_files(projroot: Path, full: bool) -> None:
     """Runs formatting on all of our Python code."""
-    from efrotools import get_string_hash
+    from efrotools.util import get_string_hash
 
     os.chdir(projroot)
     cachepath = Path(projroot, '.cache/format_project_python_files')
@@ -344,7 +344,7 @@ def _should_include_script(fnamefull: str) -> bool:
 
 def get_script_filenames(projroot: Path) -> list[str]:
     """Return the Python filenames to lint-check or auto-format."""
-    from efrotools import getprojectconfig
+    from efrotools.project import getprojectconfig
 
     proot = f'{projroot}/'
 
@@ -392,7 +392,7 @@ def runpylint(projroot: Path, filenames: list[str]) -> None:
 
 def pylint(projroot: Path, full: bool, fast: bool) -> None:
     """Run Pylint on all scripts in our project (with smart dep tracking)."""
-    from efrotools import get_files_hash
+    from efrotools.util import get_files_hash
     from efro.terminal import Clr
 
     pylintrc = Path(projroot, '.pylintrc')
@@ -568,7 +568,7 @@ def _apply_pylint_run_to_cache(
 
     from astroid import modutils
 
-    from efrotools import getprojectconfig
+    from efrotools.project import getprojectconfig
 
     # First off, build a map of dirtyfiles to module names
     # (and the corresponding reverse map).

@@ -74,10 +74,9 @@ def get_local_cache_dir() -> str:
 
 def get_repository_base_url() -> str:
     """Return the base repository url (assumes cwd is project root)."""
-    # from efrotools import getprojectconfig
-    import efrotools
+    from efrotools.project import getprojectconfig
 
-    pconfig = efrotools.getprojectconfig('.')
+    pconfig = getprojectconfig('.')
     name = 'efrocache_repository_url'
     val = pconfig.get(name)
     if not isinstance(val, str):
tools/efrotools/emacs.py  (new file, 103 lines)
@@ -0,0 +1,103 @@
+# Released under the MIT License. See LICENSE for details.
+#
+"""Stuff intended to be used from emacs"""
+
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    pass
+
+
+def _py_symbol_at_column(line: str, col: int) -> str:
+    start = col
+    while start > 0 and line[start - 1] != ' ':
+        start -= 1
+    end = col
+    while end < len(line) and line[end] != ' ':
+        end += 1
+    return line[start:end]
+
+
+def py_examine(
+    projroot: Path,
+    filename: Path,
+    line: int,
+    column: int,
+    selection: str | None,
+    operation: str,
+) -> None:
+    """Given file position info, performs some code inspection."""
+    # pylint: disable=too-many-locals
+    # pylint: disable=cyclic-import
+    import astroid
+    import re
+    from efrotools import code
+
+    # Pull in our pylint plugin which really just adds astroid filters.
+    # That way our introspection here will see the same thing as pylint's does.
+    with open(filename, encoding='utf-8') as infile:
+        fcontents = infile.read()
+    if '#@' in fcontents:
+        raise RuntimeError('#@ marker found in file; this breaks examinations.')
+    flines = fcontents.splitlines()
+
+    if operation == 'pylint_infer':
+        # See what asteroid can infer about the target symbol.
+        symbol = (
+            selection
+            if selection is not None
+            else _py_symbol_at_column(flines[line - 1], column)
+        )
+
+        # Insert a line after the provided one which is just the symbol so
+        # that we can ask for its value alone.
+        match = re.match(r'\s*', flines[line - 1])
+        whitespace = match.group() if match is not None else ''
+        sline = whitespace + symbol + ' #@'
+        flines = flines[:line] + [sline] + flines[line:]
+        node = astroid.extract_node('\n'.join(flines))
+        inferred = list(node.infer())
+        print(symbol + ':', ', '.join([str(i) for i in inferred]))
+    elif operation in ('mypy_infer', 'mypy_locals'):
+        # Ask mypy for the type of the target symbol.
+        symbol = (
+            selection
+            if selection is not None
+            else _py_symbol_at_column(flines[line - 1], column)
+        )
+
+        # Insert a line after the provided one which is just the symbol so
+        # that we can ask for its value alone.
+        match = re.match(r'\s*', flines[line - 1])
+        whitespace = match.group() if match is not None else ''
+        if operation == 'mypy_infer':
+            sline = whitespace + 'reveal_type(' + symbol + ')'
+        else:
+            sline = whitespace + 'reveal_locals()'
+        flines = flines[:line] + [sline] + flines[line:]
+
+        # Write a temp file and run the check on it.
+        # Let's use 'flycheck_*' for the name since pipeline scripts
+        # are already set to ignore those files.
+        tmppath = Path(filename.parent, 'flycheck_mp_' + filename.name)
+        with tmppath.open('w', encoding='utf-8') as outfile:
+            outfile.write('\n'.join(flines))
+        try:
+            code.mypy_files(projroot, [str(tmppath)], check=False)
+        except Exception as exc:
+            print('error running mypy:', exc)
+        tmppath.unlink()
+    elif operation == 'pylint_node':
+        flines[line - 1] += ' #@'
+        node = astroid.extract_node('\n'.join(flines))
+        print(node)
+    elif operation == 'pylint_tree':
+        flines[line - 1] += ' #@'
+        node = astroid.extract_node('\n'.join(flines))
+        print(node.repr_tree())
+    else:
+        print('unknown operation: ' + operation)
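For reference, the trailing '#@' comment that py_examine appends is astroid's own node-extraction marker; a minimal standalone sketch of that mechanism (not part of this commit) looks roughly like this:

import astroid

SRC = '''
value = 1 + 2
value  #@
'''

# extract_node() returns the node for the expression tagged with '#@';
# infer() then reports what astroid can deduce about it (a Const 3 here).
node = astroid.extract_node(SRC)
print([str(i) for i in node.infer()])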
@@ -12,7 +12,7 @@ from typing import TYPE_CHECKING
 # pylint: disable=useless-suppression
 # pylint: disable=wrong-import-order
 from efro.terminal import Clr
-from efrotools import get_files_hash
+from efrotools.util import get_files_hash

 # pylint: enable=wrong-import-order
 # pylint: enable=useless-suppression
@@ -15,8 +15,8 @@ if __name__ == '__main__':
     if len(sys.argv) not in (3, 4):
         raise RuntimeError('Expected 2 args')

-    from efrotools import getprojectconfig
     from efrotools.project import (
+        getprojectconfig,
         get_public_legal_notice,
         get_non_public_legal_notice,
     )
@@ -9,7 +9,7 @@ import subprocess
 import sys
 from dataclasses import dataclass

-from efrotools import getprojectconfig, getlocalconfig
+from efrotools.project import getprojectconfig, getlocalconfig

 MODES = {
     'debug': {'configuration': 'Debug'},
@@ -16,7 +16,7 @@ from typing import TYPE_CHECKING
 # pylint: disable=wrong-import-order
 from efro.terminal import Clr
 from efrotools.buildlock import BuildLock
-from efrotools import get_string_hash
+from efrotools.util import get_string_hash

 # pylint: enable=wrong-import-order
 # pylint: enable=useless-suppression
@@ -33,7 +33,7 @@ def build_openal(arch: str, mode: str) -> None:
     """Do the thing."""
     # pylint: disable=too-many-statements
     # pylint: disable=too-many-locals
-    from efrotools import replace_exact
+    from efrotools.util import replace_exact

     if arch not in ARCHS:
         raise CleanError(f"Invalid arch '{arch}'.")
@@ -477,7 +477,7 @@ def sync_all() -> None:

 def sync() -> None:
     """Runs standard syncs between this project and others."""
-    from efrotools import getprojectconfig
+    from efrotools.project import getprojectconfig
     from efrotools.sync import Mode, SyncItem, run_standard_syncs

     pcommand.disallow_in_batch()
@@ -625,7 +625,7 @@ def pytest() -> None:
     import os
     import platform
     import subprocess
-    from efrotools import getprojectconfig
+    from efrotools.project import getprojectconfig
     from efro.error import CleanError

     pcommand.disallow_in_batch()
@@ -122,7 +122,7 @@ def batchserver() -> None:
     """Run a server for handling pcommands."""
     from efro.error import CleanError

-    from efrotools import extract_arg
+    from efrotools.util import extract_arg
     import efrotools.pcommandbatch as pcb

     pcommand.disallow_in_batch()
@@ -4,10 +4,17 @@

 from __future__ import annotations

+import os
+import json
+from pathlib import Path
 from typing import TYPE_CHECKING

 if TYPE_CHECKING:
-    from typing import Literal
+    from typing import Literal, Any

+# Cache these since we may repeatedly fetch these in batch mode.
+_g_project_configs: dict[str, dict[str, Any]] = {}
+_g_local_configs: dict[str, dict[str, Any]] = {}
+

 def get_public_legal_notice(
@@ -40,3 +47,51 @@ def get_non_public_legal_notice_prev() -> str:
     """Allows us to auto-update."""
     # TODO: Move this to project config or somewhere not hard-coded.
     return 'Copyright (c) 2011-2023 Eric Froemling'
+
+
+def getlocalconfig(projroot: Path | str) -> dict[str, Any]:
+    """Return a project's localconfig contents (or default if missing)."""
+    projrootstr = str(projroot)
+    if projrootstr not in _g_local_configs:
+        localconfig: dict[str, Any]
+
+        # Allow overriding path via env var.
+        path = os.environ.get('EFRO_LOCALCONFIG_PATH')
+        if path is None:
+            path = 'config/localconfig.json'
+
+        try:
+            with open(Path(projroot, path), encoding='utf-8') as infile:
+                localconfig = json.loads(infile.read())
+        except FileNotFoundError:
+            localconfig = {}
+        _g_local_configs[projrootstr] = localconfig
+
+    return _g_local_configs[projrootstr]
+
+
+def getprojectconfig(projroot: Path | str) -> dict[str, Any]:
+    """Return a project's projectconfig contents (or default if missing)."""
+    projrootstr = str(projroot)
+    if projrootstr not in _g_project_configs:
+        config: dict[str, Any]
+        try:
+            with open(
+                Path(projroot, 'config/projectconfig.json'), encoding='utf-8'
+            ) as infile:
+                config = json.loads(infile.read())
+        except FileNotFoundError:
+            config = {}
+        _g_project_configs[projrootstr] = config
+    return _g_project_configs[projrootstr]
+
+
+def setprojectconfig(projroot: Path | str, config: dict[str, Any]) -> None:
+    """Set the project config contents."""
+    projrootstr = str(projroot)
+    _g_project_configs[projrootstr] = config
+    os.makedirs(Path(projroot, 'config'), exist_ok=True)
+    with Path(projroot, 'config/projectconfig.json').open(
+        'w', encoding='utf-8'
+    ) as outfile:
+        outfile.write(json.dumps(config, indent=2))
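A brief usage sketch for the relocated config helpers (not part of the commit; 'example_key' is a made-up key purely for illustration):

from efrotools.project import getprojectconfig, setprojectconfig

# Reads config/projectconfig.json under the given project root; results are
# cached per root and an empty dict is returned if the file is missing.
config = getprojectconfig('.')
config['example_key'] = 'example-value'  # hypothetical key for illustration

# Writes config/projectconfig.json back out and updates the in-memory cache.
setprojectconfig('.', config)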
@@ -10,7 +10,7 @@ import subprocess
 from enum import Enum
 from dataclasses import dataclass

-from efrotools import readfile, writefile, replace_exact
+from efrotools.util import readfile, writefile, replace_exact

 # Python version we build here (not necessarily same as we use in repo).
 PY_VER_ANDROID = '3.12'
|
|||||||
a src subpath, and optionally a dst subpath (src will be used by default).
|
a src subpath, and optionally a dst subpath (src will be used by default).
|
||||||
"""
|
"""
|
||||||
# pylint: disable=too-many-locals
|
# pylint: disable=too-many-locals
|
||||||
from efrotools import getlocalconfig
|
from efrotools.project import getlocalconfig
|
||||||
|
|
||||||
localconfig = getlocalconfig(projectroot)
|
localconfig = getlocalconfig(projectroot)
|
||||||
total_count = 0
|
total_count = 0
|
||||||
|
|||||||
@ -68,7 +68,7 @@ def _filter_tool_config(projroot: Path, cfg: str) -> str:
|
|||||||
# pylint: disable=too-many-locals
|
# pylint: disable=too-many-locals
|
||||||
import textwrap
|
import textwrap
|
||||||
|
|
||||||
from efrotools import getprojectconfig
|
from efrotools.project import getprojectconfig
|
||||||
from efrotools.pyver import PYVER
|
from efrotools.pyver import PYVER
|
||||||
|
|
||||||
# Emacs dir-locals defaults. Note that these contain other
|
# Emacs dir-locals defaults. Note that these contain other
|
||||||
|
|||||||
tools/efrotools/util.py  (new file, 191 lines)
@@ -0,0 +1,191 @@
+# Released under the MIT License. See LICENSE for details.
+#
+"""Misc util calls/etc.
+
+Ideally the stuff in here should migrate to more descriptive module names.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, overload
+
+
+if TYPE_CHECKING:
+    from typing import Sequence, Literal
+    from pathlib import Path
+
+
+def explicit_bool(value: bool) -> bool:
+    """Simply return input value; can avoid unreachable-code type warnings."""
+    return value
+
+
+def extract_flag(args: list[str], name: str) -> bool:
+    """Given a list of args and a flag name, returns whether it is present.
+
+    The arg flag, if present, is removed from the arg list.
+    """
+    from efro.error import CleanError
+
+    count = args.count(name)
+    if count > 1:
+        raise CleanError(f'Flag {name} passed multiple times.')
+    if not count:
+        return False
+    args.remove(name)
+    return True
+
+
+@overload
+def extract_arg(
+    args: list[str], name: str, required: Literal[False] = False
+) -> str | None: ...
+
+
+@overload
+def extract_arg(args: list[str], name: str, required: Literal[True]) -> str: ...
+
+
+def extract_arg(
+    args: list[str], name: str, required: bool = False
+) -> str | None:
+    """Given a list of args and an arg name, returns a value.
+
+    The arg flag and value are removed from the arg list.
+    Raises CleanError on any problems.
+    """
+    from efro.error import CleanError
+
+    count = args.count(name)
+    if not count:
+        if required:
+            raise CleanError(f'Required argument {name} not passed.')
+        return None
+
+    if count > 1:
+        raise CleanError(f'Arg {name} passed multiple times.')
+
+    argindex = args.index(name)
+    if argindex + 1 >= len(args):
+        raise CleanError(f'No value passed after {name} arg.')
+
+    val = args[argindex + 1]
+    del args[argindex : argindex + 2]
+
+    return val
+
+
+def replace_section(
+    text: str,
+    begin_marker: str,
+    end_marker: str,
+    replace_text: str = '',
+    keep_markers: bool = False,
+    error_if_missing: bool = True,
+) -> str:
+    """Replace all text between two marker strings (including the markers)."""
+    if begin_marker not in text:
+        if error_if_missing:
+            raise RuntimeError(f"Marker not found in text: '{begin_marker}'.")
+        return text
+    splits = text.split(begin_marker)
+    if len(splits) != 2:
+        raise RuntimeError(
+            f"Expected one marker '{begin_marker}'"
+            f'; found {text.count(begin_marker)}.'
+        )
+    before_begin, after_begin = splits
+    splits = after_begin.split(end_marker)
+    if len(splits) != 2:
+        raise RuntimeError(
+            f"Expected one marker '{end_marker}'"
+            f'; found {text.count(end_marker)}.'
+        )
+    _before_end, after_end = splits
+    if keep_markers:
+        replace_text = f'{begin_marker}{replace_text}{end_marker}'
+    return f'{before_begin}{replace_text}{after_end}'
+
+
+def readfile(path: str | Path) -> str:
+    """Read a utf-8 text file into a string."""
+    with open(path, encoding='utf-8') as infile:
+        return infile.read()
+
+
+def writefile(path: str | Path, txt: str) -> None:
+    """Write a string to a utf-8 text file."""
+    with open(path, 'w', encoding='utf-8') as outfile:
+        outfile.write(txt)
+
+
+def replace_exact(
+    opstr: str, old: str, new: str, count: int = 1, label: str | None = None
+) -> str:
+    """Replace text ensuring that exactly `count` occurrences are replaced.
+
+    Useful when filtering data in some predefined way to ensure the original
+    has not changed.
+    """
+    found = opstr.count(old)
+    label_str = f' in {label}' if label is not None else ''
+    if found != count:
+        raise RuntimeError(
+            f'Expected {count} string occurrence(s){label_str};'
+            f' found {found}. String: {repr(old)}'
+        )
+    return opstr.replace(old, new)
+
+
+def get_files_hash(
+    filenames: Sequence[str | Path],
+    extrahash: str = '',
+    int_only: bool = False,
+    hashtype: Literal['md5', 'sha256'] = 'md5',
+) -> str:
+    """Return a hash for the given files."""
+    import hashlib
+
+    if not isinstance(filenames, list):
+        raise RuntimeError(f'Expected a list; got a {type(filenames)}.')
+    if TYPE_CHECKING:
+        # Help Mypy infer the right type for this.
+        hashobj = hashlib.md5()
+    else:
+        hashobj = getattr(hashlib, hashtype)()
+    for fname in filenames:
+        with open(fname, 'rb') as infile:
+            while True:
+                data = infile.read(2**20)
+                if not data:
+                    break
+                hashobj.update(data)
+    hashobj.update(extrahash.encode())
+
+    if int_only:
+        return str(int.from_bytes(hashobj.digest(), byteorder='big'))
+
+    return hashobj.hexdigest()
+
+
+def get_string_hash(
+    value: str,
+    int_only: bool = False,
+    hashtype: Literal['md5', 'sha256'] = 'md5',
+) -> str:
+    """Return a hash for the given string."""
+    import hashlib
+
+    if not isinstance(value, str):
+        raise TypeError('Expected a str.')
+    if TYPE_CHECKING:
+        # Help Mypy infer the right type for this.
+        hashobj = hashlib.md5()
+    else:
+        hashobj = getattr(hashlib, hashtype)()
+    hashobj.update(value.encode())
+
+    if int_only:
+        return str(int.from_bytes(hashobj.digest(), byteorder='big'))
+
+    return hashobj.hexdigest()
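A short usage sketch for a few of these helpers (not part of the commit; the argument names are invented for illustration):

from efrotools.util import extract_arg, extract_flag, replace_exact

args = ['--verbose', '--name', 'foo', 'build']
verbose = extract_flag(args, '--verbose')  # True; the flag is removed from args.
name = extract_arg(args, '--name')         # 'foo'; flag and value are removed.
assert args == ['build'] and verbose and name == 'foo'

# replace_exact() raises unless the expected text occurs exactly `count`
# times, so stale filtering code fails loudly instead of silently.
out = replace_exact('PY_VER = "3.11"\n', '"3.11"', '"3.12"')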
@@ -22,7 +22,7 @@ from filelock import FileLock
 from efro.terminal import Clr
 from efro.error import CleanError
 from efro.dataclassio import ioprepped, dataclass_from_dict
-from efrotools import getlocalconfig  # pylint: disable=wrong-import-order
+from efrotools.project import getlocalconfig  # pylint: disable=C0411

 if TYPE_CHECKING:
     from typing import Any
@@ -10,36 +10,50 @@ import sys
 import subprocess

 if __name__ == '__main__':
-    from batools.spinoff import spinoff_main

-    # Our initial invocation actually just sets up the env for our
-    # *real* invocation (so we can run under our desired venv/etc.)
-    if os.environ.get('BA_SPINOFF_HAVE_ENV') != '1':
+    # The initial invocation of this script actually just sets the stage
+    # for the *real* invocation of this script, which always happens
+    # from the fully-inited virtual env of the source project. This way
+    # all modules used by the spinoff system are in place and there's no
+    # ambiguity where we could be loading Python stuff from the dst
+    # project while we're in the process of modifying it.
+    if 'BA_SPINOFF_SRC_ROOT' not in os.environ:

-        # Our shebang line gives us a generic 'pythonX.Y' environment, but
-        # we actually want to run under the virtual-env of the source
-        # project so we have all the pip stuff we expect. So if we are
-        # getting invoked via a symlink we assume it points to the source
-        # project, and if not then we assume we are the source project.
+        # Calc absolute paths for our source (and possibly dst)
+        # projects. If we are getting invoked via a symlink, what it
+        # points to is src and we are dst. Otherwise we are src and
+        # there is no dst.
+        dst_proj_root: str | None
         if os.path.islink(sys.argv[0]):
             src_spinoff_path = os.path.realpath(sys.argv[0])
+            dst_proj_root = os.path.abspath(
+                os.path.join(os.path.dirname(sys.argv[0]), '..')
+            )
+
         else:
             src_spinoff_path = sys.argv[0]
+            dst_proj_root = None  # pylint: disable=invalid-name

         src_proj_root = os.path.abspath(
             os.path.join(os.path.dirname(src_spinoff_path), '..')
         )
         src_proj_python = os.path.join(src_proj_root, '.venv/bin/python3.12')

-        cmd = [src_proj_python, sys.argv[0]] + sys.argv[1:]
+        cmd = [src_proj_python, 'tools/spinoff'] + sys.argv[1:]
+
+        env = dict(os.environ, BA_SPINOFF_SRC_ROOT=src_proj_root)
+        if dst_proj_root is not None:
+            env['BA_SPINOFF_DST_ROOT'] = dst_proj_root

         # Make sure the src project is properly bootstrapped.
         subprocess.run(['make', 'prereqs'], check=True, cwd=src_proj_root)

-        # Finally, run for realz.
-        subprocess.run(
-            cmd, check=True, env=dict(os.environ, BA_SPINOFF_HAVE_ENV='1')
-        )
+        # Finally, run for realz (from src proj dir).
+        result = subprocess.run(cmd, check=False, env=env, cwd=src_proj_root)
+        sys.exit(result.returncode)

     else:
+        from batools.spinoff import spinoff_main
+
         # Ok; we're a real invocation. Do our thing.
         spinoff_main()
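For orientation, the handoff contract set up above is just two environment variables; a hypothetical sketch (spinoff_main's real implementation is not shown in this diff) of how the re-invoked side could read them:

import os
from pathlib import Path


def spinoff_roots() -> tuple[Path, Path | None]:
    """Hypothetical helper: src/dst roots as passed by the wrapper script."""
    # BA_SPINOFF_SRC_ROOT is always set once the wrapper re-invokes us.
    src = Path(os.environ['BA_SPINOFF_SRC_ROOT'])
    # BA_SPINOFF_DST_ROOT is set only when invoked via a dst-project symlink.
    dst = os.environ.get('BA_SPINOFF_DST_ROOT')
    return src, (Path(dst) if dst is not None else None)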