Merge branch 'efroemling:master' into master

This commit is contained in:
Loup 2024-04-24 03:57:53 +05:30 committed by GitHub
commit f67038fb66
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
47 changed files with 676 additions and 584 deletions

56
.efrocachemap generated
View File

@ -4056,26 +4056,26 @@
"build/assets/windows/Win32/ucrtbased.dll": "2def5335207d41b21b9823f6805997f1",
"build/assets/windows/Win32/vc_redist.x86.exe": "b08a55e2e77623fe657bea24f223a3ae",
"build/assets/windows/Win32/vcruntime140d.dll": "865b2af4d1e26a1a8073c89acb06e599",
"build/prefab/full/linux_arm64_gui/debug/ballisticakit": "10ca339552ccefcc1cf0458d77ddfc60",
"build/prefab/full/linux_arm64_gui/release/ballisticakit": "220a8bae9a55c6625972254dd7265029",
"build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "54bde68d1f6c948a0fe1a3844b49fbc9",
"build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "d6908e5517980c48204c5d7d38b624b2",
"build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "0552bfb41850409763c3eb0824629c4f",
"build/prefab/full/linux_x86_64_gui/release/ballisticakit": "3b92b9b7b6f25ec0b00ebdac6af8f7f7",
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "bf943bb02a696a97b8eb13173bd10381",
"build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "733c06666e6ac2bd49a645814820ae70",
"build/prefab/full/mac_arm64_gui/debug/ballisticakit": "274f31c8955c9af9799edc72eef37184",
"build/prefab/full/mac_arm64_gui/release/ballisticakit": "ced3d5b6e818c2c5901e6373d40583ba",
"build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "c745ec776c3fa19166fab17b91541c02",
"build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "cf3751677f46d248d251b566b737c3ca",
"build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "ab74e096ad474d389c4e20cc78f22e43",
"build/prefab/full/mac_x86_64_gui/release/ballisticakit": "e54648fc2a899449baa081b55cb52588",
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "882935d0e1e2a9a36dfb7e0a3d4a5170",
"build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "342b58d91ed50562f9f8d8163f5f4441",
"build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "591cf3434d9ac490d416c9dcc60f051f",
"build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "4ef7a5bbfef88bf2174a0001aaf9532d",
"build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "16253a3ef3ccc96250eb5a311caf14fe",
"build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "90ab514825fbb1e5c2a88297b19d95d2",
"build/prefab/full/linux_arm64_gui/debug/ballisticakit": "9d6c7dcd7bf389dcbe29cfc7937d9a56",
"build/prefab/full/linux_arm64_gui/release/ballisticakit": "4f40db9f40f1c16799254b2c94696942",
"build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "2d224e9438975458e1952af0c2b0d533",
"build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "ad311de40204ae2c96af8b7159fc4b28",
"build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "403fe6bd30d80cf4ecefd47dba093f09",
"build/prefab/full/linux_x86_64_gui/release/ballisticakit": "d9b98710fe088d506543a50294c5a3a4",
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "29c87115ee8771a4a0bc17317a4a127a",
"build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "27a27244b971b48dc545b40a3011ef1a",
"build/prefab/full/mac_arm64_gui/debug/ballisticakit": "5c7540dc387ae395dd73eeb0eaea28ba",
"build/prefab/full/mac_arm64_gui/release/ballisticakit": "81bd15d634824d149e50774b3233a31e",
"build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "37b5d59fb91dca52f1adec53e31b084f",
"build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "af1a00445c5801b194f02ccd9398004d",
"build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "75f400ca9e44557aa1c68aeb87b411f7",
"build/prefab/full/mac_x86_64_gui/release/ballisticakit": "aa7f3acd8b8a5161fc74897b6f24d307",
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "40c465bbefbb7a76abe944a237606965",
"build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "a4ab928602e225bf6cab87fe66418420",
"build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "386448718b4cc1df3b4ddb4792a3a38c",
"build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "59eec91d53ef39b46c69d6955f7920c8",
"build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "57e8c777723b873c7e90ad5d7655de15",
"build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "67899fad5203c451e5e3411bd887100b",
"build/prefab/lib/linux_arm64_gui/debug/libballisticaplus.a": "11668f7191dc8d4e2b03db15ff2a4241",
"build/prefab/lib/linux_arm64_gui/release/libballisticaplus.a": "b190199ce8b8fe122993d14fb7e8e813",
"build/prefab/lib/linux_arm64_server/debug/libballisticaplus.a": "11668f7191dc8d4e2b03db15ff2a4241",
@ -4092,14 +4092,14 @@
"build/prefab/lib/mac_x86_64_gui/release/libballisticaplus.a": "4fab22abc481a97e884cce9d67b35c6b",
"build/prefab/lib/mac_x86_64_server/debug/libballisticaplus.a": "7c536b056d78148a302c975bf9238118",
"build/prefab/lib/mac_x86_64_server/release/libballisticaplus.a": "4fab22abc481a97e884cce9d67b35c6b",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "be00cf9781b1c86439452c4ba0999c8d",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "5803cfddbaf5e09f513aded1a8fc71f5",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "b682b559cef053626ee8ee9da79ad266",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "8d467c3b01c4b2fd4977afd0aabb2bd0",
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "748545de892e5ed6556ba0b05f9a0e9d",
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "2671eae8ad8f42eb272ffed0fc8e93df",
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "23712747790fbd36e33b9e59364accb7",
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "1832b257d7cd01ca68b7b99cbb62bfa6",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "8072b0140655621c07fd97cab9e9ca2d",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "758e51a2b6f52436796c607371221bbc",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "240aea32d29d4cc6260c5be61faad5c7",
"build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "30ea6e77aa5e33949bba018acbbe0b93",
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "8dfb5b9a723189015c9b3eaf73816f90",
"build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "2b82beecc22a653fc8881e76b1c9adcb",
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "29893062fd160d20b621ead316232961",
"build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "ed2ce864584e0ed9d6427bf4b1c5743c",
"src/assets/ba_data/python/babase/_mgen/__init__.py": "f885fed7f2ed98ff2ba271f9dbe3391c",
"src/assets/ba_data/python/babase/_mgen/enums.py": "b611c090513a21e2fe90e56582724e9d",
"src/ballistica/base/mgen/pyembed/binding_base.inc": "72bfed2cce8ff19741989dec28302f3f",

View File

@ -95,8 +95,10 @@ jobs:
uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install dependencies testing
run: make VENV_PYTHON=python prereqs
- name: Create tools/pcommand
run: |
$env:PYTHONPATH = "tools"
python -m efrotools.genwrapper pcommand batools.pcommandmain tools/pcommand
- name: Install dependencies
run: |
python -m pip install --upgrade pip

View File

@ -14,12 +14,9 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
# Remove this once we upgrade to 3.12.
- name: Install typing_extensions (temp)
run: python3.11 -m pip install typing_extensions
python-version: '3.12'
- name: Install pip requirements
run: tools/pcommand install_pip_reqs
run: make prereqs
- name: Build documentation
run: make docs-sphinx
- name: Upload the build

View File

@ -1,9 +1,9 @@
### 1.7.34 (build 21801, api 8, 2024-04-21)
### 1.7.34 (build 21804, api 8, 2024-04-23)
- Bumped Python version from 3.11 to 3.12 for all builds and project tools. One
of the things this means is that we can use `typing.override` instead of the
`typing_extensions` version so the annoying workaround of installing
`typing_extensions` version so the annoying requirement of installing
`typing_extensions` first thing when setting up the repo introduced a few
versions back is finally no longer needed.
versions back is finally no longer a thing.
- The project now maintains its own Python virtual environment in `.venv` where
it automatically installs whatever Python packages it needs instead of asking
the user to do so in their own environment. This should greatly simplify
@ -28,6 +28,7 @@
does not and you get errors.
- `_bascenev1.protocol_version()` now properly throws an exception if called
while scene-v1 is not active.
- The `efro.dataclassio` system now supports `datetime.timedelta` values.
### 1.7.33 (build 21795, api 8, 2024-03-24)
- Stress test input-devices are now a bit smarter; they won't press any buttons

View File

@ -149,13 +149,13 @@ meta-clean:
# Remove ALL files and directories that aren't managed by git (except for a
# few things such as localconfig.json).
clean:
clean: prereqs
$(CHECK_CLEAN_SAFETY)
rm -rf build # Kill this ourself; can confuse git if contains other repos.
git clean -dfx $(ROOT_CLEAN_IGNORES)
# Show what clean would delete without actually deleting it.
clean-list:
clean-list: prereqs
$(CHECK_CLEAN_SAFETY)
@echo Would remove build # Handle this part ourself; can confuse git.
git clean -dnx $(ROOT_CLEAN_IGNORES)
@ -179,7 +179,7 @@ venv: .venv/efro_venv_complete
# Update pip requirements to latest versions.
venv-upgrade: prereqs
$(PCOMMAND) pur -r config/requirements.txt
$(PCOMMAND) requirements_upgrade config/requirements.txt
venv-clean:
rm -rf .venv
@ -752,12 +752,12 @@ update-check: prereqs-pre-update
################################################################################
# Run formatting on all files in the project considered 'dirty'.
format:
format: prereqs
@$(MAKE) -j$(CPUS) format-code format-scripts format-makefile
@$(PCOMMANDBATCH) echo BLD Formatting complete for $(notdir $(CURDIR))!
# Same but always formats; ignores dirty state.
format-full:
format-full: prereqs
@$(MAKE) -j$(CPUS) format-code-full format-scripts-full format-makefile
@$(PCOMMANDBATCH) echo BLD Formatting complete for $(notdir $(CURDIR))!
@ -933,6 +933,7 @@ test-rpc:
preflight:
@$(MAKE) format
@$(MAKE) update
@$(MAKE) -j$(CPUS) py_check_prereqs # Needs to be done explicitly first.
@$(MAKE) -j$(CPUS) cpplint pylint mypy test
@$(PCOMMANDBATCH) echo SGRN BLD PREFLIGHT SUCCESSFUL!
@ -940,6 +941,7 @@ preflight:
preflight-full:
@$(MAKE) format-full
@$(MAKE) update
@$(MAKE) -j$(CPUS) py_check_prereqs # Needs to be done explicitly first.
@$(MAKE) -j$(CPUS) cpplint-full pylint-full mypy-full test-full
@$(PCOMMANDBATCH) echo SGRN BLD PREFLIGHT SUCCESSFUL!
@ -947,6 +949,7 @@ preflight-full:
preflight2:
@$(MAKE) format
@$(MAKE) update
@$(MAKE) -j$(CPUS) py_check_prereqs # Needs to be done explicitly first.
@$(MAKE) -j$(CPUS) cpplint pylint mypy test
@$(PCOMMANDBATCH) echo SGRN BLD PREFLIGHT SUCCESSFUL!
@ -954,6 +957,7 @@ preflight2:
preflight2-full:
@$(MAKE) format-full
@$(MAKE) update
@$(MAKE) -j$(CPUS) py_check_prereqs # Needs to be done explicitly first.
@$(MAKE) -j$(CPUS) cpplint-full pylint-full mypy-full test-full
@$(PCOMMANDBATCH) echo SGRN BLD PREFLIGHT SUCCESSFUL!
@ -1221,14 +1225,15 @@ ENV_SRC = tools/batools/build.py .venv/efro_venv_complete tools/pcommand
# This is a prereq dependency so should not itself depend on prereqs.
tools/pcommand: tools/efrotools/genwrapper.py tools/efrotools/pyver.py
@echo Generating tools/pcommand...
@PYTHONPATH=tools python3 -m efrotools.genwrapper pcommand tools/pcommand \
batools.pcommandmain
@PYTHONPATH=tools python3 -m \
efrotools.genwrapper pcommand batools.pcommandmain tools/pcommand
# Generate a cloudshell script hard-coded to use our virtual environment.
# This is a prereq dependency so should not itself depend on prereqs.
tools/cloudshell: tools/efrotools/genwrapper.py tools/efrotools/pyver.py
@echo Generating tools/cloudshell...
@PYTHONPATH=tools python3 -m efrotools.genwrapper cloudshell tools/cloudshell
@PYTHONPATH=tools python3 -m \
efrotools.genwrapper cloudshell efrotoolsinternal.cloudshell tools/cloudshell
.clang-format: config/toolconfigsrc/clang-format $(TOOL_CFG_SRC)
@$(TOOL_CFG_INST) $< $@

View File

@ -1,25 +1,24 @@
typing_extensions==4.11.0
filelock==3.13.4
python-daemon==3.0.1
python-lsp-server==1.11.0
python-lsp-black==2.0.0
mypy==1.9.0
pylint==3.1.0
cpplint==1.6.1
pytest==8.1.1
pytz==2024.1
ansiwrap==0.8.4
requests==2.31.0
cpplint==1.6.1
filelock==3.13.4
furo==2024.1.29
mypy==1.9.0
pbxproj==4.1.0
pdoc==14.4.0
pur==7.3.1
pylint==3.1.0
pylsp-mypy==0.6.8
pytest==8.1.1
python-daemon==3.0.1
python-lsp-black==2.0.0
python-lsp-server==1.11.0
pytz==2024.1
PyYAML==6.0.1
requests==2.31.0
Sphinx==7.3.7
types-certifi==2021.10.8.3
types-filelock==3.2.7
types-requests==2.31.0.20240406
types-pytz==2024.1.0.20240417
types-PyYAML==6.0.12.20240311
types-certifi==2021.10.8.3
pbxproj==4.1.0
Sphinx==7.3.7
furo==2024.1.29
pylsp-mypy==0.6.8
pur==7.3.1
types-requests==2.31.0.20240406
typing_extensions==4.11.0

View File

@ -52,7 +52,7 @@ if TYPE_CHECKING:
# Build number and version of the ballistica binary we expect to be
# using.
TARGET_BALLISTICA_BUILD = 21801
TARGET_BALLISTICA_BUILD = 21804
TARGET_BALLISTICA_VERSION = '1.7.34'

View File

@ -39,7 +39,7 @@ auto main(int argc, char** argv) -> int {
namespace ballistica {
// These are set automatically via script; don't modify them here.
const int kEngineBuildNumber = 21801;
const int kEngineBuildNumber = 21804;
const char* kEngineVersion = "1.7.34";
const int kEngineApiVersion = 8;

View File

@ -101,6 +101,7 @@ def test_assign() -> None:
dictval: dict[int, str] = field(default_factory=dict)
tupleval: tuple[int, str, bool] = (1, 'foo', False)
datetimeval: datetime.datetime | None = None
timedeltaval: datetime.timedelta | None = None
class _TestClass2:
pass
@ -116,10 +117,10 @@ def test_assign() -> None:
dataclass_from_dict(_TestClass, None) # type: ignore
now = utc_now()
tdelta = datetime.timedelta(days=123, seconds=456, microseconds=789)
# A dict containing *ALL* values should match what we
# get when creating a dataclass and then converting back
# to a dict.
# A dict containing *ALL* values should exactly match what we get
# when creating a dataclass and then converting back to a dict.
dict1 = {
'ival': 1,
'sval': 'foo',
@ -156,6 +157,7 @@ def test_assign() -> None:
now.second,
now.microsecond,
],
'timedeltaval': [tdelta.days, tdelta.seconds, tdelta.microseconds],
}
dc1 = dataclass_from_dict(_TestClass, dict1)
assert dataclass_to_dict(dc1) == dict1

View File

@ -24,7 +24,8 @@ def generate_app_module(
# pylint: disable=too-many-statements
import textwrap
from efrotools import replace_section, getprojectconfig
from efrotools.util import replace_section
from efrotools.project import getprojectconfig
out = ''

View File

@ -415,7 +415,7 @@ def generate_assets_makefile(
) -> dict[str, str]:
"""Main script entry point."""
# pylint: disable=too-many-locals
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
from pathlib import Path
public = getprojectconfig(Path(projroot))['public']

View File

@ -286,6 +286,7 @@ def _run_sphinx(
index_rst.write(index_template.render(data=data))
starttime = time.monotonic()
apidoc_cmd = [
'sphinx-apidoc',
# '-f', # Force overwriting of any existing generated files.
@ -302,14 +303,28 @@ def _run_sphinx(
paths['sphinx_cache_dir'],
]
# Prevents Python from writing __pycache__ dirs in our source tree
# which leads to slight annoyances.
environ = dict(os.environ, PYTHONDONTWRITEBYTECODE='1')
if generate_dummymodules_doc:
subprocess.run(
apidoc_cmd + [assets_dirs['dummy_modules']] + ['--private'],
check=True,
env=environ,
)
if generate_tools_doc:
subprocess.run(apidoc_cmd + [assets_dirs['efro_tools']], check=True)
subprocess.run(apidoc_cmd + [assets_dirs['ba_data'], '-f'], check=True)
subprocess.run(
apidoc_cmd + [assets_dirs['efro_tools']],
check=True,
env=environ,
)
subprocess.run(
apidoc_cmd + [assets_dirs['ba_data'], '-f'],
check=True,
env=environ,
)
# -f for regenerating index page so it contains the ba_data modules
subprocess.run(
@ -324,6 +339,7 @@ def _run_sphinx(
# '-Q', #quiet now
],
check=True,
env=environ,
)
duration = time.monotonic() - starttime

View File

@ -14,7 +14,7 @@ from typing import TYPE_CHECKING
from dataclasses import dataclass
from efro.error import CleanError
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
if TYPE_CHECKING:
pass

View File

@ -55,7 +55,7 @@ from efrotools.pcommands import (
tweak_empty_py_files,
make_ensure,
make_target_debug,
pur,
requirements_upgrade,
)
from efrotools.pcommands2 import (
with_build_lock,

View File

@ -126,7 +126,7 @@ def lazy_increment_build() -> None:
import subprocess
from efro.terminal import Clr
from efro.error import CleanError
from efrotools import get_files_hash
from efrotools.util import get_files_hash
from efrotools.code import get_code_filenames
pcommand.disallow_in_batch()
@ -221,7 +221,7 @@ def androidaddr() -> None:
def push_ipa() -> None:
"""Construct and push ios IPA for testing."""
from efrotools import extract_arg
from efrotools.util import extract_arg
import efrotools.ios
pcommand.disallow_in_batch()
@ -493,7 +493,7 @@ def warm_start_asset_build() -> None:
import subprocess
from pathlib import Path
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
from efro.error import CleanError
pcommand.disallow_in_batch()

View File

@ -124,7 +124,7 @@ def py_examine() -> None:
"""Run a python examination at a given point in a given file."""
import os
from pathlib import Path
import efrotools
import efrotools.emacs
pcommand.disallow_in_batch()
@ -153,7 +153,7 @@ def py_examine() -> None:
sys.path.append(scriptsdir)
if toolsdir not in sys.path:
sys.path.append(toolsdir)
efrotools.py_examine(
efrotools.emacs.py_examine(
pcommand.PROJROOT, filename, line, column, selection, operation
)

View File

@ -587,7 +587,7 @@ def check_sync_states(self: ProjectUpdater) -> None:
def check_misc(self: ProjectUpdater) -> None:
"""Check misc project stuff."""
from efrotools import readfile, replace_exact
from efrotools.util import readfile, replace_exact
# Make sure we're set to prod master server. (but ONLY when
# checking; still want to be able to run updates).

View File

@ -9,7 +9,7 @@ from pathlib import Path
from typing import TYPE_CHECKING
from dataclasses import dataclass
from efrotools import getprojectconfig, getlocalconfig
from efrotools.project import getprojectconfig, getlocalconfig
from efro.error import CleanError
from efro.terminal import Clr

View File

@ -57,7 +57,7 @@ class ResourcesMakefileGenerator:
existing_data: str,
projname: str,
) -> None:
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
self.public = getprojectconfig(Path(projroot))['public']
assert isinstance(self.public, bool)

View File

@ -15,7 +15,8 @@ from pathlib import Path
from typing import TYPE_CHECKING, assert_never
from efrotools.code import format_python_str, format_cpp_str
from efrotools import getprojectconfig, replace_exact
from efrotools.project import getprojectconfig
from efrotools.util import replace_exact
from efro.error import CleanError
from efro.terminal import Clr
from efro.util import timedelta_str
@ -693,7 +694,7 @@ class SpinoffContext:
def _generate_env_hash(self) -> None:
# pylint: disable=cyclic-import
from efrotools import get_files_hash
from efrotools.util import get_files_hash
# noinspection PyUnresolvedReferences
import batools.spinoff

View File

@ -13,7 +13,7 @@ from typing import assert_never, TYPE_CHECKING
from efro.error import CleanError
from efro.terminal import Clr
from efrotools import replace_exact
from efrotools.util import replace_exact
from batools.spinoff._context import SpinoffContext
@ -65,19 +65,8 @@ def _main() -> None:
_print_available_commands()
return
dst_root = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '..'))
# Determine our src project based on our tools/spinoff symlink.
# If its not a link it means we ARE a src project.
dst_spinoff_path = os.path.join(dst_root, 'tools', 'spinoff')
if os.path.islink(dst_spinoff_path):
src_root = os.path.abspath(
os.path.join(
os.path.dirname(os.path.realpath(dst_spinoff_path)), '..'
)
)
else:
src_root = None
src_root = os.environ['BA_SPINOFF_SRC_ROOT']
dst_root = os.environ.get('BA_SPINOFF_DST_ROOT')
single_run_mode: SpinoffContext.Mode | None = None
@ -102,14 +91,14 @@ def _main() -> None:
elif cmd is Command.BACKPORT:
_do_backport(src_root, dst_root)
elif cmd is Command.FEATURE_SET_LIST:
_do_featuresets(dst_root)
_do_featuresets(src_root)
elif cmd is Command.CREATE:
_do_create(src_root, dst_root)
elif cmd is Command.ADD_SUBMODULE_PARENT:
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
public = getprojectconfig(Path(dst_root))['public']
_do_add_submodule_parent(dst_root, is_new=False, public=public)
public = getprojectconfig(Path(src_root))['public']
_do_add_submodule_parent(src_root, is_new=False, public=public)
elif cmd is Command.FEATURE_SET_COPY:
_do_featureset_copy()
elif cmd is Command.FEATURE_SET_DELETE:
@ -118,13 +107,13 @@ def _main() -> None:
assert_never(cmd)
if single_run_mode is not None:
from efrotools import extract_flag
from efrotools.util import extract_flag
args = sys.argv[2:]
force = extract_flag(args, '--force')
verbose = extract_flag(args, '--verbose')
print_full_lists = extract_flag(args, '--full')
if src_root is None:
if dst_root is None:
if '--soft' in sys.argv:
return
raise CleanError(
@ -155,16 +144,14 @@ def _main() -> None:
).run()
def _do_create(src_root: str | None, dst_root: str) -> None:
def _do_create(src_root: str, dst_root: str | None) -> None:
# pylint: disable=too-many-locals, cyclic-import
from efrotools import extract_arg, extract_flag
from efrotools.util import extract_arg, extract_flag
from efrotools.code import format_python_str
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
import batools.spinoff
# Note: in our case dst_root is actually what becomes the src project
# should clean up these var names to make that clearer.
if src_root is not None:
if dst_root is not None:
raise CleanError('This only works on src projects.')
args = sys.argv[2:]
@ -224,7 +211,7 @@ def _do_create(src_root: str | None, dst_root: str) -> None:
template,
'# __SRC_FEATURE_SETS__',
format_python_str(
projroot=dst_root, code=f'ctx.src_feature_sets = {featuresets!r}'
projroot=src_root, code=f'ctx.src_feature_sets = {featuresets!r}'
),
)
@ -237,7 +224,7 @@ def _do_create(src_root: str | None, dst_root: str) -> None:
# on git so its best to always do this.
subprocess.run(['git', 'init'], cwd=path, check=True, capture_output=True)
public = getprojectconfig(Path(dst_root))['public']
public = getprojectconfig(Path(src_root))['public']
if submodule_parent:
_do_add_submodule_parent(path, is_new=True, public=public)
@ -246,7 +233,7 @@ def _do_create(src_root: str | None, dst_root: str) -> None:
[
'ln',
'-s',
os.path.join(dst_root, 'tools', 'spinoff'),
os.path.join(src_root, 'tools', 'spinoff'),
os.path.join(path, 'tools'),
],
check=True,
@ -330,7 +317,7 @@ def _do_featureset_delete() -> None:
def _do_featureset_copy() -> None:
# pylint: disable=too-many-locals
from efrotools import extract_flag
from efrotools.util import extract_flag
from batools.featureset import FeatureSet
@ -514,9 +501,10 @@ def _do_featureset_copy_dir(
)
def _do_override(src_root: str | None, dst_root: str) -> None:
if src_root is None:
def _do_override(src_root: str, dst_root: str | None) -> None:
if dst_root is None:
raise CleanError('This only works on dst projects.')
override_paths = [os.path.abspath(p) for p in sys.argv[2:]]
if not override_paths:
raise RuntimeError('Expected at least one path arg.')
@ -549,8 +537,8 @@ def _do_override(src_root: str | None, dst_root: str) -> None:
SpinoffContext(src_root, dst_root, SpinoffContext.Mode.UPDATE).run()
def _do_backport(src_root: str | None, dst_root: str) -> None:
if src_root is None:
def _do_backport(src_root: str, dst_root: str | None) -> None:
if dst_root is None:
raise CleanError('This only works on dst projects.')
args = sys.argv[2:]
auto = '--auto' in args

View File

@ -20,7 +20,8 @@ def spinoff_test(args: list[str]) -> None:
import subprocess
from batools.featureset import FeatureSet
from efrotools import extract_flag, getprojectconfig
from efrotools.util import extract_flag
from efrotools.project import getprojectconfig
from efro.terminal import Clr
from efro.error import CleanError

View File

@ -12,7 +12,7 @@ from functools import partial
from typing import TYPE_CHECKING
from efro.terminal import Clr
from efrotools import extract_arg, extract_flag
from efrotools.util import extract_arg, extract_flag
from efrotools.pyver import PYVER
if TYPE_CHECKING:
@ -750,7 +750,7 @@ def _stage_server_file(
) -> None:
"""Stage files for the server environment with some filtering."""
import batools.build
from efrotools import replace_exact
from efrotools.util import replace_exact
if mode not in ('debug', 'release'):
raise RuntimeError(

View File

@ -14,7 +14,7 @@ if TYPE_CHECKING:
def generate_top_level_makefile(projroot: str, existing_data: str) -> str:
"""Main script entry point."""
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
from pathlib import Path
public = getprojectconfig(Path(projroot))['public']

View File

@ -193,6 +193,9 @@ class _Inputter:
if issubclass(origin, datetime.datetime):
return self._datetime_from_input(cls, fieldpath, value, ioattrs)
if issubclass(origin, datetime.timedelta):
return self._timedelta_from_input(cls, fieldpath, value, ioattrs)
if origin is bytes:
return self._bytes_from_input(origin, fieldpath, value)
@ -634,3 +637,23 @@ class _Inputter:
if ioattrs is not None:
ioattrs.validate_datetime(out, fieldpath)
return out
def _timedelta_from_input(
self, cls: type, fieldpath: str, value: Any, ioattrs: IOAttrs | None
) -> Any:
del ioattrs # Unused.
# We expect a list of 3 ints.
if type(value) is not list:
raise TypeError(
f'Invalid input value for "{fieldpath}" on "{cls.__name__}";'
f' expected a list, got a {type(value).__name__}'
)
if len(value) != 3 or not all(isinstance(x, int) for x in value):
raise ValueError(
f'Invalid input value for "{fieldpath}" on "{cls.__name__}";'
f' expected a list of 3 ints, got {[type(v) for v in value]}.'
)
out = datetime.timedelta(
days=value[0], seconds=value[1], microseconds=value[2]
)
return out

View File

@ -454,6 +454,17 @@ class _Outputter:
if self._create
else None
)
if issubclass(origin, datetime.timedelta):
if not isinstance(value, origin):
raise TypeError(
f'Expected a {origin} for {fieldpath};'
f' found a {type(value)}.'
)
return (
[value.days, value.seconds, value.microseconds]
if self._create
else None
)
if origin is bytes:
return self._process_bytes(cls, fieldpath, value)

View File

@ -420,6 +420,10 @@ class PrepSession:
if issubclass(origin, datetime.datetime):
return
# We support datetime.timedelta.
if issubclass(origin, datetime.timedelta):
return
if dataclasses.is_dataclass(origin):
self.prep_dataclass(origin, recursion_level=recursion_level + 1)
return

View File

@ -5,336 +5,3 @@
This stuff can be a bit more sloppy/loosey-goosey since it is not used in
live client or server code.
"""
# FIXME: should migrate everything here into submodules since this adds
# overhead to anything importing from any efrotools submodule.
from __future__ import annotations
import os
import json
from pathlib import Path
from typing import TYPE_CHECKING, overload
if TYPE_CHECKING:
from typing import Sequence, Any, Literal
# Cache these since we may repeatedly fetch these in batch mode.
_g_project_configs: dict[str, dict[str, Any]] = {}
_g_local_configs: dict[str, dict[str, Any]] = {}
def explicit_bool(value: bool) -> bool:
    """Return *value* unchanged.

    Exists purely so static analyzers cannot prove a condition constant,
    which sidesteps unreachable-code type warnings.
    """
    return value
def getlocalconfig(projroot: Path | str) -> dict[str, Any]:
    """Fetch a project's localconfig contents (or {} when absent).

    Results are cached per project-root so repeated lookups are cheap
    (useful in batch mode).
    """
    key = str(projroot)
    if key not in _g_local_configs:
        # The config path may be overridden through the environment.
        relpath = os.environ.get(
            'EFRO_LOCALCONFIG_PATH', 'config/localconfig.json'
        )
        try:
            loaded = json.loads(
                Path(projroot, relpath).read_text(encoding='utf-8')
            )
        except FileNotFoundError:
            loaded = {}
        _g_local_configs[key] = loaded
    return _g_local_configs[key]
def getprojectconfig(projroot: Path | str) -> dict[str, Any]:
    """Fetch a project's projectconfig contents (or {} when absent).

    Results are cached per project-root so repeated lookups are cheap.
    """
    key = str(projroot)
    if key not in _g_project_configs:
        cfgpath = Path(projroot, 'config/projectconfig.json')
        try:
            loaded = json.loads(cfgpath.read_text(encoding='utf-8'))
        except FileNotFoundError:
            loaded = {}
        _g_project_configs[key] = loaded
    return _g_project_configs[key]
def setprojectconfig(projroot: Path | str, config: dict[str, Any]) -> None:
    """Write *config* out as the project's projectconfig.

    Also refreshes the in-memory cache so subsequent reads see it.
    """
    _g_project_configs[str(projroot)] = config
    cfgdir = Path(projroot, 'config')
    os.makedirs(cfgdir, exist_ok=True)
    (cfgdir / 'projectconfig.json').write_text(
        json.dumps(config, indent=2), encoding='utf-8'
    )
def extract_flag(args: list[str], name: str) -> bool:
    """Report whether flag *name* appears in *args*, removing it if so.

    Raises a CleanError if the flag occurs more than once.
    """
    from efro.error import CleanError

    occurrences = args.count(name)
    if occurrences > 1:
        raise CleanError(f'Flag {name} passed multiple times.')
    if occurrences == 0:
        return False
    args.remove(name)
    return True
@overload
def extract_arg(
    args: list[str], name: str, required: Literal[False] = False
) -> str | None: ...
@overload
def extract_arg(args: list[str], name: str, required: Literal[True]) -> str: ...
def extract_arg(
    args: list[str], name: str, required: bool = False
) -> str | None:
    """Pull the value following flag *name* out of *args*.

    Both the flag and its value are removed from the list. Returns None
    when the flag is absent and not required. Raises CleanError when the
    flag is missing-but-required, duplicated, or lacks a trailing value.
    """
    from efro.error import CleanError

    occurrences = args.count(name)
    if occurrences == 0:
        if required:
            raise CleanError(f'Required argument {name} not passed.')
        return None
    if occurrences > 1:
        raise CleanError(f'Arg {name} passed multiple times.')
    idx = args.index(name)
    if idx + 1 >= len(args):
        raise CleanError(f'No value passed after {name} arg.')
    value = args[idx + 1]
    del args[idx : idx + 2]
    return value
def replace_section(
    text: str,
    begin_marker: str,
    end_marker: str,
    replace_text: str = '',
    keep_markers: bool = False,
    error_if_missing: bool = True,
) -> str:
    """Swap out the text between two unique marker strings.

    By default the markers themselves are replaced as well; pass
    keep_markers=True to retain them around the new text. If the begin
    marker is absent, either raise (the default) or return *text*
    untouched when error_if_missing is False. Each marker must occur
    exactly once.
    """
    if begin_marker not in text:
        if error_if_missing:
            raise RuntimeError(f"Marker not found in text: '{begin_marker}'.")
        return text
    pieces = text.split(begin_marker)
    if len(pieces) != 2:
        raise RuntimeError(
            f"Expected one marker '{begin_marker}'"
            f'; found {text.count(begin_marker)}.'
        )
    prefix, remainder = pieces
    pieces = remainder.split(end_marker)
    if len(pieces) != 2:
        raise RuntimeError(
            f"Expected one marker '{end_marker}'"
            f'; found {text.count(end_marker)}.'
        )
    suffix = pieces[1]
    middle = (
        f'{begin_marker}{replace_text}{end_marker}'
        if keep_markers
        else replace_text
    )
    return f'{prefix}{middle}{suffix}'
def readfile(path: str | Path) -> str:
    """Read a utf-8 text file into a string."""
    with open(path, mode='r', encoding='utf-8') as infile:
        contents = infile.read()
    return contents
def writefile(path: str | Path, txt: str) -> None:
    """Write a string to a utf-8 text file."""
    # Overwrites any existing file at path.
    with open(path, mode='w', encoding='utf-8') as outstream:
        outstream.write(txt)
def replace_exact(
    opstr: str, old: str, new: str, count: int = 1, label: str | None = None
) -> str:
    """Replace text, requiring an exact number of occurrences.

    Useful when filtering data in some predefined way to ensure the
    original has not changed. Raises RuntimeError if the number of
    occurrences of old differs from count; label, if given, is
    included in the error message.
    """
    occurrences = opstr.count(old)
    if occurrences != count:
        in_label = f' in {label}' if label is not None else ''
        raise RuntimeError(
            f'Expected {count} string occurrence(s){in_label};'
            f' found {occurrences}. String: {repr(old)}'
        )
    return opstr.replace(old, new)
def get_files_hash(
    filenames: Sequence[str | Path],
    extrahash: str = '',
    int_only: bool = False,
    hashtype: Literal['md5', 'sha256'] = 'md5',
) -> str:
    """Return a hash for the given files.

    File contents are hashed in list order, followed by extrahash.
    Returns a hex digest by default, or the digest as a decimal
    integer string when int_only is True.
    """
    import hashlib

    if not isinstance(filenames, list):
        raise RuntimeError(f'Expected a list; got a {type(filenames)}.')
    if TYPE_CHECKING:
        # Help Mypy infer the right type for this.
        hashobj = hashlib.md5()
    else:
        hashobj = getattr(hashlib, hashtype)()
    # Feed files through in 1MB chunks to keep memory use bounded.
    chunksize = 2**20
    for fname in filenames:
        with open(fname, 'rb') as infile:
            while chunk := infile.read(chunksize):
                hashobj.update(chunk)
    hashobj.update(extrahash.encode())
    if int_only:
        return str(int.from_bytes(hashobj.digest(), byteorder='big'))
    return hashobj.hexdigest()
def get_string_hash(
    value: str,
    int_only: bool = False,
    hashtype: Literal['md5', 'sha256'] = 'md5',
) -> str:
    """Return a hash for the given string.

    The string is hashed as utf-8 bytes. Returns a hex digest by
    default, or the digest as a decimal integer string when int_only
    is True. Raises TypeError if value is not a str.
    """
    import hashlib

    if not isinstance(value, str):
        raise TypeError('Expected a str.')
    if TYPE_CHECKING:
        # Help Mypy infer the right type for this.
        hashobj = hashlib.md5()
    else:
        hashobj = getattr(hashlib, hashtype)()
    hashobj.update(value.encode())
    if int_only:
        return str(int.from_bytes(hashobj.digest(), byteorder='big'))
    return hashobj.hexdigest()
def _py_symbol_at_column(line: str, col: int) -> str:
start = col
while start > 0 and line[start - 1] != ' ':
start -= 1
end = col
while end < len(line) and line[end] != ' ':
end += 1
return line[start:end]
def py_examine(
    projroot: Path,
    filename: Path,
    line: int,
    column: int,
    selection: str | None,
    operation: str,
) -> None:
    """Given file position info, performs some code inspection.

    Supported operations are 'pylint_infer', 'mypy_infer',
    'mypy_locals', 'pylint_node', and 'pylint_tree'; results are
    printed to stdout. 'line' is 1-based; 'column' is a 0-based index
    into that line. 'selection', if provided, is used as the target
    symbol instead of extracting one at the given position.
    """
    # pylint: disable=too-many-locals
    # pylint: disable=cyclic-import
    import astroid
    import re
    from efrotools import code
    # Pull in our pylint plugin which really just adds astroid filters.
    # That way our introspection here will see the same thing as pylint's does.
    with open(filename, encoding='utf-8') as infile:
        fcontents = infile.read()
    # '#@' is the marker astroid.extract_node keys off of; a pre-existing
    # one in the file would confuse the queries below.
    if '#@' in fcontents:
        raise RuntimeError('#@ marker found in file; this breaks examinations.')
    flines = fcontents.splitlines()
    if operation == 'pylint_infer':
        # See what astroid can infer about the target symbol.
        symbol = (
            selection
            if selection is not None
            else _py_symbol_at_column(flines[line - 1], column)
        )
        # Insert a line after the provided one which is just the symbol so
        # that we can ask for its value alone.
        match = re.match(r'\s*', flines[line - 1])
        whitespace = match.group() if match is not None else ''
        sline = whitespace + symbol + ' #@'
        flines = flines[:line] + [sline] + flines[line:]
        node = astroid.extract_node('\n'.join(flines))
        inferred = list(node.infer())
        print(symbol + ':', ', '.join([str(i) for i in inferred]))
    elif operation in ('mypy_infer', 'mypy_locals'):
        # Ask mypy for the type of the target symbol.
        symbol = (
            selection
            if selection is not None
            else _py_symbol_at_column(flines[line - 1], column)
        )
        # Insert a line after the provided one which is just the symbol so
        # that we can ask for its value alone.
        match = re.match(r'\s*', flines[line - 1])
        whitespace = match.group() if match is not None else ''
        if operation == 'mypy_infer':
            sline = whitespace + 'reveal_type(' + symbol + ')'
        else:
            sline = whitespace + 'reveal_locals()'
        flines = flines[:line] + [sline] + flines[line:]
        # Write a temp file and run the check on it.
        # Let's use 'flycheck_*' for the name since pipeline scripts
        # are already set to ignore those files.
        tmppath = Path(filename.parent, 'flycheck_mp_' + filename.name)
        with tmppath.open('w', encoding='utf-8') as outfile:
            outfile.write('\n'.join(flines))
        try:
            code.mypy_files(projroot, [str(tmppath)], check=False)
        except Exception as exc:
            print('error running mypy:', exc)
        # Clean up our temp file.
        tmppath.unlink()
    elif operation == 'pylint_node':
        # Print the single astroid node at the given line.
        flines[line - 1] += ' #@'
        node = astroid.extract_node('\n'.join(flines))
        print(node)
    elif operation == 'pylint_tree':
        # Print the astroid tree rooted at the given line.
        flines[line - 1] += ' #@'
        node = astroid.extract_node('\n'.join(flines))
        print(node.repr_tree())
    else:
        print('unknown operation: ' + operation)

View File

@ -77,7 +77,7 @@ def format_project_cpp_files(projroot: Path, full: bool) -> None:
import concurrent.futures
from multiprocessing import cpu_count
from efrotools import get_files_hash
from efrotools.util import get_files_hash
if os.path.abspath(projroot) != os.getcwd():
raise RuntimeError('We expect to be running from project root.')
@ -137,7 +137,7 @@ def check_cpplint(projroot: Path, full: bool) -> None:
from concurrent.futures import ThreadPoolExecutor
from multiprocessing import cpu_count
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
from efro.terminal import Clr
os.chdir(projroot)
@ -221,7 +221,7 @@ def get_code_filenames(projroot: Path, include_generated: bool) -> list[str]:
could cause dirty generated files to not get updated properly when
their sources change).
"""
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
exts = ('.h', '.c', '.cc', '.cpp', '.cxx', '.m', '.mm')
places = getprojectconfig(projroot).get('code_source_dirs', None)
@ -272,7 +272,7 @@ def black_base_args(projroot: Path) -> list[str]:
def format_project_python_files(projroot: Path, full: bool) -> None:
"""Runs formatting on all of our Python code."""
from efrotools import get_string_hash
from efrotools.util import get_string_hash
os.chdir(projroot)
cachepath = Path(projroot, '.cache/format_project_python_files')
@ -344,7 +344,7 @@ def _should_include_script(fnamefull: str) -> bool:
def get_script_filenames(projroot: Path) -> list[str]:
"""Return the Python filenames to lint-check or auto-format."""
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
proot = f'{projroot}/'
@ -392,7 +392,7 @@ def runpylint(projroot: Path, filenames: list[str]) -> None:
def pylint(projroot: Path, full: bool, fast: bool) -> None:
"""Run Pylint on all scripts in our project (with smart dep tracking)."""
from efrotools import get_files_hash
from efrotools.util import get_files_hash
from efro.terminal import Clr
pylintrc = Path(projroot, '.pylintrc')
@ -568,7 +568,7 @@ def _apply_pylint_run_to_cache(
from astroid import modutils
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
# First off, build a map of dirtyfiles to module names
# (and the corresponding reverse map).

View File

@ -74,10 +74,9 @@ def get_local_cache_dir() -> str:
def get_repository_base_url() -> str:
"""Return the base repository url (assumes cwd is project root)."""
# from efrotools import getprojectconfig
import efrotools
from efrotools.project import getprojectconfig
pconfig = efrotools.getprojectconfig('.')
pconfig = getprojectconfig('.')
name = 'efrocache_repository_url'
val = pconfig.get(name)
if not isinstance(val, str):

103
tools/efrotools/emacs.py Normal file
View File

@ -0,0 +1,103 @@
# Released under the MIT License. See LICENSE for details.
#
"""Stuff intended to be used from emacs"""
from __future__ import annotations
from pathlib import Path
from typing import TYPE_CHECKING
if TYPE_CHECKING:
pass
def _py_symbol_at_column(line: str, col: int) -> str:
start = col
while start > 0 and line[start - 1] != ' ':
start -= 1
end = col
while end < len(line) and line[end] != ' ':
end += 1
return line[start:end]
def py_examine(
    projroot: Path,
    filename: Path,
    line: int,
    column: int,
    selection: str | None,
    operation: str,
) -> None:
    """Given file position info, performs some code inspection.

    Supported operations are 'pylint_infer', 'mypy_infer',
    'mypy_locals', 'pylint_node', and 'pylint_tree'; results are
    printed to stdout. 'line' is 1-based; 'column' is a 0-based index
    into that line. 'selection', if provided, is used as the target
    symbol instead of extracting one at the given position.
    """
    # pylint: disable=too-many-locals
    # pylint: disable=cyclic-import
    import astroid
    import re
    from efrotools import code
    # Pull in our pylint plugin which really just adds astroid filters.
    # That way our introspection here will see the same thing as pylint's does.
    with open(filename, encoding='utf-8') as infile:
        fcontents = infile.read()
    # '#@' is the marker astroid.extract_node keys off of; a pre-existing
    # one in the file would confuse the queries below.
    if '#@' in fcontents:
        raise RuntimeError('#@ marker found in file; this breaks examinations.')
    flines = fcontents.splitlines()
    if operation == 'pylint_infer':
        # See what astroid can infer about the target symbol.
        symbol = (
            selection
            if selection is not None
            else _py_symbol_at_column(flines[line - 1], column)
        )
        # Insert a line after the provided one which is just the symbol so
        # that we can ask for its value alone.
        match = re.match(r'\s*', flines[line - 1])
        whitespace = match.group() if match is not None else ''
        sline = whitespace + symbol + ' #@'
        flines = flines[:line] + [sline] + flines[line:]
        node = astroid.extract_node('\n'.join(flines))
        inferred = list(node.infer())
        print(symbol + ':', ', '.join([str(i) for i in inferred]))
    elif operation in ('mypy_infer', 'mypy_locals'):
        # Ask mypy for the type of the target symbol.
        symbol = (
            selection
            if selection is not None
            else _py_symbol_at_column(flines[line - 1], column)
        )
        # Insert a line after the provided one which is just the symbol so
        # that we can ask for its value alone.
        match = re.match(r'\s*', flines[line - 1])
        whitespace = match.group() if match is not None else ''
        if operation == 'mypy_infer':
            sline = whitespace + 'reveal_type(' + symbol + ')'
        else:
            sline = whitespace + 'reveal_locals()'
        flines = flines[:line] + [sline] + flines[line:]
        # Write a temp file and run the check on it.
        # Let's use 'flycheck_*' for the name since pipeline scripts
        # are already set to ignore those files.
        tmppath = Path(filename.parent, 'flycheck_mp_' + filename.name)
        with tmppath.open('w', encoding='utf-8') as outfile:
            outfile.write('\n'.join(flines))
        try:
            code.mypy_files(projroot, [str(tmppath)], check=False)
        except Exception as exc:
            print('error running mypy:', exc)
        # Clean up our temp file.
        tmppath.unlink()
    elif operation == 'pylint_node':
        # Print the single astroid node at the given line.
        flines[line - 1] += ' #@'
        node = astroid.extract_node('\n'.join(flines))
        print(node)
    elif operation == 'pylint_tree':
        # Print the astroid tree rooted at the given line.
        flines[line - 1] += ' #@'
        node = astroid.extract_node('\n'.join(flines))
        print(node.repr_tree())
    else:
        print('unknown operation: ' + operation)

View File

@ -12,7 +12,7 @@ from typing import TYPE_CHECKING
# pylint: disable=useless-suppression
# pylint: disable=wrong-import-order
from efro.terminal import Clr
from efrotools import get_files_hash
from efrotools.util import get_files_hash
# pylint: enable=wrong-import-order
# pylint: enable=useless-suppression

View File

@ -12,18 +12,19 @@ if TYPE_CHECKING:
pass
if __name__ == '__main__':
if len(sys.argv) not in (3, 4):
raise RuntimeError('Expected 2 args')
from efrotools import getprojectconfig
from efrotools.project import (
getprojectconfig,
get_public_legal_notice,
get_non_public_legal_notice,
)
gentype = sys.argv[1]
path = sys.argv[2]
module = sys.argv[3] if len(sys.argv) > 3 else None
# We expect 3 args: tool-name, tool-module, output-path
if len(sys.argv) != 4:
raise RuntimeError('Expected 3 args')
toolname = sys.argv[1]
toolmodule = sys.argv[2]
outpath = sys.argv[3]
# We technically could stick the 'python' or 'python3' path in, but
# let's go with the full versioned one just to keep it clear what
@ -41,39 +42,22 @@ if __name__ == '__main__':
if public
else get_non_public_legal_notice()
)
if gentype == 'pcommand':
assert module is not None
contents = (
f'#!{pybinpath}\n'
f'# {legalnotice}\n'
'# This file is autogenerated; do not hand edit.\n'
'#\n'
'"""Simple wrapper so pcommand uses our internal virtual'
' environment."""\n'
f'from {module} import run_pcommand_main\n'
'\n'
'if __name__ == "__main__":\n'
' run_pcommand_main()\n'
)
contents = (
f'#!{pybinpath}\n'
f'# {legalnotice}\n'
f'# This file is autogenerated; do not edit.\n'
f'#\n'
f'"""Simple wrapper so {toolname} uses our internal virtual'
f' environment."""\n'
f'from {toolmodule} import run_{toolname}_main\n'
f'\n'
f'if __name__ == "__main__":\n'
f' run_{toolname}_main()\n'
)
elif gentype == 'cloudshell':
contents = (
f'#!{pybinpath}\n'
f'# {legalnotice}\n'
'# This file is autogenerated; do not hand edit.\n'
'#\n'
'"""Simple wrapper so cloudshell uses our'
' internal virtual environment."""\n'
'from efrotoolsinternal.cloudshell import run_cloudshell_main\n'
'\n'
'if __name__ == "__main__":\n'
' run_cloudshell_main()\n'
)
else:
raise RuntimeError(f'Unsupported gentype: {gentype}')
with open(path, 'w', encoding='utf-8') as outfile:
with open(outpath, 'w', encoding='utf-8') as outfile:
outfile.write(contents)
os.chmod(
path, os.stat(path).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
outpath,
os.stat(outpath).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH,
)

View File

@ -9,7 +9,7 @@ import subprocess
import sys
from dataclasses import dataclass
from efrotools import getprojectconfig, getlocalconfig
from efrotools.project import getprojectconfig, getlocalconfig
MODES = {
'debug': {'configuration': 'Debug'},

View File

@ -16,7 +16,7 @@ from typing import TYPE_CHECKING
# pylint: disable=wrong-import-order
from efro.terminal import Clr
from efrotools.buildlock import BuildLock
from efrotools import get_string_hash
from efrotools.util import get_string_hash
# pylint: enable=wrong-import-order
# pylint: enable=useless-suppression

View File

@ -33,7 +33,7 @@ def build_openal(arch: str, mode: str) -> None:
"""Do the thing."""
# pylint: disable=too-many-statements
# pylint: disable=too-many-locals
from efrotools import replace_exact
from efrotools.util import replace_exact
if arch not in ARCHS:
raise CleanError(f"Invalid arch '{arch}'.")

View File

@ -10,6 +10,7 @@ from __future__ import annotations
# Note: import as little as possible here at the module level to keep
# launch times fast for small snippets.
import os
import sys
from pathlib import Path
from typing import TYPE_CHECKING
@ -39,8 +40,8 @@ _g_batch_server_mode: bool = False
def pcommand_main(globs: dict[str, Any]) -> None:
"""Main entry point to pcommand scripts.
We simply look for all public functions and call
the one corresponding to the first passed arg.
We simply look for all public functions in the provided module globals
and call the one corresponding to the first passed arg.
"""
import types
@ -50,7 +51,49 @@ def pcommand_main(globs: dict[str, Any]) -> None:
global _g_funcs # pylint: disable=global-statement
assert _g_funcs is None
# Build our list of available funcs.
# Nowadays generated pcommand scripts run themselves using the
# project virtual environment's Python interpreter
# (.venv/bin/pythonX.Y, etc.). This nicely sets up the Python
# environment but does not touch PATH, meaning the stuff under
# .venv/bin won't get found if we do subprocess.run()/etc.
#
# One way to solve this would be to always do `source
# .venv/bin/activate` before running tools/pcommand. This sets PATH
# but also seems unwieldy and easy to forget. It's nice to be able
# to just run tools/pcommand and assume it'll do the right thing.
#
# So let's go ahead and set up PATH here so tools/pcommand by itself
# *does* do the right thing.
# Don't do this on Windows; we're not currently using virtual-envs
# there for the little bit of tools stuff we support.
if not sys.platform.startswith('win'):
abs_exe_path = Path(sys.executable).absolute()
pathparts = abs_exe_path.parts
if (
len(pathparts) < 3
or pathparts[-3] != '.venv'
or pathparts[-2] != 'bin'
or not pathparts[-1].startswith('python')
):
raise RuntimeError(
'Unexpected Python environment;'
' we expect to be running using .venv/bin/pythonX.Y'
)
cur_paths_str = os.environ.get('PATH')
if cur_paths_str is None:
raise RuntimeError("'PATH' is not currently set; unexpected.")
venv_bin_dir = str(abs_exe_path.parent)
# Only add our entry if it's not already there; don't want PATH to
# get out of control if we're doing recursive stuff.
cur_paths = cur_paths_str.split(':')
if venv_bin_dir not in cur_paths:
os.environ['PATH'] = ':'.join([venv_bin_dir] + cur_paths)
# Build our list of available command functions.
_g_funcs = dict(
(
(name, obj)

View File

@ -45,13 +45,45 @@ def _spelling(words: list[str]) -> None:
print(f'Modified {num_modded_dictionaries} dictionaries.')
def pur() -> None:
"""Run pur using project's Python version."""
def requirements_upgrade() -> None:
"""Upgrade project requirements."""
import os
import tempfile
import subprocess
from efro.error import CleanError
pcommand.disallow_in_batch()
subprocess.run([sys.executable, '-m', 'pur'] + sys.argv[2:], check=True)
args = pcommand.get_args()
if len(args) != 1:
raise CleanError('Expected a single arg.')
reqpath = args[0]
with open(reqpath, encoding='utf-8') as infile:
reqs = infile.read()
# Operate on a temp file and compare against our existing so we don't
# write unless it has changed.
with tempfile.TemporaryDirectory() as tempdir:
fname = os.path.join(tempdir, 'reqs')
with open(fname, 'w', encoding='utf-8') as outfile:
outfile.write(reqs)
subprocess.run([sys.executable, '-m', 'pur', '-r', fname], check=True)
# Sort lines.
with open(reqpath, encoding='utf-8') as infile:
reqs2 = infile.read().strip()
reqs_new = (
'\n'.join(sorted(reqs2.splitlines(), key=lambda l: l.lower()))
+ '\n'
)
if reqs_new != reqs:
with open(reqpath, 'w', encoding='utf-8') as outfile:
outfile.write(reqs_new)
def spelling_all() -> None:
@ -477,7 +509,7 @@ def sync_all() -> None:
def sync() -> None:
"""Runs standard syncs between this project and others."""
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
from efrotools.sync import Mode, SyncItem, run_standard_syncs
pcommand.disallow_in_batch()
@ -625,7 +657,7 @@ def pytest() -> None:
import os
import platform
import subprocess
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
from efro.error import CleanError
pcommand.disallow_in_batch()

View File

@ -122,7 +122,7 @@ def batchserver() -> None:
"""Run a server for handling pcommands."""
from efro.error import CleanError
from efrotools import extract_arg
from efrotools.util import extract_arg
import efrotools.pcommandbatch as pcb
pcommand.disallow_in_batch()

View File

@ -4,10 +4,17 @@
from __future__ import annotations
import os
import json
from pathlib import Path
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Literal
from typing import Literal, Any
# Cache these since we may repeatedly fetch these in batch mode.
_g_project_configs: dict[str, dict[str, Any]] = {}
_g_local_configs: dict[str, dict[str, Any]] = {}
def get_public_legal_notice(
@ -40,3 +47,51 @@ def get_non_public_legal_notice_prev() -> str:
"""Allows us to auto-update."""
# TODO: Move this to project config or somewhere not hard-coded.
return 'Copyright (c) 2011-2023 Eric Froemling'
def getlocalconfig(projroot: Path | str) -> dict[str, Any]:
"""Return a project's localconfig contents (or default if missing)."""
projrootstr = str(projroot)
if projrootstr not in _g_local_configs:
localconfig: dict[str, Any]
# Allow overriding path via env var.
path = os.environ.get('EFRO_LOCALCONFIG_PATH')
if path is None:
path = 'config/localconfig.json'
try:
with open(Path(projroot, path), encoding='utf-8') as infile:
localconfig = json.loads(infile.read())
except FileNotFoundError:
localconfig = {}
_g_local_configs[projrootstr] = localconfig
return _g_local_configs[projrootstr]
def getprojectconfig(projroot: Path | str) -> dict[str, Any]:
"""Return a project's projectconfig contents (or default if missing)."""
projrootstr = str(projroot)
if projrootstr not in _g_project_configs:
config: dict[str, Any]
try:
with open(
Path(projroot, 'config/projectconfig.json'), encoding='utf-8'
) as infile:
config = json.loads(infile.read())
except FileNotFoundError:
config = {}
_g_project_configs[projrootstr] = config
return _g_project_configs[projrootstr]
def setprojectconfig(projroot: Path | str, config: dict[str, Any]) -> None:
"""Set the project config contents."""
projrootstr = str(projroot)
_g_project_configs[projrootstr] = config
os.makedirs(Path(projroot, 'config'), exist_ok=True)
with Path(projroot, 'config/projectconfig.json').open(
'w', encoding='utf-8'
) as outfile:
outfile.write(json.dumps(config, indent=2))

View File

@ -10,7 +10,7 @@ import subprocess
from enum import Enum
from dataclasses import dataclass
from efrotools import readfile, writefile, replace_exact
from efrotools.util import readfile, writefile, replace_exact
# Python version we build here (not necessarily same as we use in repo).
PY_VER_ANDROID = '3.12'

View File

@ -16,53 +16,6 @@ _checked_valid_sys_executable = False # pylint: disable=invalid-name
_valid_sys_executable: str | None = None
# def get_valid_sys_executable() -> str:
# """Attempt to get a valid Python interpreter path.
# Using sys.executable for this purpose may return the path to the
# executable containing the embedded Python, which may not be a standard
# iterpreter.
# """
# pyverstr = f'{sys.version_info.major}.{sys.version_info.minor}'
# global _checked_valid_sys_executable
# global _valid_sys_executable
# if not _checked_valid_sys_executable:
# # First look at sys.executable to see if it seems like a standard
# # python interpreter.
# try:
# output = subprocess.run(
# [sys.executable, '--version'], check=True, capture_output=True
# ).stdout.decode()
# if output.startswith(f'Python {pyverstr}'):
# _valid_sys_executable = sys.executable
# except Exception:
# import logging
# logging.exception(
# 'Error checking sys.executable in get_valid_sys_executable'
# )
# if _valid_sys_executable is None:
# # For now, as a fallback, just go with 'pythonX.Y'.
# _valid_sys_executable = f'python{pyverstr}'
# # As a fallback, look for bin/pythonX.Y under our sys.prefix.
# # prefixpath = os.path.join(
# sys.prefix, 'bin', f'python{pyverstr}')
# # if os.path.exists(prefixpath):
# # _valid_sys_executable = prefixpath
# _checked_valid_sys_executable = True
# if _valid_sys_executable is None:
# raise RuntimeError('Have no valid sys executable.')
# return _valid_sys_executable
def get_project_python_executable(projroot: Path | str) -> str:
"""Return the path to a standalone Python interpreter for this project.

View File

@ -71,7 +71,7 @@ def run_standard_syncs(
a src subpath, and optionally a dst subpath (src will be used by default).
"""
# pylint: disable=too-many-locals
from efrotools import getlocalconfig
from efrotools.project import getlocalconfig
localconfig = getlocalconfig(projectroot)
total_count = 0

View File

@ -68,7 +68,7 @@ def _filter_tool_config(projroot: Path, cfg: str) -> str:
# pylint: disable=too-many-locals
import textwrap
from efrotools import getprojectconfig
from efrotools.project import getprojectconfig
from efrotools.pyver import PYVER
# Emacs dir-locals defaults. Note that these contain other

191
tools/efrotools/util.py Normal file
View File

@ -0,0 +1,191 @@
# Released under the MIT License. See LICENSE for details.
#
"""Misc util calls/etc.
Ideally the stuff in here should migrate to more descriptive module names.
"""
from __future__ import annotations
from typing import TYPE_CHECKING, overload
if TYPE_CHECKING:
from typing import Sequence, Literal
from pathlib import Path
def explicit_bool(value: bool) -> bool:
    """Simply return input value; can avoid unreachable-code type warnings."""
    # Pass the value through untouched; the indirection alone is the point.
    result = value
    return result
def extract_flag(args: list[str], name: str) -> bool:
    """Given a list of args and a flag name, returns whether it is present.

    The arg flag, if present, is removed from the arg list. Raises
    CleanError if the flag appears more than once.
    """
    from efro.error import CleanError

    occurrences = args.count(name)
    if occurrences > 1:
        raise CleanError(f'Flag {name} passed multiple times.')
    if occurrences == 0:
        return False
    args.remove(name)
    return True
@overload
def extract_arg(
    args: list[str], name: str, required: Literal[False] = False
) -> str | None: ...
@overload
def extract_arg(args: list[str], name: str, required: Literal[True]) -> str: ...
def extract_arg(
    args: list[str], name: str, required: bool = False
) -> str | None:
    """Return the value following a named arg, removing both from args.

    Returns None when the arg is absent and not required. Raises
    CleanError when a required arg is missing, when the arg appears
    more than once, or when no value follows it.
    """
    from efro.error import CleanError

    occurrences = args.count(name)
    if occurrences == 0:
        if required:
            raise CleanError(f'Required argument {name} not passed.')
        return None
    if occurrences > 1:
        raise CleanError(f'Arg {name} passed multiple times.')
    flagindex = args.index(name)
    valindex = flagindex + 1
    if valindex >= len(args):
        raise CleanError(f'No value passed after {name} arg.')
    value = args[valindex]
    del args[flagindex : valindex + 1]
    return value
def replace_section(
    text: str,
    begin_marker: str,
    end_marker: str,
    replace_text: str = '',
    keep_markers: bool = False,
    error_if_missing: bool = True,
) -> str:
    """Replace all text between two marker strings (including the markers).

    Requires exactly one occurrence of each marker; raises RuntimeError
    otherwise. If begin_marker is absent and error_if_missing is False,
    text is returned unchanged.
    """
    if begin_marker not in text:
        if error_if_missing:
            raise RuntimeError(f"Marker not found in text: '{begin_marker}'.")
        return text
    # Exactly one begin marker allowed.
    if text.count(begin_marker) != 1:
        raise RuntimeError(
            f"Expected one marker '{begin_marker}';"
            f' found {text.count(begin_marker)}.'
        )
    before_begin, _sep, after_begin = text.partition(begin_marker)
    # Exactly one end marker allowed after the begin marker.
    if after_begin.count(end_marker) != 1:
        raise RuntimeError(
            f"Expected one marker '{end_marker}';"
            f' found {text.count(end_marker)}.'
        )
    _discarded, _sep2, after_end = after_begin.partition(end_marker)
    middle = (
        f'{begin_marker}{replace_text}{end_marker}'
        if keep_markers
        else replace_text
    )
    return f'{before_begin}{middle}{after_end}'
def readfile(path: str | Path) -> str:
    """Read a utf-8 text file into a string."""
    with open(path, mode='r', encoding='utf-8') as infile:
        contents = infile.read()
    return contents
def writefile(path: str | Path, txt: str) -> None:
    """Write a string to a utf-8 text file."""
    # Overwrites any existing file at path.
    with open(path, mode='w', encoding='utf-8') as outstream:
        outstream.write(txt)
def replace_exact(
    opstr: str, old: str, new: str, count: int = 1, label: str | None = None
) -> str:
    """Replace text, requiring an exact number of occurrences.

    Useful when filtering data in some predefined way to ensure the
    original has not changed. Raises RuntimeError if the number of
    occurrences of old differs from count; label, if given, is
    included in the error message.
    """
    occurrences = opstr.count(old)
    if occurrences != count:
        in_label = f' in {label}' if label is not None else ''
        raise RuntimeError(
            f'Expected {count} string occurrence(s){in_label};'
            f' found {occurrences}. String: {repr(old)}'
        )
    return opstr.replace(old, new)
def get_files_hash(
    filenames: Sequence[str | Path],
    extrahash: str = '',
    int_only: bool = False,
    hashtype: Literal['md5', 'sha256'] = 'md5',
) -> str:
    """Return a hash for the given files.

    File contents are hashed in list order, followed by extrahash.
    Returns a hex digest by default, or the digest as a decimal
    integer string when int_only is True.
    """
    import hashlib

    if not isinstance(filenames, list):
        raise RuntimeError(f'Expected a list; got a {type(filenames)}.')
    if TYPE_CHECKING:
        # Help Mypy infer the right type for this.
        hashobj = hashlib.md5()
    else:
        hashobj = getattr(hashlib, hashtype)()
    # Feed files through in 1MB chunks to keep memory use bounded.
    chunksize = 2**20
    for fname in filenames:
        with open(fname, 'rb') as infile:
            while chunk := infile.read(chunksize):
                hashobj.update(chunk)
    hashobj.update(extrahash.encode())
    if int_only:
        return str(int.from_bytes(hashobj.digest(), byteorder='big'))
    return hashobj.hexdigest()
def get_string_hash(
    value: str,
    int_only: bool = False,
    hashtype: Literal['md5', 'sha256'] = 'md5',
) -> str:
    """Return a hash for the given string.

    The string is hashed as utf-8 bytes. Returns a hex digest by
    default, or the digest as a decimal integer string when int_only
    is True. Raises TypeError if value is not a str.
    """
    import hashlib

    if not isinstance(value, str):
        raise TypeError('Expected a str.')
    if TYPE_CHECKING:
        # Help Mypy infer the right type for this.
        hashobj = hashlib.md5()
    else:
        hashobj = getattr(hashlib, hashtype)()
    hashobj.update(value.encode())
    if int_only:
        return str(int.from_bytes(hashobj.digest(), byteorder='big'))
    return hashobj.hexdigest()

View File

@ -22,7 +22,7 @@ from filelock import FileLock
from efro.terminal import Clr
from efro.error import CleanError
from efro.dataclassio import ioprepped, dataclass_from_dict
from efrotools import getlocalconfig # pylint: disable=wrong-import-order
from efrotools.project import getlocalconfig # pylint: disable=C0411
if TYPE_CHECKING:
from typing import Any

View File

@ -10,36 +10,50 @@ import sys
import subprocess
if __name__ == '__main__':
from batools.spinoff import spinoff_main
# Our initial invocation actually just sets up the env for our
# *real* invocation (so we can run under our desired venv/etc.)
if os.environ.get('BA_SPINOFF_HAVE_ENV') != '1':
# The initial invocation of this script actually just sets the stage
# for the *real* invocation of this script, which always happens
# from the fully-inited virtual env of the source project. This way
# all modules used by the spinoff system are in place and there's no
ambiguity where we could be loading Python stuff from the dst
# project while we're in the process of modifying it.
if 'BA_SPINOFF_SRC_ROOT' not in os.environ:
# Our shebang line gives us a generic 'pythonX.Y' environment, but
# we actually want to run under the virtual-env of the source
# project so we have all the pip stuff we expect. So if we are
# getting invoked via a symlink we assume it points to the source
# project, and if not then we assume we are the source project.
# Calc absolute paths for our source (and possibly dst)
# projects. If we are getting invoked via a symlink, what it
# points to is src and we are dst. Otherwise we are src and
# there is no dst.
dst_proj_root: str | None
if os.path.islink(sys.argv[0]):
src_spinoff_path = os.path.realpath(sys.argv[0])
dst_proj_root = os.path.abspath(
os.path.join(os.path.dirname(sys.argv[0]), '..')
)
else:
src_spinoff_path = sys.argv[0]
dst_proj_root = None # pylint: disable=invalid-name
src_proj_root = os.path.abspath(
os.path.join(os.path.dirname(src_spinoff_path), '..')
)
src_proj_python = os.path.join(src_proj_root, '.venv/bin/python3.12')
cmd = [src_proj_python, sys.argv[0]] + sys.argv[1:]
cmd = [src_proj_python, 'tools/spinoff'] + sys.argv[1:]
env = dict(os.environ, BA_SPINOFF_SRC_ROOT=src_proj_root)
if dst_proj_root is not None:
env['BA_SPINOFF_DST_ROOT'] = dst_proj_root
# Make sure the src project is properly bootstrapped.
subprocess.run(['make', 'prereqs'], check=True, cwd=src_proj_root)
# Finally, run for realz.
subprocess.run(
cmd, check=True, env=dict(os.environ, BA_SPINOFF_HAVE_ENV='1')
)
# Finally, run for realz (from src proj dir).
result = subprocess.run(cmd, check=False, env=env, cwd=src_proj_root)
sys.exit(result.returncode)
else:
from batools.spinoff import spinoff_main
# Ok; we're a real invocation. Do our thing.
spinoff_main()