mirror of
https://github.com/RYDE-WORK/ballistica.git
synced 2026-01-19 21:37:57 +08:00
Build system optimizations
This commit is contained in:
parent
65505fc794
commit
3f23969f40
@ -4117,8 +4117,8 @@
|
||||
"assets/build/windows/x64/pythonw.exe": "https://files.ballistica.net/cache/ba1/6c/bb/b6f52c306aa4e88061510e96cefe",
|
||||
"build/prefab/linux/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/8c/ae/8d2561ca2c4bb1bb033560866410",
|
||||
"build/prefab/linux/release/ballisticacore": "https://files.ballistica.net/cache/ba1/74/2b/eea5b942b0cead421529d09039cd",
|
||||
"build/prefab/mac/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/75/f5/740ce4ae3fc843c4cec96967f75a",
|
||||
"build/prefab/mac/release/ballisticacore": "https://files.ballistica.net/cache/ba1/d0/1f/573bbb85fbb6a3bf8a056caeeaf9",
|
||||
"build/prefab/windows/debug/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/35/6d/c5e69424a36f80c0fac65dc684ca",
|
||||
"build/prefab/windows/release/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/16/96/57a8a29623dd1c05f506bb005666"
|
||||
"build/prefab/mac/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/b5/19/84fe00af08957094aceca3104c0d",
|
||||
"build/prefab/mac/release/ballisticacore": "https://files.ballistica.net/cache/ba1/27/2e/94624004bd6298eeb554fba9b87f",
|
||||
"build/prefab/windows/debug/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/49/bc/ae4d27572a810e90628205f55949",
|
||||
"build/prefab/windows/release/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/84/ec/57982b7f576fc347429a107fa685"
|
||||
}
|
||||
9
.idea/dictionaries/ericf.xml
generated
9
.idea/dictionaries/ericf.xml
generated
@ -268,6 +268,7 @@
|
||||
<w>checkfast</w>
|
||||
<w>checkfull</w>
|
||||
<w>checkins</w>
|
||||
<w>checkpaths</w>
|
||||
<w>checkroundover</w>
|
||||
<w>checksums</w>
|
||||
<w>childnode</w>
|
||||
@ -430,6 +431,7 @@
|
||||
<w>distros</w>
|
||||
<w>dline</w>
|
||||
<w>dlls</w>
|
||||
<w>dmitry</w>
|
||||
<w>dmodule</w>
|
||||
<w>dmypy</w>
|
||||
<w>dname</w>
|
||||
@ -572,6 +574,7 @@
|
||||
<w>fcount</w>
|
||||
<w>fdata</w>
|
||||
<w>fdesc</w>
|
||||
<w>fdict</w>
|
||||
<w>fecfc</w>
|
||||
<w>feedparser</w>
|
||||
<w>ffap</w>
|
||||
@ -658,6 +661,7 @@
|
||||
<w>fsrc</w>
|
||||
<w>fstab</w>
|
||||
<w>fstrs</w>
|
||||
<w>ftime</w>
|
||||
<w>ftmp</w>
|
||||
<w>ftplib</w>
|
||||
<w>ftst</w>
|
||||
@ -924,6 +928,8 @@
|
||||
<w>lastpoweruptype</w>
|
||||
<w>laststand</w>
|
||||
<w>launchtest</w>
|
||||
<w>lazybuild</w>
|
||||
<w>lazybuilddir</w>
|
||||
<w>lbits</w>
|
||||
<w>lcfg</w>
|
||||
<w>lcolor</w>
|
||||
@ -1726,6 +1732,7 @@
|
||||
<w>testmock</w>
|
||||
<w>testobj</w>
|
||||
<w>testpatch</w>
|
||||
<w>testpath</w>
|
||||
<w>testpt</w>
|
||||
<w>testsealable</w>
|
||||
<w>testsentinel</w>
|
||||
@ -1761,6 +1768,7 @@
|
||||
<w>tmpf</w>
|
||||
<w>tmppath</w>
|
||||
<w>tname</w>
|
||||
<w>tnamepretty</w>
|
||||
<w>tnode</w>
|
||||
<w>tntspawner</w>
|
||||
<w>tntspawnpos</w>
|
||||
@ -1781,6 +1789,7 @@
|
||||
<w>tracemalloc</w>
|
||||
<w>trailcolor</w>
|
||||
<w>transtime</w>
|
||||
<w>trapeznikov</w>
|
||||
<w>tref</w>
|
||||
<w>tret</w>
|
||||
<w>trophystr</w>
|
||||
|
||||
@ -2,8 +2,7 @@
|
||||
(please add your name and info here if you have contributed to the project)
|
||||
|
||||
### Eric Froemling
|
||||
- Original author
|
||||
- BDFL (benevolent dictator for life).
|
||||
- Original BombSquad/Ballistica author and BDFL (benevolent dictator for life).
|
||||
|
||||
### Dmitry450
|
||||
- Modder
|
||||
|
||||
83
Makefile
83
Makefile
@ -28,23 +28,6 @@
|
||||
# Prefix used for output of docs/changelogs/etc targets for use in webpages.
|
||||
DOCPREFIX = "ballisticacore_"
|
||||
|
||||
# This setup lets us set up files "bfiles" for expensive dummy targets
|
||||
# to avoid re-running them every time. A good use case is VM build targets
|
||||
# where just spinning up the VM to confirm that nothing needs rebuilding is
|
||||
# time-consuming. To use these, do the following:
|
||||
# - create a physical file for the target: ${BFILEDIR}/targetname
|
||||
# (targets that are already physical files work too)
|
||||
# - add this dependency to it: ${shell ${BSOURCES} <category> $@}
|
||||
# (where <category> covers all files that could affect the target)
|
||||
# - always touch the target file as the last build step:
|
||||
# mkdir -p `dirname $@` && touch $@
|
||||
# (even if the build step usually does; the build may not actually run
|
||||
# which could leave one of the overly-broad dep files newer than it)
|
||||
# Note that this mechanism slows builds a bit if category contains a lot of
|
||||
# files, so is not always a win.
|
||||
BFILEDIR = .cache/bfile
|
||||
BSOURCES = tools/snippets sources
|
||||
|
||||
|
||||
################################################################################
|
||||
# #
|
||||
@ -73,47 +56,35 @@ assets: prereqs
|
||||
|
||||
# Build assets required for cmake builds (linux, mac)
|
||||
assets-cmake: prereqs
|
||||
@cd assets && $(MAKE) -j${CPUS} cmake
|
||||
@cd assets && ${MAKE} -j${CPUS} cmake
|
||||
|
||||
# Build assets required for WINDOWS_PLATFORM windows builds.
|
||||
assets-windows: prereqs
|
||||
@cd assets && $(MAKE) -j${CPUS} win-${WINDOWS_PLATFORM}
|
||||
@cd assets && ${MAKE} -j${CPUS} win-${WINDOWS_PLATFORM}
|
||||
|
||||
# Build assets required for Win32 windows builds.
|
||||
assets-windows-Win32: prereqs
|
||||
@cd assets && $(MAKE) -j${CPUS} win-Win32
|
||||
@cd assets && ${MAKE} -j${CPUS} win-Win32
|
||||
|
||||
# Build assets required for x64 windows builds.
|
||||
assets-windows-x64: prereqs
|
||||
@cd assets && $(MAKE) -j${CPUS} win-x64
|
||||
@cd assets && ${MAKE} -j${CPUS} win-x64
|
||||
|
||||
# Build assets required for mac xcode builds
|
||||
assets-mac: prereqs
|
||||
@cd assets && $(MAKE) -j${CPUS} mac
|
||||
@cd assets && ${MAKE} -j${CPUS} mac
|
||||
|
||||
# Build assets required for ios.
|
||||
assets-ios: prereqs
|
||||
@cd assets && $(MAKE) -j${CPUS} ios
|
||||
@cd assets && ${MAKE} -j${CPUS} ios
|
||||
|
||||
# Build assets required for android.
|
||||
assets-android: prereqs
|
||||
@cd assets && $(MAKE) -j${CPUS} android
|
||||
@cd assets && ${MAKE} -j${CPUS} android
|
||||
|
||||
# Clean all assets.
|
||||
assets-clean:
|
||||
@cd assets && $(MAKE) clean
|
||||
|
||||
# A bfile for the resources target so we don't always have to run it.
|
||||
RESOURCES_F = ${BFILEDIR}/resources
|
||||
${RESOURCES_F}: ${PREREQS} resources/Makefile ${shell ${BSOURCES} resources $@}
|
||||
@cd resources && $(MAKE) -j${CPUS} resources
|
||||
@mkdir -p `dirname $@` && touch $@
|
||||
|
||||
# A bfile for the code target so we don't always have to run it.
|
||||
CODE_F = ${BFILEDIR}/code
|
||||
${CODE_F}: ${PREREQS} ${shell ${BSOURCES} gen $@}
|
||||
@cd src/generated_src && $(MAKE) -j${CPUS} generated_code
|
||||
@mkdir -p `dirname $@` && touch $@
|
||||
@cd assets && ${MAKE} clean
|
||||
|
||||
# Remove *ALL* files and directories that aren't managed by git
|
||||
# (except for a few things such as localconfig.json).
|
||||
@ -257,12 +228,12 @@ update-check: prereqs
|
||||
|
||||
# Run formatting on all files in the project considered 'dirty'.
|
||||
format:
|
||||
@$(MAKE) -j3 format-code format-scripts format-makefile
|
||||
@${MAKE} -j3 format-code format-scripts format-makefile
|
||||
@echo Formatting complete!
|
||||
|
||||
# Same but always formats; ignores dirty state.
|
||||
format-full:
|
||||
@$(MAKE) -j3 format-code-full format-scripts-full format-makefile
|
||||
@${MAKE} -j3 format-code-full format-scripts-full format-makefile
|
||||
@echo Formatting complete!
|
||||
|
||||
# Run formatting for compiled code sources (.cc, .h, etc.).
|
||||
@ -297,22 +268,22 @@ format-makefile: prereqs
|
||||
|
||||
# Run all project checks. (static analysis)
|
||||
check: update-check
|
||||
@$(MAKE) -j3 cpplint pylint mypy
|
||||
@${MAKE} -j3 cpplint pylint mypy
|
||||
@echo ALL CHECKS PASSED!
|
||||
|
||||
# Same as check but no caching (all files are checked).
|
||||
check-full: update-check
|
||||
@$(MAKE) -j3 cpplint-full pylint-full mypy-full
|
||||
@${MAKE} -j3 cpplint-full pylint-full mypy-full
|
||||
@echo ALL CHECKS PASSED!
|
||||
|
||||
# Same as 'check' plus optional/slow extra checks.
|
||||
check2: update-check
|
||||
@$(MAKE) -j4 cpplint pylint mypy pycharm
|
||||
@${MAKE} -j4 cpplint pylint mypy pycharm
|
||||
@echo ALL CHECKS PASSED!
|
||||
|
||||
# Same as check2 but no caching (all files are checked).
|
||||
check2-full: update-check
|
||||
@$(MAKE) -j4 cpplint-full pylint-full mypy-full pycharm-full
|
||||
@${MAKE} -j4 cpplint-full pylint-full mypy-full pycharm-full
|
||||
@echo ALL CHECKS PASSED!
|
||||
|
||||
# Run Cpplint checks on all C/C++ code.
|
||||
@ -391,30 +362,30 @@ test-assetmanager:
|
||||
|
||||
# Format, update, check, & test the project. Do this before commits.
|
||||
preflight:
|
||||
@$(MAKE) format
|
||||
@$(MAKE) update
|
||||
@$(MAKE) -j4 cpplint pylint mypy test
|
||||
@${MAKE} format
|
||||
@${MAKE} update
|
||||
@${MAKE} -j4 cpplint pylint mypy test
|
||||
@echo PREFLIGHT SUCCESSFUL!
|
||||
|
||||
# Same as 'preflight' without caching (all files are visited).
|
||||
preflight-full:
|
||||
@$(MAKE) format-full
|
||||
@$(MAKE) update
|
||||
@$(MAKE) -j4 cpplint-full pylint-full mypy-full test-full
|
||||
@${MAKE} format-full
|
||||
@${MAKE} update
|
||||
@${MAKE} -j4 cpplint-full pylint-full mypy-full test-full
|
||||
@echo PREFLIGHT SUCCESSFUL!
|
||||
|
||||
# Same as 'preflight' plus optional/slow extra checks.
|
||||
preflight2:
|
||||
@$(MAKE) format
|
||||
@$(MAKE) update
|
||||
@$(MAKE) -j5 cpplint pylint mypy pycharm test
|
||||
@${MAKE} format
|
||||
@${MAKE} update
|
||||
@${MAKE} -j5 cpplint pylint mypy pycharm test
|
||||
@echo PREFLIGHT SUCCESSFUL!
|
||||
|
||||
# Same as 'preflight2' but without caching (all files visited).
|
||||
preflight2-full:
|
||||
@$(MAKE) format-full
|
||||
@$(MAKE) update
|
||||
@$(MAKE) -j5 cpplint-full pylint-full mypy-full pycharm-full test-full
|
||||
@${MAKE} format-full
|
||||
@${MAKE} update
|
||||
@${MAKE} -j5 cpplint-full pylint-full mypy-full pycharm-full test-full
|
||||
@echo PREFLIGHT SUCCESSFUL!
|
||||
|
||||
# Tell make which of these targets don't represent files.
|
||||
@ -434,7 +405,7 @@ PROJ_DIR = ${abspath ${CURDIR}}
|
||||
VERSION = $(shell tools/version_utils version)
|
||||
BUILD_NUMBER = $(shell tools/version_utils build)
|
||||
BUILD_DIR = ${PROJ_DIR}/build
|
||||
|
||||
LAZYBUILDDIR = .cache/lazybuild
|
||||
STAGE_ASSETS = ${PROJ_DIR}/tools/stage_assets
|
||||
|
||||
# Things to ignore when doing root level cleans.
|
||||
|
||||
146
tools/batools/build.py
Normal file
146
tools/batools/build.py
Normal file
@ -0,0 +1,146 @@
|
||||
# Copyright (c) 2011-2020 Eric Froemling
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
# -----------------------------------------------------------------------------
|
||||
"""General functionality related to running builds."""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from enum import Enum
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import List
|
||||
|
||||
CLRBLU = '\033[94m' # Blue.
|
||||
CLRHDR = '\033[95m' # Header.
|
||||
CLREND = '\033[0m' # End.
|
||||
|
||||
|
||||
class SourceCategory(Enum):
    """Categories of source files that can trigger a lazy rebuild.

    Each member's value is the tag used to identify which set of
    input paths should be scanned for changes (see lazy_build).
    """
    RESOURCES = 'resources_src'  # resources/src and its Makefile.
    CODE_GEN = 'code_gen_src'  # Inputs affecting generated code.
    ASSETS = 'assets_src'  # Inputs affecting asset builds.
    CMAKE = 'cmake_src'  # Inputs affecting CMake (linux/mac) builds.
    WIN = 'win_src'  # Inputs affecting Windows binary builds.
|
||||
|
||||
|
||||
def _checkpaths(inpaths: List[str], category: SourceCategory,
                target: str) -> bool:
    """Return True if anything under inpaths is newer than target."""
    # pylint: disable=too-many-branches

    target_mtime = (os.path.getmtime(target)
                    if os.path.exists(target) else None)

    # Strip the lazybuild cache prefix for nicer log output.
    prefix = '.cache/lazybuild/'
    pretty_name = target[len(prefix):] if target.startswith(prefix) else target

    def _is_newer(path: str) -> bool:
        # A missing target means any input counts as newer.
        if target_mtime is not None and os.path.getmtime(path) < target_mtime:
            return False
        print(f'{CLRHDR}Build of {pretty_name} triggered by'
              f' {path}{CLREND}')
        return True

    unchanged = 0
    for inpath in inpaths:
        # Plain files get tested directly; dirs get walked.
        if os.path.isfile(inpath):
            if _is_newer(inpath):
                return True
            unchanged += 1
            continue
        for root, _dnames, fnames in os.walk(inpath):

            # Generated sources only matter for the code-gen category.
            if (root.startswith('src/generated_src')
                    and category is not SourceCategory.CODE_GEN):
                continue

            # Tool sources never feed any of these targets.
            if root.startswith('src/tools'):
                continue

            # External sources are skipped, except the windows bits
            # which matter for Windows binary builds.
            if root.startswith('src/external'):
                if not (category is SourceCategory.WIN
                        and root.startswith('src/external/windows')):
                    continue

            # Python bytecode caches are never real inputs.
            if '__pycache__' in root:
                continue
            for fname in fnames:
                # Dot files are ignored.
                if fname.startswith('.'):
                    continue
                fpath = os.path.join(root, fname)
                if ' ' in fpath:
                    raise RuntimeError(f'Invalid path with space: {fpath}')

                if _is_newer(fpath):
                    return True
                unchanged += 1
    print(f'{CLRBLU}Skipping build of {pretty_name}'
          f' ({unchanged} inputs unchanged){CLREND}')
    return False
|
||||
|
||||
|
||||
def lazy_build(target: str, category: SourceCategory, command: str) -> None:
    """Run a build command if anything in category is newer than target.

    Args:
        target: The file whose mod-time gates the build.
        category: Which set of source paths to scan for changes.
        command: Shell command to run when a rebuild is needed.

    Raises:
        ValueError: If category is not a recognized SourceCategory.
        subprocess.CalledProcessError: If the build command fails.

    Note that target's mod-time is always updated when the build runs,
    regardless of whether the build command itself modified it.
    """
    # Input paths that can possibly affect each category of build.
    paths_by_category = {
        SourceCategory.CODE_GEN: ['tools/generate_code', 'src/generated_src'],
        SourceCategory.ASSETS: ['tools/convert_util', 'assets/src'],
        SourceCategory.CMAKE: ['src', 'ballisticacore-cmake/CMakeLists.txt'],
        SourceCategory.WIN: ['src', 'resources/src'],
        SourceCategory.RESOURCES: ['resources/src', 'resources/Makefile'],
    }
    try:
        paths: List[str] = paths_by_category[category]
    except KeyError:
        raise ValueError(f'Invalid source category: {category}') from None

    # Now do the thing if any of our input mod-times changed.
    if _checkpaths(paths, category, target):

        subprocess.run(command, shell=True, check=True)

        # We also explicitly update the mod-time of the target;
        # the command we ran (such as a VM build) may not have actually
        # done anything, but we still want our target to be newer than
        # all the lazy sources.
        dirname = os.path.dirname(target)
        if dirname:
            # Guard: a target in the current dir has an empty dirname,
            # and os.makedirs('') would raise.
            os.makedirs(dirname, exist_ok=True)
        Path(target).touch()
|
||||
@ -30,6 +30,7 @@ from __future__ import annotations
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import json
|
||||
from typing import TYPE_CHECKING
|
||||
from multiprocessing import cpu_count
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
@ -52,6 +53,8 @@ STRIP_END_TAG = '#__EFROCACHE_STRIP_END__'
|
||||
CACHE_DIR_NAME = '.efrocache'
|
||||
CACHE_MAP_NAME = '.efrocachemap'
|
||||
|
||||
UPLOAD_STATE_CACHE_FILE = '.cache/efrocache_upload_state'
|
||||
|
||||
|
||||
def get_file_hash(path: str) -> str:
|
||||
"""Return the hash used for caching.
|
||||
@ -69,7 +72,6 @@ def get_file_hash(path: str) -> str:
|
||||
def get_target(path: str) -> None:
|
||||
"""Fetch a target path from the cache, downloading if need be."""
|
||||
|
||||
import json
|
||||
from efrotools import run
|
||||
with open(CACHE_MAP_NAME) as infile:
|
||||
efrocachemap = json.loads(infile.read())
|
||||
@ -160,20 +162,18 @@ def filter_makefile(makefile_dir: str, contents: str) -> str:
|
||||
|
||||
|
||||
def update_cache(makefile_dirs: List[str]) -> None:
|
||||
"""Given a list of directories containing makefiles, update caches."""
|
||||
"""Given a list of directories containing Makefiles, update caches."""
|
||||
|
||||
import multiprocessing
|
||||
from efrotools import run
|
||||
cpus = multiprocessing.cpu_count()
|
||||
fnames1: List[str] = []
|
||||
fnames2: List[str] = []
|
||||
fhashes1: Set[str] = set()
|
||||
for path in makefile_dirs:
|
||||
cdp = f'cd {path} && ' if path else ''
|
||||
|
||||
# First, make sure all cache files are built.
|
||||
mfpath = os.path.join(path, 'Makefile')
|
||||
print(f'Building cache targets for {mfpath}...')
|
||||
print(f'Building efrocache targets for {CLRBLU}{mfpath}{CLREND}...')
|
||||
subprocess.run(f'{cdp}make -j{cpus} efrocache-build',
|
||||
shell=True,
|
||||
check=True)
|
||||
@ -196,18 +196,60 @@ def update_cache(makefile_dirs: List[str]) -> None:
|
||||
|
||||
# The main reason for this cache is to reduce round trips to
|
||||
# the staging server for tiny files, so let's include small files
|
||||
# only here. For larger stuff its ok to have a request per file.
|
||||
# only here. For larger stuff it's ok to have a request per file.
|
||||
if os.path.getsize(fullpath) < 100000:
|
||||
fnames1.append(fullpath)
|
||||
else:
|
||||
fnames2.append(fullpath)
|
||||
|
||||
# if bool(True):
|
||||
# print("1", fnames1)
|
||||
# print("2", fnames2)
|
||||
# print('SO FAR SO GOOD')
|
||||
# sys.exit(0)
|
||||
# Ok, we've got 2 lists of filenames that we need to cache in the cloud.
|
||||
# First, however, let's look up modtimes for everything and if everything
|
||||
# is exactly the same as last time we can skip this step.
|
||||
mtimes = _gen_modtimes(fnames1 + fnames2)
|
||||
if os.path.isfile(UPLOAD_STATE_CACHE_FILE):
|
||||
with open(UPLOAD_STATE_CACHE_FILE) as infile:
|
||||
mtimes_existing = infile.read()
|
||||
else:
|
||||
mtimes_existing = ''
|
||||
if mtimes == mtimes_existing:
|
||||
print(
|
||||
f'{CLRBLU}Efrocache state unchanged;'
|
||||
f' skipping cache push.{CLREND}',
|
||||
flush=True)
|
||||
else:
|
||||
_upload_cache(fnames1, fnames2, mtimes, mtimes_existing)
|
||||
|
||||
print(f'{CLRBLU}Efrocache update successful!{CLREND}')
|
||||
|
||||
# Write the cache state so we can skip the next run if nothing changes.
|
||||
os.makedirs(os.path.dirname(UPLOAD_STATE_CACHE_FILE), exist_ok=True)
|
||||
with open(UPLOAD_STATE_CACHE_FILE, 'w') as outfile:
|
||||
outfile.write(mtimes)
|
||||
|
||||
|
||||
def _upload_cache(fnames1: List[str], fnames2: List[str], mtimes_str: str,
|
||||
mtimes_existing_str: str) -> None:
|
||||
from efrotools import run
|
||||
|
||||
# First, if we've run before, print the files causing us to re-run:
|
||||
if mtimes_existing_str != '':
|
||||
changed_files: Set[str] = set()
|
||||
mtimes = json.loads(mtimes_str)
|
||||
mtimes_existing = json.loads(mtimes_existing_str)
|
||||
for fname, ftime in mtimes.items():
|
||||
if ftime != mtimes_existing.get(fname, ''):
|
||||
changed_files.add(fname)
|
||||
|
||||
# We've covered modifications and additions; add deletions:
|
||||
for fname in mtimes_existing:
|
||||
if fname not in mtimes:
|
||||
changed_files.add(fname)
|
||||
print(f'{CLRBLU}Updating cache with'
|
||||
f' {len(changed_files)} changes:{CLREND}')
|
||||
for fname in sorted(changed_files):
|
||||
print(f' {CLRBLU}{fname}{CLREND}')
|
||||
|
||||
# Now do the thing.
|
||||
staging_dir = 'build/efrocache'
|
||||
mapping_file = 'build/efrocachemap'
|
||||
run(f'rm -rf {staging_dir}')
|
||||
@ -215,11 +257,11 @@ def update_cache(makefile_dirs: List[str]) -> None:
|
||||
|
||||
_write_cache_files(fnames1, fnames2, staging_dir, mapping_file)
|
||||
|
||||
print(f"Starter cache includes {len(fnames1)} items;"
|
||||
f" excludes {len(fnames2)}")
|
||||
print(f"{CLRBLU}Starter cache includes {len(fnames1)} items;"
|
||||
f" excludes {len(fnames2)}{CLREND}")
|
||||
|
||||
# Sync all individual cache files to the staging server.
|
||||
print('Pushing cache to staging...', flush=True)
|
||||
print(f'{CLRBLU}Pushing cache to staging...{CLREND}', flush=True)
|
||||
run('rsync --progress --recursive build/efrocache/'
|
||||
' ubuntu@ballistica.net:files.ballistica.net/cache/ba1/')
|
||||
|
||||
@ -227,14 +269,18 @@ def update_cache(makefile_dirs: List[str]) -> None:
|
||||
run('ssh -oBatchMode=yes -oStrictHostKeyChecking=yes ubuntu@ballistica.net'
|
||||
' "cd files.ballistica.net/cache/ba1 && python3 genstartercache.py"')
|
||||
|
||||
print(f'Cache update successful!')
|
||||
|
||||
def _gen_modtimes(fnames: List[str]) -> str:
|
||||
fdict: Dict[str, float] = {}
|
||||
for fname in fnames:
|
||||
fdict[fname] = os.path.getmtime(fname)
|
||||
return json.dumps(fdict, separators=(',', ':'))
|
||||
|
||||
|
||||
def _write_cache_files(fnames1: List[str], fnames2: List[str],
|
||||
staging_dir: str, mapping_file: str) -> None:
|
||||
fhashes1: Set[str] = set()
|
||||
import functools
|
||||
import json
|
||||
mapping: Dict[str, str] = {}
|
||||
call = functools.partial(_write_cache_file, staging_dir)
|
||||
|
||||
@ -299,8 +345,8 @@ def _write_cache_file(staging_dir: str, fname: str) -> Tuple[str, str]:
|
||||
path = os.path.join(staging_dir, hashpath)
|
||||
os.makedirs(os.path.dirname(path), exist_ok=True)
|
||||
|
||||
# Fancy pipe stuff which will give us deterministic
|
||||
# tar.gz files (no embedded timestamps)
|
||||
# Fancy pipe stuff which will give us deterministic tar.gz files
|
||||
# with no embedded timestamps.
|
||||
# Note: The 'COPYFILE_DISABLE' prevents mac tar from adding
|
||||
# file attributes/resource-forks to the archive as as ._filename.
|
||||
run(f'COPYFILE_DISABLE=1 tar cf - {fname} | gzip -n > {path}')
|
||||
@ -330,7 +376,6 @@ def _check_warm_start_entries(entries: List[Tuple[str, str]]) -> None:
|
||||
|
||||
def warm_start_cache() -> None:
|
||||
"""Run a pre-pass on the efrocache to improve efficiency."""
|
||||
import json
|
||||
from efrotools import run
|
||||
|
||||
# We maintain a starter-cache on the staging server, which
|
||||
|
||||
@ -34,7 +34,6 @@ from __future__ import annotations
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import efrotools
|
||||
@ -540,6 +539,7 @@ def clean_orphaned_assets() -> None:
|
||||
|
||||
def py_examine() -> None:
|
||||
"""Run a python examination at a given point in a given file."""
|
||||
from pathlib import Path
|
||||
if len(sys.argv) != 7:
|
||||
print('ERROR: expected 7 args')
|
||||
sys.exit(255)
|
||||
@ -567,6 +567,7 @@ def py_examine() -> None:
|
||||
|
||||
def push_ipa() -> None:
|
||||
"""Construct and push ios IPA for testing."""
|
||||
from pathlib import Path
|
||||
from efrotools import ios
|
||||
root = Path(sys.argv[0], '../..').resolve()
|
||||
if len(sys.argv) != 3:
|
||||
@ -652,6 +653,7 @@ def get_modern_make() -> None:
|
||||
|
||||
def warm_start_asset_build() -> None:
|
||||
"""Prep asset builds to run faster."""
|
||||
from pathlib import Path
|
||||
from efrotools import get_config
|
||||
public: bool = get_config(PROJROOT)['public']
|
||||
|
||||
@ -876,73 +878,18 @@ def update_makebob() -> None:
|
||||
print('All builds complete!', flush=True)
|
||||
|
||||
|
||||
def _printpaths(inpaths: List[str], category: str,
|
||||
target: Optional[str]) -> None:
|
||||
allpaths: List[str] = []
|
||||
for inpath in inpaths:
|
||||
# Add files verbatim; recurse through dirs.
|
||||
if os.path.isfile(inpath):
|
||||
allpaths.append(inpath)
|
||||
continue
|
||||
for root, _dnames, fnames in os.walk(inpath):
|
||||
# Always skip these..
|
||||
if (root.startswith('src/generated_src')
|
||||
or root.startswith('src/tools')):
|
||||
continue
|
||||
# Skip some of these...
|
||||
if root.startswith('src/external'):
|
||||
if category == 'win' and root.startswith(
|
||||
'src/external/windows'):
|
||||
pass
|
||||
else:
|
||||
continue
|
||||
# Ignore python cache files.
|
||||
if '__pycache__' in root:
|
||||
continue
|
||||
for fname in fnames:
|
||||
# Ignore dot files
|
||||
if fname.startswith('.'):
|
||||
continue
|
||||
path = os.path.join(root, fname)
|
||||
if ' ' in path:
|
||||
raise RuntimeError(f'Invalid path with space: {path}')
|
||||
allpaths.append(path)
|
||||
print(' '.join(allpaths))
|
||||
|
||||
|
||||
def sources() -> None:
|
||||
"""Print source files of different categories for use as Makefile deps.
|
||||
|
||||
These are used as broad, redundant filters for expensive build ops.
|
||||
For instance, when running a build through a VM we might want to skip
|
||||
even spinning up the VM if absolutely no source files have changed.
|
||||
|
||||
With a single category arg, all input files for that category are printed.
|
||||
If a target filename is passed as a second arg, sources older than the
|
||||
target may be withheld to speed up the Make process.
|
||||
"""
|
||||
def lazybuild() -> None:
|
||||
"""Testing."""
|
||||
from batools.build import lazy_build, SourceCategory
|
||||
if len(sys.argv) < 5:
|
||||
raise CleanError('Expected at least 3 args')
|
||||
try:
|
||||
if len(sys.argv) not in (3, 4):
|
||||
raise CleanError('Expected one or two arguments.')
|
||||
category = sys.argv[2]
|
||||
target = sys.argv[3] if len(sys.argv) > 3 else None
|
||||
if category == 'gen':
|
||||
_printpaths(['tools', 'src/generated_src'], category, target)
|
||||
elif category == 'assets':
|
||||
_printpaths(['tools', 'assets/src'], category, target)
|
||||
elif category in ('cmake', 'win'):
|
||||
_printpaths(['tools', 'src'], category, target)
|
||||
elif category == 'resources':
|
||||
_printpaths(['tools', 'resources/src', 'resources/Makefile'],
|
||||
category, target)
|
||||
else:
|
||||
raise ValueError(f'Invalid source category: {category}')
|
||||
|
||||
except Exception as exc:
|
||||
# We're used by ${shell} cmds in Makefiles so need to fail in a
|
||||
# fashion that is noticeable there:
|
||||
print(f'Error in sources snippet: {exc}', file=sys.stderr)
|
||||
print('__nonexistent_error_output__')
|
||||
category = SourceCategory(sys.argv[2])
|
||||
except ValueError as exc:
|
||||
raise CleanError(exc)
|
||||
target = sys.argv[3]
|
||||
cmd = ' '.join(sys.argv[4:])
|
||||
lazy_build(target, category, cmd)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@ -47,7 +47,7 @@ if TYPE_CHECKING:
|
||||
|
||||
CLRHDR = '\033[95m' # Header.
|
||||
CLRGRN = '\033[92m' # Green.
|
||||
CLRBLU = '\033[94m' # Glue.
|
||||
CLRBLU = '\033[94m' # Blue.
|
||||
CLRRED = '\033[91m' # Red.
|
||||
CLREND = '\033[0m' # End.
|
||||
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user