cleaning up efrocache updater to allow for multiple starter caches

Eric 2023-09-01 09:49:24 -07:00
parent 110f5143ef
commit 2e8a0568e8
No known key found for this signature in database
GPG Key ID: 89C93F0F8D6D5A98
5 changed files with 148 additions and 142 deletions

.efrocachemap (generated): 56 changed lines

@@ -4064,26 +4064,26 @@
   "build/assets/windows/Win32/ucrtbased.dll": "2def5335207d41b21b9823f6805997f1",
   "build/assets/windows/Win32/vc_redist.x86.exe": "b08a55e2e77623fe657bea24f223a3ae",
   "build/assets/windows/Win32/vcruntime140d.dll": "865b2af4d1e26a1a8073c89acb06e599",
-  "build/prefab/full/linux_arm64_gui/debug/ballisticakit": "3403caf882f5efc1c5a62bf452994bd1",
+  "build/prefab/full/linux_arm64_gui/debug/ballisticakit": "a44a6d1692466ce3ef645e287df9d40c",
-  "build/prefab/full/linux_arm64_gui/release/ballisticakit": "c046dcf69b1d3cb122869ee1f8df06d6",
+  "build/prefab/full/linux_arm64_gui/release/ballisticakit": "d10895a3d5db3eb87444c977d3cc7d5b",
-  "build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "10f90e814342ba8553c193d8ce3221d1",
+  "build/prefab/full/linux_arm64_server/debug/dist/ballisticakit_headless": "7c8673d3c53823f7337d9cc06363c52b",
-  "build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "2827cb64562372b70fe78c30cc4dbf1e",
+  "build/prefab/full/linux_arm64_server/release/dist/ballisticakit_headless": "e0912c7058d289c72ececf1f82af05e9",
-  "build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "37e25714c6b19f5edc1135820edb0ef4",
+  "build/prefab/full/linux_x86_64_gui/debug/ballisticakit": "08d0270c1ff62138d90cfd8f03c2a1f4",
-  "build/prefab/full/linux_x86_64_gui/release/ballisticakit": "bacec90936f644c739eb9230b61c1528",
+  "build/prefab/full/linux_x86_64_gui/release/ballisticakit": "43fc3db1e9c0faea8583d728ba62c9cf",
-  "build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "51e044f01c1f783f2c88d8ba490370a6",
+  "build/prefab/full/linux_x86_64_server/debug/dist/ballisticakit_headless": "f0ce38fea6c063d526363df303a3eaff",
-  "build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "01aff77ee4c84b06b54664e26aaed0cf",
+  "build/prefab/full/linux_x86_64_server/release/dist/ballisticakit_headless": "ffa4b4d37c326be0f5fc90209aa826c4",
-  "build/prefab/full/mac_arm64_gui/debug/ballisticakit": "f9a14d7937cb4ac43038f4bd86212f1a",
+  "build/prefab/full/mac_arm64_gui/debug/ballisticakit": "0a73555124eb90070bced01136dea1f2",
-  "build/prefab/full/mac_arm64_gui/release/ballisticakit": "de88562b3c51a56a5a89bc35a7888e58",
+  "build/prefab/full/mac_arm64_gui/release/ballisticakit": "8f3b965f338dea1db2a87e761f178926",
-  "build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "7e94dfd5ce0b6bef197a09ef095e5c3a",
+  "build/prefab/full/mac_arm64_server/debug/dist/ballisticakit_headless": "46526e8ca5d33be7828fc39c0c483058",
-  "build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "78150d99f0e8dfd2f6785af4335b3f49",
+  "build/prefab/full/mac_arm64_server/release/dist/ballisticakit_headless": "c47bcafdc7a6ac65cd7e30878a68b56c",
-  "build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "732845014020ac6c7a0575a1c49ee4c7",
+  "build/prefab/full/mac_x86_64_gui/debug/ballisticakit": "2b41adbceeb183e7024ca0f8afa0770e",
-  "build/prefab/full/mac_x86_64_gui/release/ballisticakit": "bab389beb2e52641e5a7e5cf9a62bf69",
+  "build/prefab/full/mac_x86_64_gui/release/ballisticakit": "837c943294f33316645071acf649d66c",
-  "build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "7df7ca640e896ec966599e3f89975169",
+  "build/prefab/full/mac_x86_64_server/debug/dist/ballisticakit_headless": "09eb1243aaf52998ad182cff4efb7375",
-  "build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "cf35da279c2ff3d4a8241e51199fea5d",
+  "build/prefab/full/mac_x86_64_server/release/dist/ballisticakit_headless": "476f9d1a9ae840cb458d862460ba1c59",
-  "build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "0c1514eef06f41cb301406e12c4a9ed1",
+  "build/prefab/full/windows_x86_gui/debug/BallisticaKit.exe": "a4674b228aa7e966fabf4822594ea7dd",
-  "build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "765d1d5b248c5102da16dd6405996580",
+  "build/prefab/full/windows_x86_gui/release/BallisticaKit.exe": "326cb9f1e606f0a106e9a346f23a4930",
-  "build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "37817c2b289726c0b710c424df9f178f",
+  "build/prefab/full/windows_x86_server/debug/dist/BallisticaKitHeadless.exe": "fd000a93901f067421d4c337ef788fa5",
-  "build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "65d3ae6e675d4ca9d950354ee0ad2e01",
+  "build/prefab/full/windows_x86_server/release/dist/BallisticaKitHeadless.exe": "3c317e611a66b8f307c20b2893af19e6",
   "build/prefab/lib/linux_arm64_gui/debug/libballisticaplus.a": "a3607fd941915ab11503f82acfc392b5",
   "build/prefab/lib/linux_arm64_gui/release/libballisticaplus.a": "b5a129d83796c9e7015ab5e319d2c22f",
   "build/prefab/lib/linux_arm64_server/debug/libballisticaplus.a": "a3607fd941915ab11503f82acfc392b5",
@@ -4100,14 +4100,14 @@
   "build/prefab/lib/mac_x86_64_gui/release/libballisticaplus.a": "c5c40967e63471c9c4abd6dfbef892df",
   "build/prefab/lib/mac_x86_64_server/debug/libballisticaplus.a": "d34c0a142e7d391a109a33ea3cc77c08",
   "build/prefab/lib/mac_x86_64_server/release/libballisticaplus.a": "c5c40967e63471c9c4abd6dfbef892df",
-  "build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "189f112c8fe01f0986c6f055d6b11ade",
+  "build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.lib": "3539e7c953250aea0ce2b17444bf2416",
-  "build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "9e6c25f054ad5b316f442b6fdabeb3d5",
+  "build/prefab/lib/windows/Debug_Win32/BallisticaKitGenericPlus.pdb": "70b578f8b50222347aeb7fdc4e232eae",
-  "build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "5bed56449cb11318a2542c911716b129",
+  "build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.lib": "8d04b891a8edc9f36789fadd18f8b44f",
-  "build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "8371c50dda62dd03ecf16a8d936c5678",
+  "build/prefab/lib/windows/Debug_Win32/BallisticaKitHeadlessPlus.pdb": "5963b640147b3a762f09860eefded894",
-  "build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "de284cc96dda52b4112d70de50730365",
+  "build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.lib": "e55e703e9920612601ada6011450b8e0",
-  "build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "d0d59c523846e6988ae71d9a1e9cab16",
+  "build/prefab/lib/windows/Release_Win32/BallisticaKitGenericPlus.pdb": "d983ab4d7130d3945c5a71e8b762e7f1",
-  "build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "5fbe3d7931813e8313004603b255a5aa",
+  "build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.lib": "5cb3581574e84116b76333c89ca5550f",
-  "build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "d3a30a9e74713fac8afaa13f1d11ecf8",
+  "build/prefab/lib/windows/Release_Win32/BallisticaKitHeadlessPlus.pdb": "477579f388832b2d296c89c467b24094",
   "src/assets/ba_data/python/babase/_mgen/__init__.py": "f885fed7f2ed98ff2ba271f9dbe3391c",
   "src/assets/ba_data/python/babase/_mgen/enums.py": "f8cd3af311ac63147882590123b78318",
   "src/ballistica/base/mgen/pyembed/binding_base.inc": "ad347097a38e0d7ede9eb6dec6a80ee9",


@@ -1,4 +1,4 @@
-### 1.7.28 (build 21293, api 8, 2023-08-31)
+### 1.7.28 (build 21295, api 8, 2023-09-01)
 - Added some high level functionality for copying and deleting feature-sets to
   the `tools/spinoff` tool. For example, to create your own `poo` feature-set,
@@ -7,6 +7,8 @@
   `import bapoo` to get at your nice shiny poo feature-set. When you are done
   playing, you can do `tools/spinoff fset-delete poo` to blow away any traces of
   it.
+- Public builds now properly reconstruct the CMakeLists.txt file for project
+  changes.
 ### 1.7.27 (build 21282, api 8, 2023-08-30)


@@ -52,7 +52,7 @@ if TYPE_CHECKING:
 # Build number and version of the ballistica binary we expect to be
 # using.
-TARGET_BALLISTICA_BUILD = 21293
+TARGET_BALLISTICA_BUILD = 21295
 TARGET_BALLISTICA_VERSION = '1.7.28'


@@ -39,7 +39,7 @@ auto main(int argc, char** argv) -> int {
 namespace ballistica {
 // These are set automatically via script; don't modify them here.
-const int kEngineBuildNumber = 21293;
+const int kEngineBuildNumber = 21295;
 const char* kEngineVersion = "1.7.28";
 const int kEngineApiVersion = 8;


@@ -266,7 +266,7 @@ def filter_makefile(makefile_dir: str, contents: str) -> str:
     lines = contents.splitlines()
     if makefile_dir == '':
-        # In root makefile just use standard pcommandbatch var.
+        # In root Makefile, just use standard pcommandbatch var.
         pcommand = '$(PCOMMANDBATCH)'
     elif makefile_dir == 'src/assets':
         # Currently efrocache_get needs to be run from project-root so
@@ -275,7 +275,7 @@ def filter_makefile(makefile_dir: str, contents: str) -> str:
         pcommand = '$(PCOMMANDBATCHFROMROOT)'
     elif makefile_dir == 'src/resources':
         # Not yet enough stuff in resources to justify supporting
-        # pcommandbatch there; sticking with regular for now.
+        # pcommandbatch there; sticking with regular pcommand for now.
         pcommand = 'tools/pcommand'
     else:
         raise RuntimeError(f"Unsupported makefile_dir: '{makefile_dir}'.")
@@ -301,8 +301,11 @@ def update_cache(makefile_dirs: list[str]) -> None:
     import multiprocessing
 
     cpus = multiprocessing.cpu_count()
 
-    fnames1: list[str] = []
-    fnames2: list[str] = []
+    # Build a list of files going into our starter cache, files going
+    # into our headless starter cache, and all files.
+    fnames_starter: list[str] = []
+    fnames_all: list[str] = []
     for path in makefile_dirs:
         cdp = f'cd {path} && ' if path else ''
@@ -338,17 +341,17 @@ def update_cache(makefile_dirs: list[str]) -> None:
             fullpath = _project_centric_path(os.path.join(path, rawpath))
 
             # The main reason for this cache is to reduce round trips to
-            # the staging server for tiny files, so let's include small files
-            # only here. For larger stuff its ok to have a request per file..
+            # the staging server for tiny files, so let's include small
+            # files only here. For larger stuff its ok to have a request
+            # per file..
             if os.path.getsize(fullpath) < 100000:
-                fnames1.append(fullpath)
-            else:
-                fnames2.append(fullpath)
+                fnames_starter.append(fullpath)
+            fnames_all.append(fullpath)
 
-    # Ok, we've got 2 lists of filenames that we need to cache in the cloud.
-    # First, however, let's do a big hash of everything and if everything
-    # is exactly the same as last time we can skip this step.
-    hashes = _gen_complete_state_hashes(fnames1 + fnames2)
+    # Ok, we've got a big list of filenames we need to cache in the
+    # cloud. First, however, let's do a big hash of everything and if
+    # everything is exactly the same as last time we can skip this step.
+    hashes = _gen_complete_state_hashes(fnames_all)
     if os.path.isfile(UPLOAD_STATE_CACHE_FILE):
         with open(UPLOAD_STATE_CACHE_FILE, encoding='utf-8') as infile:
             hashes_existing = infile.read()
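Net effect of this hunk: every cache target now lands in fnames_all, while files under roughly 100 KB additionally land in fnames_starter (previously the two lists were disjoint, with large files going only to fnames2). A minimal standalone sketch of that classification; classify_targets and SMALL_FILE_CUTOFF are hypothetical names used only for illustration:

import os

# Matches the getsize() threshold used in the hunk above.
SMALL_FILE_CUTOFF = 100000


def classify_targets(fullpaths: list[str]) -> tuple[list[str], list[str]]:
    """Split cache targets into (starter-cache files, all files)."""
    fnames_starter: list[str] = []
    fnames_all: list[str] = []
    for fullpath in fullpaths:
        # Small files also go into the starter bundle to avoid per-file
        # round trips; every file is still cached individually.
        if os.path.getsize(fullpath) < SMALL_FILE_CUTOFF:
            fnames_starter.append(fullpath)
        fnames_all.append(fullpath)
    return fnames_starter, fnames_all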
@@ -361,7 +364,7 @@ def update_cache(makefile_dirs: list[str]) -> None:
             flush=True,
         )
     else:
-        _upload_cache(fnames1, fnames2, hashes, hashes_existing)
+        _update_cloud_cache(fnames_starter, fnames_all, hashes, hashes_existing)
 
     print(f'{Clr.SBLU}Efrocache update successful!{Clr.RST}')
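For context around these hunks: update_cache() fingerprints the complete file set, compares that against the state saved by the previous run, and only calls _update_cloud_cache() when something actually changed (the state file is then rewritten, per the next hunk). A rough sketch of that control flow, written as a hypothetical standalone wrapper; the real code inlines this in update_cache() and relies on the module's existing UPLOAD_STATE_CACHE_FILE constant and helpers:

import os


def _update_cloud_cache_if_needed(
    fnames_starter: list[str], fnames_all: list[str]
) -> None:
    """Hypothetical wrapper: upload only when the complete state changed."""
    hashes = _gen_complete_state_hashes(fnames_all)
    hashes_existing = ''
    if os.path.isfile(UPLOAD_STATE_CACHE_FILE):
        with open(UPLOAD_STATE_CACHE_FILE, encoding='utf-8') as infile:
            hashes_existing = infile.read()
    if hashes == hashes_existing:
        print('Efrocache state unchanged; skipping upload.', flush=True)
    else:
        _update_cloud_cache(fnames_starter, fnames_all, hashes, hashes_existing)
    # Remember the state we just pushed (or confirmed) for next time.
    with open(UPLOAD_STATE_CACHE_FILE, 'w', encoding='utf-8') as outfile:
        outfile.write(hashes)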
@@ -372,72 +375,6 @@ def update_cache(makefile_dirs: list[str]) -> None:
         outfile.write(hashes)
 
 
-def _upload_cache(
-    fnames1: list[str],
-    fnames2: list[str],
-    hashes_str: str,
-    hashes_existing_str: str,
-) -> None:
-    # First, if we've run before, print the files causing us to re-run:
-    if hashes_existing_str != '':
-        changed_files: set[str] = set()
-        hashes = json.loads(hashes_str)
-        hashes_existing = json.loads(hashes_existing_str)
-        for fname, ftime in hashes.items():
-            if ftime != hashes_existing.get(fname, ''):
-                changed_files.add(fname)
-        # We've covered modifications and additions; add deletions:
-        for fname in hashes_existing:
-            if fname not in hashes:
-                changed_files.add(fname)
-        print(
-            f'{Clr.SBLU}Updating efrocache due to'
-            f' {len(changed_files)} changes:{Clr.RST}'
-        )
-        for fname in sorted(changed_files):
-            print(f' {Clr.SBLU}{fname}{Clr.RST}')
-
-    # Now do the thing.
-    staging_dir = 'build/efrocache'
-    mapping_file = 'build/efrocachemap'
-    subprocess.run(['rm', '-rf', staging_dir], check=True)
-    subprocess.run(['mkdir', '-p', staging_dir], check=True)
-    _write_cache_files(fnames1, fnames2, staging_dir, mapping_file)
-    print(
-        f'{Clr.SBLU}Starter cache includes {len(fnames1)} items;'
-        f' excludes {len(fnames2)}{Clr.RST}'
-    )
-
-    # Sync all individual cache files to the staging server.
-    print(f'{Clr.SBLU}Pushing cache to staging...{Clr.RST}', flush=True)
-    subprocess.run(
-        [
-            'rsync',
-            '--progress',
-            '--recursive',
-            '--human-readable',
-            'build/efrocache/',
-            'ubuntu@staging.ballistica.net:files.ballistica.net/cache/ba1/',
-        ],
-        check=True,
-    )
-
-    # Now generate the starter cache on the server..
-    subprocess.run(
-        [
-            'ssh',
-            '-oBatchMode=yes',
-            '-oStrictHostKeyChecking=yes',
-            'ubuntu@staging.ballistica.net',
-            'cd files.ballistica.net/cache/ba1 && python3 genstartercache.py',
-        ],
-        check=True,
-    )
 
 
 def _gen_complete_state_hashes(fnames: list[str]) -> str:
     import hashlib
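The complete-state hash itself comes from _gen_complete_state_hashes(), whose body falls mostly outside this diff; all the surrounding code relies on is that it returns a stable string mapping each path to some per-file fingerprint (the ftime naming in the loop above suggests modification times are involved). A plausible sketch only, assuming an md5 over mod-time and size; the real implementation may fingerprint differently:

import hashlib
import json
import os


def _gen_complete_state_hashes_sketch(fnames: list[str]) -> str:
    """Illustrative stand-in for _gen_complete_state_hashes()."""
    hashes: dict[str, str] = {}
    for fname in fnames:
        stat = os.stat(fname)
        # Assumption: fingerprint built from mod-time and size; hashing
        # file contents would also work, just more slowly.
        hashes[fname] = hashlib.md5(
            f'{stat.st_mtime} {stat.st_size}'.encode()
        ).hexdigest()
    # Compact separators keep the state string small (matches the
    # json.dumps call visible at the end of the real function).
    return json.dumps(hashes, separators=(',', ':'))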
@@ -454,55 +391,122 @@ def _gen_complete_state_hashes(fnames: list[str]) -> str:
     return json.dumps(hashes, separators=(',', ':'))
 
 
-def _write_cache_files(
-    fnames1: list[str], fnames2: list[str], staging_dir: str, mapping_file: str
+def _update_cloud_cache(
+    fnames_starter: list[str],
+    fnames_all: list[str],
+    hashes_str: str,
+    hashes_existing_str: str,
 ) -> None:
+    # First, if we've run before, print the files causing us to re-run:
+    if hashes_existing_str != '':
+        changed_files: set[str] = set()
+        hashes = json.loads(hashes_str)
+        hashes_existing = json.loads(hashes_existing_str)
+        for fname, ftime in hashes.items():
+            if ftime != hashes_existing.get(fname, ''):
+                changed_files.add(fname)
+        # We've covered modifications and additions; add deletions.
+        for fname in hashes_existing:
+            if fname not in hashes:
+                changed_files.add(fname)
+        print(
+            f'{Clr.SBLU}Updating efrocache due to'
+            f' {len(changed_files)} changes:{Clr.RST}'
+        )
+        for fname in sorted(changed_files):
+            print(f' {Clr.SBLU}{fname}{Clr.RST}')
+
+    # Now do the thing.
+    staging_dir = 'build/efrocache'
+    mapping_file = 'build/efrocachemap'
+    subprocess.run(['rm', '-rf', staging_dir], check=True)
+    subprocess.run(['mkdir', '-p', staging_dir], check=True)
+    _gather_cache_files(fnames_starter, fnames_all, staging_dir, mapping_file)
+    print(
+        f'{Clr.SBLU}Starter cache includes {len(fnames_starter)} items;'
+        f' excludes {len(fnames_all) - len(fnames_starter)}{Clr.RST}'
+    )
+
+    # Sync all individual cache files to the staging server.
+    print(f'{Clr.SBLU}Pushing cache to staging...{Clr.RST}', flush=True)
+    subprocess.run(
+        [
+            'rsync',
+            '--progress',
+            '--recursive',
+            '--human-readable',
+            'build/efrocache/',
+            'ubuntu@staging.ballistica.net:files.ballistica.net/cache/ba1/',
+        ],
+        check=True,
+    )
+
+    # Now generate the starter cache on the server.
+    subprocess.run(
+        [
+            'ssh',
+            '-oBatchMode=yes',
+            '-oStrictHostKeyChecking=yes',
+            'ubuntu@staging.ballistica.net',
+            'cd files.ballistica.net/cache/ba1 && python3 genstartercache.py',
+        ],
+        check=True,
+    )
+
+
+def _gather_cache_files(
+    fnames_starter: list[str],
+    fnames_all: list[str],
+    staging_dir: str,
+    mapping_file: str,
+) -> None:
+    # pylint: disable=too-many-locals
     import functools
 
-    fhashes1: set[str] = set()
-    fhashes2: set[str] = set()
-    mapping: dict[str, str] = {}
+    fhashpaths_all: set[str] = set()
+    names_to_hashes: dict[str, str] = {}
+    names_to_hashpaths: dict[str, str] = {}
     writecall = functools.partial(_write_cache_file, staging_dir)
 
-    # Do the first set.
+    # Calc hashes and hash-paths for all cache files.
     with ThreadPoolExecutor(max_workers=cpu_count()) as executor:
-        results = executor.map(writecall, fnames1)
-        for result in results:
-            # mapping[result[0]] = f'{base_url}/{result[1]}'
-            mapping[result[0]] = result[1]
-            fhashes1.add(result[2])
+        for fname, fhash, fhashpath in executor.map(writecall, fnames_all):
+            names_to_hashes[fname] = fhash
+            names_to_hashpaths[fname] = fhashpath
+            fhashpaths_all.add(fhashpath)
 
-    # Now finish up with the second set.
-    with ThreadPoolExecutor(max_workers=cpu_count()) as executor:
-        results = executor.map(writecall, fnames2)
-        for result in results:
-            # mapping[result[0]] = f'{base_url}/result[1]'
-            mapping[result[0]] = result[1]
-            fhashes2.add(result[2])
+    # Now calc hashpaths for our starter file set.
+    fhashpaths_starter: set[str] = set()
+    for fname in fnames_starter:
+        fhashpaths_starter.add(names_to_hashpaths[fname])
 
-    # We want the server to have a startercache.tar.xz file which
-    # contains the entire first set. It is much more efficient to build
-    # that file on the server than it is to build it here and upload the
-    # whole thing. ...so let's simply write a script to generate it and
-    # upload that.
-    # Also let's have the script touch both sets of files so we can use
-    # mod-times to prune older files. Otherwise files that never change
+    # We want the server to have a startercache(server).tar.xz files
+    # which contain the entire subsets we were passed. It is much more
+    # efficient to build those files on the server than it is to build
+    # them here and upload the whole thing. ...so let's simply write a
+    # script to generate them and upload that.
+    # Also let's have the script touch the full set of files we're still
+    # using so we can use mod-times to prune unused ones eventually.
+    # Otherwise files that we're still using but which never change
     # might have very old mod times.
     script = (
         'import os\n'
         'import pathlib\n'
         'import subprocess\n'
-        'fnames = ' + repr(fhashes1) + '\n'
-        'fnames2 = ' + repr(fhashes2) + '\n'
+        f'fnames_starter = {repr(fhashpaths_starter)}\n'
+        f'fnames_all = {repr(fhashpaths_all)}\n'
        'subprocess.run(["rm", "-rf", "efrocache"], check=True)\n'
-        'print("Copying starter cache files...", flush=True)\n'
-        'for fname in fnames:\n'
+        'print("Gathering starter cache files...", flush=True)\n'
+        'for fname in fnames_starter:\n'
         ' dst = os.path.join("efrocache", fname)\n'
         ' os.makedirs(os.path.dirname(dst), exist_ok=True)\n'
         ' subprocess.run(["cp", fname, dst], check=True)\n'
         'print("Touching full file set...", flush=True)\n'
-        'for fname in list(fnames) + list(fnames2):\n'
+        'for fname in fnames_all:\n'
         ' fpath = pathlib.Path(fname)\n'
         ' assert fpath.exists()\n'
         ' fpath.touch()\n'
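Rendered out as a plain file rather than built up through string concatenation, the script that gets pushed to the server does roughly the following. The hash-path values below are hypothetical placeholders, and the packaging of the starter tarball(s) falls in script lines elided between this hunk and the next, so only the gathering and touch steps are shown:

# Illustrative rendering of the generated script; the two sets are baked
# in as literals when the real script is written out.
import os
import pathlib
import subprocess

fnames_starter = {'aa/1111...', 'bb/2222...'}  # hypothetical hash-paths
fnames_all = {'aa/1111...', 'bb/2222...', 'cc/3333...'}

subprocess.run(['rm', '-rf', 'efrocache'], check=True)

print('Gathering starter cache files...', flush=True)
for fname in fnames_starter:
    dst = os.path.join('efrocache', fname)
    os.makedirs(os.path.dirname(dst), exist_ok=True)
    subprocess.run(['cp', fname, dst], check=True)

print('Touching full file set...', flush=True)
for fname in fnames_all:
    # Keep mod-times fresh on everything still referenced so stale cache
    # entries can eventually be pruned by age.
    fpath = pathlib.Path(fname)
    assert fpath.exists()
    fpath.touch()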
@@ -521,7 +525,7 @@ def _write_cache_files(
         outfile.write(script)
 
     with open(mapping_file, 'w', encoding='utf-8') as outfile:
-        outfile.write(json.dumps(mapping, indent=2, sort_keys=True))
+        outfile.write(json.dumps(names_to_hashes, indent=2, sort_keys=True))
 
 
 def _path_from_hash(hashstr: str) -> str: