more work on efrosync system

Eric Froemling 2019-10-12 16:41:47 -07:00
parent 422ba1cb2b
commit 3992db622b
6 changed files with 18623 additions and 8 deletions

.gitignore

@@ -11,6 +11,7 @@ local.properties
*.swo
*.vscode
.sconsign.dblite
.efrocache
.mayaSwatches
.gdb_history
.dmypy.json

Makefile

@@ -34,10 +34,8 @@ all: help
# Tell make which of these targets don't represent files.
.PHONY: all
-#__EFROCACHE_TARGET__
-build/testfile:
-	mkdir -p $(dir $@)
-	echo foobar > $@
+build/testfile: $(PROJ_DIR)/.efrocachemap
+	tools/snippets efrocache_get $@
################################################################################
@@ -302,17 +300,17 @@ preflightfull2:
# This should give the cpu count on linux and mac; may need to expand this
# if using this on other platforms.
CPUS = $(shell getconf _NPROCESSORS_ONLN || echo 8)
-ROOT_DIR = ${abspath ${CURDIR}}
+PROJ_DIR = ${abspath ${CURDIR}}
VERSION = $(shell tools/version_utils version)
BUILD_NUMBER = $(shell tools/version_utils build)
-DIST_DIR = ${ROOT_DIR}/build
+BUILD_DIR = ${PROJ_DIR}/build
# Things to ignore when doing root level cleans.
ROOT_CLEAN_IGNORES = --exclude=assets/src_master \
--exclude=config/localconfig.json \
--exclude=.spinoffdata
-CHECK_CLEAN_SAFETY = ${ROOT_DIR}/tools/snippets check_clean_safety
+CHECK_CLEAN_SAFETY = ${PROJ_DIR}/tools/snippets check_clean_safety
# Some tool configs that need filtering (mainly injecting projroot path).
TOOL_CFG_INST = tools/snippets tool_config_install

assets/Makefile (new file, 18511 lines)

File diff suppressed because it is too large.

efrotools/efrocache.py

@@ -29,6 +29,101 @@ from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import List, Dict
TARGET_TAG = '#__EFROCACHE_TARGET__'
STRIP_BEGIN_TAG = '#__EFROCACHE_STRIP_BEGIN__'
STRIP_END_TAG = '#__EFROCACHE_STRIP_END__'


def get_file_hash(path: str) -> str:
    """Return the hash used for caching.

    This incorporates the file contents as well as its path.
    """
    import hashlib
    md5 = hashlib.md5()
    with open(path, 'rb') as infile:
        md5.update(infile.read())
    md5.update(path.encode())
    return md5.hexdigest()
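
Because the path is hashed along with the contents, identical bytes at two
different paths map to two distinct cache entries; a quick sanity check (the
file paths here are hypothetical):

    import shutil
    shutil.copy('build/testfile', 'build/testfile.copy')
    # Same bytes, but a different path feeds the digest:
    assert get_file_hash('build/testfile') != get_file_hash('build/testfile.copy')
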
def get_target(path: str) -> None:
    """Fetch a target path from the cache, downloading if need be."""
    import json
    from efrotools import run
    with open('.efrocachemap') as infile:
        efrocachemap = json.loads(infile.read())
    if path not in efrocachemap:
        raise RuntimeError(f'Path not found in efrocache: {path}')
    url = efrocachemap[path]
    subpath = '/'.join(url.split('/')[-3:])
    local_cache_path = os.path.join('.efrocache', subpath)
    local_cache_path_dl = local_cache_path + '.download'
    hashval = ''.join(subpath.split('/'))

    # First off: if there's already a file in place, check its hash.
    # If it matches the cache, we can just update its timestamp and
    # call it a day.
    if os.path.isfile(path):
        existing_hash = get_file_hash(path)
        if existing_hash == hashval:
            print('FOUND VALID FILE; TOUCHING')
            run(f'touch {path}')
            return

    # Ok there's not a valid file in place already.
    # Clear out whatever is there to start with.
    if os.path.exists(path):
        os.unlink(path)

    # Now if we don't have this entry in our local cache,
    # download it.
    if not os.path.exists(local_cache_path):
        os.makedirs(os.path.dirname(local_cache_path), exist_ok=True)
        print('Downloading:', path)
        run(f'curl {url} > {local_cache_path_dl}')
        run(f'mv {local_cache_path_dl} {local_cache_path}')

    # Ok we should have a valid .tar.gz file in our cache dir at this point.
    # Just expand it and it will get placed wherever it belongs.
    run(f'tar -zxf {local_cache_path}')

    # The file will wind up with the timestamp it was compressed with,
    # so let's update its timestamp or else it will still be considered
    # dirty.
    run(f'touch {path}')
    if not os.path.exists(path):
        raise RuntimeError(f'File {path} did not wind up as expected.')
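
For reference, the relationship the hash check above relies on: the last three
components of a cache URL, concatenated, reproduce the file's hash, so an
existing on-disk file can be validated without touching the network. (The
sample URL below and the exact way efrocache_update splits the hash into path
components are assumptions; that code is not shown in this diff.) Note also
that downloading to a '.download' name and mv-ing it into place means an
interrupted download can never leave a truncated entry in the cache.

    # Hypothetical .efrocachemap entry for 'build/testfile':
    url = 'https://files.example.com/cache/ab/cd/ef0123456789abcdef0123456789'
    subpath = '/'.join(url.split('/')[-3:])  # 'ab/cd/ef0123456789abcdef0123456789'
    hashval = ''.join(subpath.split('/'))    # 'abcdef0123456789abcdef0123456789'
    # get_target() compares this against get_file_hash(path).
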
def filter_makefile(makefile_dir: str, contents: str) -> str:
    """Filter makefile contents to use efrocache lookups."""
    cachemap = '$(PROJ_DIR)/.efrocachemap'
    lines = contents.splitlines()
    snippets = 'tools/snippets'

    # Strip out parts they don't want.
    while STRIP_BEGIN_TAG in lines:
        index = lines.index(STRIP_BEGIN_TAG)
        endindex = index
        while lines[endindex] != STRIP_END_TAG:
            endindex += 1
        del lines[index:endindex + 1]

    # Replace cachable targets with cache lookups.
    while TARGET_TAG in lines:
        index = lines.index(TARGET_TAG)
        endindex = index
        while lines[endindex].strip() != '':
            endindex += 1
        tname = lines[index + 1].split(':')[0]
        del lines[index:endindex]
        lines.insert(index, tname + ': ' + cachemap)
        target = (makefile_dir + '/' + '$@') if makefile_dir else '$@'
        pre = 'cd $(PROJ_DIR) && ' if makefile_dir else ''
        lines.insert(index + 1, f'\t{pre}{snippets} efrocache_get {target}')
    return '\n'.join(lines) + '\n'
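
A minimal worked example of the filter (the sample rules are hypothetical; the
tags are the module constants defined above). The stripped block disappears
entirely, and the tagged target collapses to exactly the cache-lookup form
visible in the root Makefile hunk earlier in this commit:

    sample = '\n'.join([
        '#__EFROCACHE_STRIP_BEGIN__',
        'secret-target:',
        '\t./private_tool',
        '#__EFROCACHE_STRIP_END__',
        '#__EFROCACHE_TARGET__',
        'build/testfile:',
        '\tmkdir -p $(dir $@)',
        '\techo foobar > $@',
        '',
    ])
    print(filter_makefile('', sample))
    # Prints:
    # build/testfile: $(PROJ_DIR)/.efrocachemap
    #     tools/snippets efrocache_get $@   (recipe line is tab-indented)
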
def update_cache(makefile_dirs: List[str]) -> None:
    """Given a list of directories containing makefiles, update caches."""

tools/snippets

@@ -606,7 +606,10 @@ def efrocache_update() -> None:
def efrocache_get() -> None:
    """Get a file from efrocache."""
-    print('WOULD GET FROM EFROCACHE')
+    from efrotools.efrocache import get_target
+    if len(sys.argv) != 3:
+        raise RuntimeError('Expected exactly 1 arg')
+    get_target(sys.argv[2])
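
For clarity, how the argument check lines up with an invocation like the one
in the Makefile rule above (argv layout per the snippets command dispatch):

    # tools/snippets efrocache_get build/testfile
    # -> sys.argv == ['tools/snippets', 'efrocache_get', 'build/testfile']
    # so len(sys.argv) == 3, and sys.argv[2] is the target path handed
    # to get_target().
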
if __name__ == '__main__':


@@ -330,6 +330,7 @@ class App:
        raise RuntimeError(f'Pub license not found in {fname}')

    def _check_python_file(self, fname: str) -> None:
        # pylint: disable=too-many-branches
        from efrotools import get_public_license
        with open(fname) as infile:
            contents = infile.read()
@@ -350,6 +351,12 @@ class App:
        else:
            copyrightline = 0

        # Special case: if there's a spinoff autogenerate notice there,
        # look below it.
        if (lines[copyrightline] == ''
                and 'THIS FILE IS AUTOGENERATED' in lines[copyrightline + 1]):
            copyrightline += 2

        # In all cases, look for our one-line legal notice.
        # In the public case, look for the rest of our public license too.
        public_license = get_public_license('python')
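
For illustration, the header shape this special case accepts; everything here
besides the 'THIS FILE IS AUTOGENERATED' marker and the positional logic is an
assumption:

    lines = [
        '# Copyright (c) 2019 Eric Froemling',  # located by earlier code
        '',                                     # lines[copyrightline]: blank
        '# THIS FILE IS AUTOGENERATED by spinoff; do not edit.',
        '# (one-line legal notice expected here)',
    ]
    copyrightline = 1
    if (lines[copyrightline] == ''
            and 'THIS FILE IS AUTOGENERATED' in lines[copyrightline + 1]):
        copyrightline += 2  # now points at the expected notice line
    assert copyrightline == 3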