Mirror of https://github.com/RYDE-WORK/ballistica.git (synced 2026-02-05 23:13:46 +08:00)
Updated tools for new color stuff
This commit is contained in:
parent ac90429389
commit 172ca83ccb
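This commit migrates the tool scripts from per-module ANSI escape constants (CLRHDR, CLRGRN, CLRBLU, CLRRED, CLREND) to the shared Clr class provided by efro.terminal, and, in the snippets module, moves the module-level `import subprocess` into the functions that use it. A minimal sketch of the pattern, assuming only the Clr attribute names that actually appear in the diff below (SRED, SGRN, SBLU, SMAG, RST); the surrounding code is illustrative, not part of the commit:

# Before: each tool script defined its own escape-code constants.
CLRRED = '\033[91m'  # Red.
CLREND = '\033[0m'   # End / reset.
print(f'{CLRRED}Something went wrong.{CLREND}')

# After: the shared color class is imported once from efro.terminal.
from efro.terminal import Clr

print(f'{Clr.SRED}Something went wrong.{Clr.RST}')

The reconstructed diff follows.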
@@ -34,7 +34,7 @@ NOTE: This file was autogenerated by gendummymodule; do not edit by hand.
 """

 # (hash we can use to see if this file is out of date)
-# SOURCES_HASH=10577028919433802623543258674047257154
+# SOURCES_HASH=46017893405708946031504998063019866722

 # I'm sorry Pylint. I know this file saddens you. Be strong.
 # pylint: disable=useless-suppression
@@ -36,6 +36,8 @@ import tempfile

 import requests

+from efro.terminal import Clr
+
 if TYPE_CHECKING:
 from typing import Optional, Dict, Tuple, List, BinaryIO

@@ -52,12 +54,6 @@ MASTER_SERVER_ADDRESS = (
 else 'https://1-dot-test-dot-bamaster.appspot.com' if os.environ.get(
 'BACLOUD_SERVER') == 'TEST' else 'https://bamaster.appspot.com')

-CLRHDR = '\033[95m' # Header.
-CLRGRN = '\033[92m' # Green.
-CLRBLU = '\033[94m' # Glue.
-CLRRED = '\033[91m' # Red.
-CLREND = '\033[0m' # End.
-

 @dataclass
 class StateData:
@@ -244,8 +240,8 @@ class App:
 with open(self._state_data_path, 'r') as infile:
 self._state = StateData(**json.loads(infile.read()))
 except Exception:
-print(f'{CLRRED}Error loading {TOOL_NAME} data;'
-f' resetting to defaults.{CLREND}')
+print(f'{Clr.SRED}Error loading {TOOL_NAME} data;'
+f' resetting to defaults.{Clr.RST}')

 def _save_state(self) -> None:
 if not self._state_dir.exists():
@@ -292,7 +288,7 @@ class App:
 return response

 def _upload_file(self, filename: str, call: str, args: Dict) -> None:
-print(f'{CLRBLU}Uploading {filename}{CLREND}', flush=True)
+print(f'{Clr.SBLU}Uploading {filename}{Clr.RST}', flush=True)
 with tempfile.TemporaryDirectory() as tempdir:
 srcpath = Path(filename)
 gzpath = Path(tempdir, 'file.gz')
@@ -448,5 +444,5 @@ if __name__ == '__main__':
 sys.exit(-1)
 except CleanError as exc:
 if str(exc):
-print(f'{CLRRED}{exc}{CLREND}')
+print(f'{Clr.SRED}{exc}{Clr.RST}')
 sys.exit(-1)
@@ -35,15 +35,11 @@ from typing import TYPE_CHECKING
 from multiprocessing import cpu_count
 from concurrent.futures import ThreadPoolExecutor

+from efro.terminal import Clr
+
 if TYPE_CHECKING:
 from typing import List, Dict, Tuple, Set

-CLRHDR = '\033[95m' # Header.
-CLRGRN = '\033[92m' # Green.
-CLRBLU = '\033[94m' # Glue.
-CLRRED = '\033[91m' # Red.
-CLREND = '\033[0m' # End.
-
 BASE_URL = 'https://files.ballistica.net/cache/ba1/'

 TARGET_TAG = '#__EFROCACHE_TARGET__'
@@ -102,7 +98,7 @@ def get_target(path: str) -> None:
 # download it.
 if not os.path.exists(local_cache_path):
 os.makedirs(os.path.dirname(local_cache_path), exist_ok=True)
-print(f'Downloading: {CLRBLU}{path}{CLREND}')
+print(f'Downloading: {Clr.SBLU}{path}{Clr.RST}')
 run(f'curl --silent {url} > {local_cache_path_dl}')
 run(f'mv {local_cache_path_dl} {local_cache_path}')

@@ -173,7 +169,7 @@ def update_cache(makefile_dirs: List[str]) -> None:

 # First, make sure all cache files are built.
 mfpath = os.path.join(path, 'Makefile')
-print(f'Building efrocache targets for {CLRBLU}{mfpath}{CLREND}...')
+print(f'Building efrocache targets for {Clr.SBLU}{mfpath}{Clr.RST}...')
 subprocess.run(f'{cdp}make -j{cpus} efrocache-build',
 shell=True,
 check=True)
@@ -213,13 +209,13 @@ def update_cache(makefile_dirs: List[str]) -> None:
 hashes_existing = ''
 if hashes == hashes_existing:
 print(
-f'{CLRBLU}Efrocache state unchanged;'
-f' skipping cache push.{CLREND}',
+f'{Clr.SBLU}Efrocache state unchanged;'
+f' skipping cache push.{Clr.RST}',
 flush=True)
 else:
 _upload_cache(fnames1, fnames2, hashes, hashes_existing)

-print(f'{CLRBLU}Efrocache update successful!{CLREND}')
+print(f'{Clr.SBLU}Efrocache update successful!{Clr.RST}')

 # Write the cache state so we can skip the next run if nothing changes.
 os.makedirs(os.path.dirname(UPLOAD_STATE_CACHE_FILE), exist_ok=True)
@@ -244,10 +240,10 @@ def _upload_cache(fnames1: List[str], fnames2: List[str], hashes_str: str,
 for fname in hashes_existing:
 if fname not in hashes:
 changed_files.add(fname)
-print(f'{CLRBLU}Updating efrocache due to'
-f' {len(changed_files)} changes:{CLREND}')
+print(f'{Clr.SBLU}Updating efrocache due to'
+f' {len(changed_files)} changes:{Clr.RST}')
 for fname in sorted(changed_files):
-print(f' {CLRBLU}{fname}{CLREND}')
+print(f' {Clr.SBLU}{fname}{Clr.RST}')

 # Now do the thing.
 staging_dir = 'build/efrocache'
@@ -257,11 +253,11 @@ def _upload_cache(fnames1: List[str], fnames2: List[str], hashes_str: str,

 _write_cache_files(fnames1, fnames2, staging_dir, mapping_file)

-print(f'{CLRBLU}Starter cache includes {len(fnames1)} items;'
-f' excludes {len(fnames2)}{CLREND}')
+print(f'{Clr.SBLU}Starter cache includes {len(fnames1)} items;'
+f' excludes {len(fnames2)}{Clr.RST}')

 # Sync all individual cache files to the staging server.
-print(f'{CLRBLU}Pushing cache to staging...{CLREND}', flush=True)
+print(f'{Clr.SBLU}Pushing cache to staging...{Clr.RST}', flush=True)
 run('rsync --progress --recursive build/efrocache/'
 ' ubuntu@ballistica.net:files.ballistica.net/cache/ba1/')
@@ -24,15 +24,15 @@ A snippet is a mini-program that directly takes input from stdin and does
 some focused task. This module is a repository of common snippets that can
 be imported into projects' snippets script for easy reuse.
 """

 from __future__ import annotations

 import os
-import subprocess
 import sys
 from pathlib import Path
 from typing import TYPE_CHECKING

+from efro.terminal import Clr
+
 if TYPE_CHECKING:
 from typing import Dict, Any, List

@@ -44,11 +44,6 @@ class CleanError(Exception):
 # Absolute path of the project root.
 PROJROOT = Path(__file__).resolve().parents[2]

-CLRHDR = '\033[95m'
-CLRRED = '\033[91m'
-CLRBLU = '\033[94m'
-CLREND = '\033[0m'
-

 def snippets_main(globs: Dict[str, Any]) -> None:
 """Run a snippet contained in the snippets script.
@@ -63,7 +58,7 @@ def snippets_main(globs: Dict[str, Any]) -> None:
 show_help = False
 retval = 0
 if len(sys.argv) < 2:
-print(f'{CLRRED}ERROR: command expected.{CLREND}')
+print(f'{Clr.SRED}ERROR: command expected.{Clr.RST}')
 show_help = True
 retval = 255
 else:
@@ -81,7 +76,7 @@ def snippets_main(globs: Dict[str, Any]) -> None:
 try:
 funcs[sys.argv[1]]()
 except CleanError as exc:
-print(CLRRED + str(exc) + CLREND)
+print(Clr.SRED + str(exc) + Clr.RST)
 sys.exit(-1)
 else:
 print('Unknown snippets command: "' + sys.argv[1] + '"',
@@ -95,7 +90,7 @@ def snippets_main(globs: Dict[str, Any]) -> None:
 print('Available commands:')
 for func, obj in sorted(funcs.items()):
 doc = getattr(obj, '__doc__', '').splitlines()[0].strip()
-print(f'{CLRHDR}{func}{CLRBLU} - {doc}{CLREND}')
+print(f'{Clr.SMAG}{func}{Clr.SBLU} - {doc}{Clr.RST}')
 sys.exit(retval)


@@ -156,6 +151,7 @@ def _spelling(words: List[str]) -> None:

 def spelling_all() -> None:
 """Add all misspellings from a pycharm run."""
+import subprocess

 print('Running "make pycharm-full"...')
 lines = [
@@ -182,6 +178,7 @@ def check_clean_safety() -> None:
 Use to avoid losing work if we accidentally do a clean without
 adding something.
 """
+import subprocess
 if len(sys.argv) != 2:
 raise Exception('invalid arguments')

@@ -364,8 +361,9 @@ def sync_all() -> None:
 This assumes that there is a 'sync-full' and 'sync-list' Makefile target
 under each project.
 """
+import subprocess
 import concurrent.futures
-print(f'{CLRBLU}Updating formatting for all projects...{CLREND}')
+print(f'{Clr.SBLU}Updating formatting for all projects...{Clr.RST}')
 projects_str = os.environ.get('EFROTOOLS_SYNC_PROJECTS')
 if projects_str is None:
 raise CleanError('EFROTOOL_SYNC_PROJECTS is not defined.')
@@ -396,17 +394,17 @@ def sync_all() -> None:
 # Real mode
 for i in range(2):
 if i == 0:
-print(CLRBLU + 'Running sync pass 1:'
+print(Clr.SBLU + 'Running sync pass 1:'
 ' (ensures all changes at dsts are pushed to src)' +
-CLREND)
+Clr.RST)
 else:
-print(CLRBLU + 'Running sync pass 2:'
-' (ensures latest src is pulled to all dsts)' + CLREND)
+print(Clr.SBLU + 'Running sync pass 2:'
+' (ensures latest src is pulled to all dsts)' + Clr.RST)
 for project in projects_str.split(':'):
 cmd = f'cd "{project}" && make sync-full'
 print(cmd)
 subprocess.run(cmd, shell=True, check=True)
-print(CLRBLU + 'Sync-all successful!' + CLREND)
+print(Clr.SBLU + 'Sync-all successful!' + Clr.RST)


 def sync() -> None:
@@ -446,6 +444,7 @@ def compile_python_files() -> None:
 def pytest() -> None:
 """Run pytest with project environment set up properly."""
 import platform
+import subprocess
 from efrotools import get_config, PYTHON_BIN

 # Grab our python paths for the project and stuff them in PYTHONPATH.
@@ -522,5 +521,5 @@ def makefile_target_list() -> None:
 continue
 print('\n' + entry.title + '\n' + '-' * len(entry.title))
 elif entry.kind == 'target':
-print(CLRHDR + entry.title + CLRBLU + _docstr(lines, entry.line) +
-CLREND)
+print(Clr.SMAG + entry.title + Clr.SBLU +
+_docstr(lines, entry.line) + Clr.RST)
@@ -30,15 +30,11 @@ from enum import Enum
 from pathlib import Path
 from typing import TYPE_CHECKING

+from efro.terminal import Clr
+
 if TYPE_CHECKING:
 from typing import List, Tuple, Optional, Sequence

-CLRHDR = '\033[95m'
-CLRGRN = '\033[92m'
-CLRBLU = '\033[94m'
-CLRRED = '\033[91m'
-CLREND = '\033[0m'
-

 class Mode(Enum):
 """Modes for sync operations."""
@@ -147,9 +143,9 @@ def sync_paths(src_proj: str, src: Path, dst: Path, mode: Mode) -> int:
 if not dstfile.is_file() or mode == Mode.FORCE:
 if mode == Mode.LIST:
 print(f'Would pull from {src_proj}:'
-f' {CLRGRN}{dstfile}{CLREND}')
+f' {Clr.SGRN}{dstfile}{Clr.RST}')
 else:
-print(f'Pulling from {src_proj}: {CLRGRN}{dstfile}{CLREND}')
+print(f'Pulling from {src_proj}: {Clr.SGRN}{dstfile}{Clr.RST}')

 # No dst file; pull src across.
 with dstfile.open('w') as outfile:
@@ -164,9 +160,9 @@ def sync_paths(src_proj: str, src: Path, dst: Path, mode: Mode) -> int:
 if src_hash != marker_hash and dst_hash == marker_hash:
 if mode == Mode.LIST:
 print(f'Would pull from {src_proj}:'
-f' {CLRGRN}{dstfile}{CLREND}')
+f' {Clr.SGRN}{dstfile}{Clr.RST}')
 else:
-print(f'Pulling from {src_proj}: {CLRGRN}{dstfile}{CLREND}')
+print(f'Pulling from {src_proj}: {Clr.SGRN}{dstfile}{Clr.RST}')

 # Src has changed; simply pull across to dst.
 with dstfile.open('w') as outfile:
@@ -177,9 +173,10 @@ def sync_paths(src_proj: str, src: Path, dst: Path, mode: Mode) -> int:
 # Dst has changed; we only copy backwards to src
 # if we're in full mode.
 if mode == Mode.LIST:
-print(f'Would push to {src_proj}: {CLRBLU}{dstfile}{CLREND}')
+print(f'Would push to {src_proj}:'
+f' {Clr.SBLU}{dstfile}{Clr.RST}')
 elif mode == Mode.FULL:
-print(f'Pushing to {src_proj}: {CLRBLU}{dstfile}{CLREND}')
+print(f'Pushing to {src_proj}: {Clr.SBLU}{dstfile}{Clr.RST}')
 with srcfile.open('w') as outfile:
 outfile.write(dstdata)

@@ -201,10 +198,10 @@ def sync_paths(src_proj: str, src: Path, dst: Path, mode: Mode) -> int:
 if mode == Mode.LIST:
 print(f'Would update dst hash (both files changed'
 f' identically) from {src_proj}:'
-f' {CLRGRN}{dstfile}{CLREND}')
+f' {Clr.SGRN}{dstfile}{Clr.RST}')
 else:
 print(f'Updating hash (both files changed)'
-f' from {src_proj}: {CLRGRN}{dstfile}{CLREND}')
+f' from {src_proj}: {Clr.SGRN}{dstfile}{Clr.RST}')
 with dstfile.open('w') as outfile:
 outfile.write(add_marker(src_proj, srcdata))
 continue
@@ -237,10 +234,10 @@ def sync_paths(src_proj: str, src: Path, dst: Path, mode: Mode) -> int:
 if os.path.exists(killpath):
 if mode == Mode.LIST:
 print(f'Would remove orphaned sync path:'
-f' {CLRRED}{killpath}{CLREND}')
+f' {Clr.SRED}{killpath}{Clr.RST}')
 else:
 print(f'Removing orphaned sync path:'
-f' {CLRRED}{killpath}{CLREND}')
+f' {Clr.SRED}{killpath}{Clr.RST}')
 os.system('rm -rf "' + str(killpath) + '"')

 # Lastly throw an error if we found any changed dst files and aren't
@@ -28,7 +28,6 @@ Functions can be placed here when they're not complex enough to warrant
 their own files. Often these functions act as user-facing entry points
 to functionality contained in efrotools or other standalone tool modules.
 """

 from __future__ import annotations
-
 import os
@@ -42,15 +42,11 @@ import subprocess
 from dataclasses import dataclass
 from typing import TYPE_CHECKING

+from efro.terminal import Clr
+
 if TYPE_CHECKING:
 from typing import Optional, Tuple, List, Dict, Set

-CLRHDR = '\033[95m' # Header.
-CLRGRN = '\033[92m' # Green.
-CLRBLU = '\033[94m' # Blue.
-CLRRED = '\033[91m' # Red.
-CLREND = '\033[0m' # End.
-

 def get_legal_notice_private() -> str:
 """Return the one line legal notice we expect private files to have."""
@@ -148,7 +144,8 @@ class App:
 # been updated.
 if os.path.exists('tools/gendummymodule.py'):
 if os.system('tools/gendummymodule.py' + self._checkarg) != 0:
-print(CLRRED + 'Error checking/updating dummy module' + CLREND)
+print(Clr.SRED + 'Error checking/updating dummy module' +
+Clr.RST)
 sys.exit(255)

 def _update_docs_md(self) -> None:
@@ -160,8 +157,8 @@ class App:
 if os.path.exists('tools/gendocs.py'):
 if os.system('tools/snippets update_docs_md' +
 self._checkarg) != 0:
-print(CLRRED + 'Error checking/updating docs markdown.' +
-CLREND)
+print(Clr.SRED + 'Error checking/updating docs markdown.' +
+Clr.RST)
 sys.exit(255)

 def _update_compile_commands_file(self) -> None:
@@ -169,7 +166,7 @@ class App:
 # our cmake stuff. Do this at end so cmake changes already happened.
 if not self._check and os.path.exists('ballisticacore-cmake'):
 if os.system('make .irony/compile_commands.json') != 0:
-print(CLRRED + 'Error updating compile-commands.' + CLREND)
+print(Clr.SRED + 'Error updating compile-commands.' + Clr.RST)
 sys.exit(255)

 def _apply_file_changes(self) -> None:
@@ -187,11 +184,11 @@ class App:
 unchanged_project_count += 1
 else:
 if self._check:
-print(f'{CLRRED}ERROR: found out-of-date'
-f' project file: {fname}{CLREND}')
+print(f'{Clr.SRED}ERROR: found out-of-date'
+f' project file: {fname}{Clr.RST}')
 sys.exit(255)

-print(f'{CLRBLU}Writing project file: {fname}{CLREND}')
+print(f'{Clr.SBLU}Writing project file: {fname}{Clr.RST}')
 with open(fname, 'w') as outfile:
 outfile.write(fcode)
 if unchanged_project_count > 0:
@@ -214,40 +211,43 @@ class App:
 # If there are any manual-only entries, list then and bail.
 # (Don't wanna allow auto-apply unless it fixes everything)
 if manual_changes:
-print(f'{CLRRED}Found erroneous lines '
-f'requiring manual correction:{CLREND}')
+print(f'{Clr.SRED}Found erroneous lines '
+f'requiring manual correction:{Clr.RST}')
 for change in manual_changes:
-print(f'{CLRRED}{change[0]}:{change[1].line_number + 1}:'
-f' Expected line to be:\n {change[1].expected}{CLREND}')
+print(
+f'{Clr.SRED}{change[0]}:{change[1].line_number + 1}:'
+f' Expected line to be:\n {change[1].expected}{Clr.RST}')

 # Make a note on copyright lines that this can be disabled.
 if 'Copyright' in change[1].expected:
-print(f'{CLRRED}NOTE: You can disable copyright'
+print(f'{Clr.SRED}NOTE: You can disable copyright'
 f' checks by adding "copyright_checks": false\n'
 f'to the root dict in config/localconfig.json.\n'
 f'see https://github.com/efroemling/ballistica/wiki'
 f'/Knowledge-Nuggets#'
-f'hello-world-creating-a-new-game-type{CLREND}')
+f'hello-world-creating-a-new-game-type{Clr.RST}')
 sys.exit(-1)

 # Now, if we've got auto entries, either list or auto-correct them.
 if auto_changes:
 if not self._fix:
 for i, change in enumerate(auto_changes):
-print(f'{CLRRED}#{i}: {change[0]}:{CLREND}')
-print(f'{CLRRED} Expected "{change[1].expected}"{CLREND}')
+print(f'{Clr.SRED}#{i}: {change[0]}:{Clr.RST}')
+print(
+f'{Clr.SRED} Expected "{change[1].expected}"{Clr.RST}'
+)
 with open(change[0]) as infile:
 lines = infile.read().splitlines()
 line = lines[change[1].line_number]
-print(f'{CLRRED} Found "{line}"{CLREND}')
-print(CLRRED +
+print(f'{Clr.SRED} Found "{line}"{Clr.RST}')
+print(Clr.SRED +
 f'All {len(auto_changes)} errors are auto-fixable;'
 ' run tools/update_project --fix to apply corrections.' +
-CLREND)
+Clr.RST)
 sys.exit(255)
 else:
 for i, change in enumerate(auto_changes):
-print(f'{CLRBLU}Correcting file: {change[0]}{CLREND}')
+print(f'{Clr.SBLU}Correcting file: {change[0]}{Clr.RST}')
 with open(change[0]) as infile:
 lines = infile.read().splitlines()
 lines[change[1].line_number] = change[1].expected
@@ -269,9 +269,9 @@ class App:
 # Could just ignore these but it probably means I intended
 # to save something and forgot.
 if '/.#' in fsrc:
-print(f'{CLRRED}'
+print(f'{Clr.SRED}'
 f'ERROR: Found an unsaved emacs file: "{fsrc}"'
-f'{CLREND}')
+f'{Clr.RST}')
 sys.exit(255)

 fname = 'src/ballistica' + fsrc
@@ -393,8 +393,8 @@ class App:
 'tools/devtool', 'tools/version_utils', 'tools/vmshell'
 ]:
 if not contents.startswith('#!/usr/bin/env python3.7'):
-print(f'{CLRRED}Incorrect shebang (first line) for '
-f'{fname}.{CLREND}')
+print(f'{Clr.SRED}Incorrect shebang (first line) for '
+f'{fname}.{Clr.RST}')
 sys.exit(255)
 else:
 copyrightline = 0
@@ -482,9 +482,9 @@ class App:
 if ('__pycache__' not in root
 and os.path.basename(root) != '.vscode'):
 if '__init__.py' not in files:
-print(CLRRED +
+print(Clr.SRED +
 'Error: no __init__.py in package dir: ' + root +
-CLREND)
+Clr.RST)
 sys.exit(255)

 def _update_visual_studio_project(self, fname: str, src_root: str) -> None:
@@ -630,40 +630,40 @@ class App:
 # Make sure none of our sync targets have been mucked with since
 # their last sync.
 if os.system('tools/snippets sync check') != 0:
-print(CLRRED + 'Sync check failed; you may need to run "sync".' +
-CLREND)
+print(Clr.SRED + 'Sync check failed; you may need to run "sync".' +
+Clr.RST)
 sys.exit(255)

 def _update_assets_makefile(self) -> None:
 if os.path.exists('tools/update_assets_makefile'):
 if os.system('tools/update_assets_makefile' + self._checkarg) != 0:
-print(CLRRED + 'Error checking/updating assets Makefile' +
-CLREND)
+print(Clr.SRED + 'Error checking/updating assets Makefile' +
+Clr.RST)
 sys.exit(255)

 def _update_generated_code_makefile(self) -> None:
 if os.path.exists('tools/update_generated_code_makefile'):
 if os.system('tools/update_generated_code_makefile' +
 self._checkarg) != 0:
-print(CLRRED +
+print(Clr.SRED +
 'Error checking/updating generated-code Makefile' +
-CLREND)
+Clr.RST)
 sys.exit(255)

 def _update_resources_makefile(self) -> None:
 if os.path.exists('tools/update_resources_makefile'):
 if os.system('tools/update_resources_makefile' +
 self._checkarg) != 0:
-print(CLRRED + 'Error checking/updating resources Makefile' +
-CLREND)
+print(Clr.SRED + 'Error checking/updating resources Makefile' +
+Clr.RST)
 sys.exit(255)

 def _update_python_enums_module(self) -> None:
 if os.path.exists('tools/update_python_enums_module'):
 if os.system('tools/update_python_enums_module' +
 self._checkarg) != 0:
-print(CLRRED + 'Error checking/updating python enums module' +
-CLREND)
+print(Clr.SRED +
+'Error checking/updating python enums module' + Clr.RST)
 sys.exit(255)
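For context, a centralized color class of this kind is typically just a namespace of ANSI escape strings that can be blanked when output is not going to a terminal. The sketch below is a hypothetical stand-in, not the actual efro.terminal implementation; only the attribute names (SMAG, SGRN, SBLU, SRED, RST) and their escape values, matched to the constants the diff removes, are taken from the commit, and the isatty fallback is an assumption about why such a helper is centralized.

import sys


class Clr:
    """Hypothetical minimal stand-in for efro.terminal.Clr.

    Escape codes are emitted only when stdout is a terminal, so piped
    or logged output stays free of control characters.
    """
    _color = sys.stdout.isatty()
    SMAG = '\033[95m' if _color else ''  # Bright magenta (was CLRHDR).
    SGRN = '\033[92m' if _color else ''  # Bright green (was CLRGRN).
    SBLU = '\033[94m' if _color else ''  # Bright blue (was CLRBLU).
    SRED = '\033[91m' if _color else ''  # Bright red (was CLRRED).
    RST = '\033[0m' if _color else ''    # Reset (was CLREND).


print(f'{Clr.SBLU}Efrocache update successful!{Clr.RST}')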