diff --git a/.idea/dictionaries/ericf.xml b/.idea/dictionaries/ericf.xml
index 9d1a8c2b..a06d8cd4 100644
--- a/.idea/dictionaries/ericf.xml
+++ b/.idea/dictionaries/ericf.xml
@@ -207,6 +207,8 @@
calced
calcing
calcs
+ callargs
+ callname
callnode
cameraflash
camerashake
@@ -292,6 +294,8 @@
compat
compileall
compilelocations
+ completeargs
+ completecmd
compounddict
compoundlist
configerror
@@ -619,6 +623,7 @@
ftxt
fullclean
fullcleanlist
+ fullfilepath
fullpath
fullprice
fullscreen
@@ -783,6 +788,7 @@
incrementbuild
indentfilter
indentstr
+ indexfile
indexfilename
indicies
indstr
@@ -919,6 +925,7 @@
lnumend
lnumorig
lnums
+ loadpackage
localconfig
locationgroup
locationgroups
@@ -1075,6 +1082,7 @@
newdbpath
newnode
newpath
+ nextcall
nextfilenum
nextlevel
nfoo
@@ -1148,6 +1156,8 @@
packagedir
packagedirs
packagename
+ packagepath
+ packagepathstr
packageversion
painttxtattr
palmos
@@ -1757,6 +1767,8 @@
updatethencheck
updatethencheckfast
updatethencheckfull
+ uploadargs
+ uploadcmd
uptime
useragent
useragentstring
diff --git a/tools/cloudtool b/tools/cloudtool
index e654a537..de6e0307 100755
--- a/tools/cloudtool
+++ b/tools/cloudtool
@@ -27,10 +27,9 @@ from __future__ import annotations
import sys
import os
-from enum import Enum
from pathlib import Path
from typing import TYPE_CHECKING
-from dataclasses import dataclass
+from dataclasses import dataclass, asdict
import json
import subprocess
import tempfile
@@ -38,7 +37,7 @@ import tempfile
import requests
if TYPE_CHECKING:
- from typing import Optional, Dict, Any, Tuple, List, BinaryIO
+ from typing import Optional, Dict, Tuple, List, BinaryIO
# Version is sent to the master-server with all commands. Can be incremented
# if we need to change behavior server-side to go along with client changes.
@@ -50,7 +49,6 @@ TOOL_NAME = 'cloudtool'
MASTER_SERVER_ADDRESS = ('http://localhost:23524'
if os.environ.get('CLOUDTOOL_LOCAL') == '1' else
'https://bamaster.appspot.com')
-USER_AGENT_STRING = 'cloudtool'
CACHE_DIR = Path('.cache/cloudtool')
CACHE_DATA_PATH = Path(CACHE_DIR, 'state')
@@ -60,17 +58,6 @@ CLRBLU = '\033[94m' # Blue.
CLRRED = '\033[91m' # Red.
CLREND = '\033[0m' # End.
-CMD_LOGIN = 'login'
-CMD_LOGOUT = 'logout'
-CMD_ASSETPACK = 'assetpack'
-CMD_HELP = 'help'
-
-# Note to self: keep this synced with server-side logic.
-ASSET_PACKAGE_NAME_VALID_CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789_'
-ASSET_PACKAGE_NAME_MAX_LENGTH = 32
-ASSET_PATH_VALID_CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789_'
-ASSET_PATH_MAX_LENGTH = 128
-
@dataclass
class StateData:
@@ -78,43 +65,39 @@ class StateData:
login_token: Optional[str] = None
+# noinspection PyUnresolvedReferences
@dataclass
class Response:
- """Response data from the master server for a command."""
- message: Optional[str]
- error: Optional[str]
- data: Any
+ """Response sent from the cloudtool server to the client.
+
+ Attributes:
+ message: If present, client should print this message.
+ error: If present, client should abort with this error message.
+        loadpackage: If present, client should load the package at a
+            location on disk (arg1) and push its manifest to a server command
+            (arg2) with the provided args (arg3). The manifest should be
+            added to the args as 'manifest'. Arg4 is the name of the index
+            file whose contents should be included with the manifest.
+        upload: If present, client should upload the requested files (arg1)
+            from the loaded package to a server command (arg2) with the
+            provided args (arg3). Arg4 and arg5 are a follow-up server
+            command and args which should be called once all uploads finish.
+ login: If present, a token that should be stored client-side and passed
+ with subsequent commands.
+ logout: If True, any existing client-side token should be discarded.
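+
+    A raw payload from the server is a dict keyed by these same names; e.g.
+    a hypothetical {'message': 'Logged in.', 'login': 'abc123'} payload
+    would cause the client to print the message and store the login token.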
+ """
+ message: Optional[str] = None
+ error: Optional[str] = None
+ loadpackage: Optional[Tuple[str, str, Dict, str]] = None
+ upload: Optional[Tuple[List[str], str, Dict, str, Dict]] = None
+ login: Optional[str] = None
+ logout: bool = False
class CleanError(Exception):
"""Exception resulting in a clean error string print and exit."""
-class AssetType(Enum):
- """Types for asset files."""
- TEXTURE = 'texture'
- SOUND = 'sound'
- DATA = 'data'
-
-
-ASSET_SOURCE_FILE_EXTS = {
- AssetType.TEXTURE: 'png',
- AssetType.SOUND: 'wav',
- AssetType.DATA: 'yaml',
-}
-
-
-class Asset:
- """Data for a single asset."""
-
- def __init__(self, package: AssetPackage, assettype: AssetType,
- path: str) -> None:
- self.assettype = assettype
- self.path = path
- self.filepath = os.path.join(
- package.path, path + '.' + ASSET_SOURCE_FILE_EXTS[assettype])
-
-
def get_tz_offset_seconds() -> float:
"""Return the offset between utc and local time in seconds."""
import time
@@ -125,121 +108,60 @@ def get_tz_offset_seconds() -> float:
return utc_offset
-# Note to self: keep this synced with server-side validation func...
-def validate_asset_package_name(name: str) -> None:
- """Throw an exception on an invalid asset-package name."""
- if len(name) > ASSET_PACKAGE_NAME_MAX_LENGTH:
- raise CleanError(f'Asset package name is too long: "{name}"')
- if not name:
- raise CleanError(f'Asset package name cannot be empty.')
- if name[0] == '_' or name[-1] == '_':
- raise CleanError(
- f'Asset package name cannot start or end with underscore.')
- if '__' in name:
- raise CleanError(
- f'Asset package name cannot contain sequential underscores.')
- for char in name:
- if char not in ASSET_PACKAGE_NAME_VALID_CHARS:
- raise CleanError(
- f'Found invalid char "{char}" in asset package name "{name}".')
+@dataclass
+class File:
+ """Represents a single file within a Package."""
+ filehash: str
+ filesize: int
-# Note to self: keep this synced with server-side validation func...
-def validate_asset_path(path: str) -> None:
- """Throw an exception on an invalid asset path."""
- if len(path) > ASSET_PATH_MAX_LENGTH:
- raise CleanError(f'Asset path is too long: "{path}"')
- names = path.split('/')
- for name in names:
- if not name:
- raise CleanError(f'Found empty component in asset path "{path}".')
- for char in name:
- if char not in ASSET_PATH_VALID_CHARS:
- raise CleanError(
- f'Found invalid char "{char}" in asset path "{path}".')
-
-
-class AssetPackage:
- """Data for local or remote asset packages."""
+class Package:
+ """Represents a directory of files with some common purpose."""
def __init__(self) -> None:
- self.assets: Dict[str, Asset] = {}
self.path = Path('')
- self.name = 'untitled'
- self.index = ''
+ self.files: Dict[str, File] = {}
@classmethod
- def load_from_disk(cls, path: Path) -> AssetPackage:
- """Load an asset package from files on disk."""
- import yaml
- indexfilename = 'assetpackage.yaml'
- package = AssetPackage()
+ def load_from_disk(cls, path: Path) -> Package:
+ """Create a package populated from a directory on disk."""
+ package = Package()
if not path.is_dir():
raise CleanError(f'Directory not found: "{path}"')
-
package.path = path
- with open(Path(path, indexfilename)) as infile:
- package.index = infile.read()
- index = yaml.safe_load(package.index)
- if not isinstance(index, dict):
- raise CleanError(f'Root dict not found in {indexfilename}')
- # Pull our name from the index file.
- # (NOTE: can probably just let the server do this)
- name = index.get('name')
- if not isinstance(name, str):
- raise CleanError(f'No "name" str found in {indexfilename}')
- validate_asset_package_name(name)
- package.name = name
+ packagepathstr = str(path)
- # Build our list of Asset objs from the index.
- assets = index.get('assets')
- if not isinstance(assets, dict):
- raise CleanError(f'No "assets" dict found in {indexfilename}')
- for assetpath, assetdata in assets.items():
- validate_asset_path(assetpath)
- if not isinstance(assetdata, dict):
- raise CleanError(
- f'Invalid asset data for {assetpath} in {indexfilename}')
- assettypestr = assetdata.get('type')
- if not isinstance(assettypestr, str):
- raise CleanError(
- f'Invalid asset type for {assetpath} in {indexfilename}')
- assettype = AssetType(assettypestr)
- package.assets[assetpath] = Asset(package, assettype, assetpath)
+ paths: List[str] = []
- return package
+ # Build the full list of package-relative paths.
+        for dirpath, _dirnames, filenames in os.walk(path):
+            for filename in filenames:
+                fullname = os.path.join(dirpath, filename)
+                assert fullname.startswith(packagepathstr)
+                paths.append(fullname[len(packagepathstr) + 1:])
- def get_manifest(self) -> Dict:
- """Build a manifest of hashes and other info for the package."""
import hashlib
from concurrent.futures import ThreadPoolExecutor
from multiprocessing import cpu_count
- manifest: Dict = {'name': self.name, 'files': {}, 'index': self.index}
-
- def _get_asset_info(iasset: Asset) -> Tuple[Asset, Dict]:
+ def _get_file_info(filepath: str) -> Tuple[str, File]:
sha = hashlib.sha256()
- with open(iasset.filepath, 'rb') as infile:
+ fullfilepath = os.path.join(packagepathstr, filepath)
+ if not os.path.isfile(fullfilepath):
+ raise Exception(f'File not found: "{fullfilepath}"')
+ with open(fullfilepath, 'rb') as infile:
filebytes = infile.read()
filesize = len(filebytes)
sha.update(filebytes)
- if not os.path.isfile(iasset.filepath):
- raise Exception(f'Asset file not found: "{iasset.filepath}"')
- info_out: Dict = {
- 'hash': sha.hexdigest(),
- 'size': filesize,
- 'ext': ASSET_SOURCE_FILE_EXTS[iasset.assettype]
- }
- return iasset, info_out
+ return (filepath, File(filehash=sha.hexdigest(),
+ filesize=filesize))
- # Use all procs to hash files for extra speedy goodness.
+ # Now use all procs to hash the files efficiently.
with ThreadPoolExecutor(max_workers=cpu_count()) as executor:
- for result in executor.map(_get_asset_info, self.assets.values()):
- asset, info = result
- manifest['files'][asset.path] = info
+ package.files = dict(executor.map(_get_file_info, paths))
- return manifest
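+        # (e.g. a hypothetical package containing 'maps/foo.png' ends up
+        # with files={'maps/foo.png': File(filehash='1a2b...', filesize=123)})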
+ return package
class App:
@@ -247,6 +169,7 @@ class App:
def __init__(self) -> None:
self._state = StateData()
+ self._package: Optional[Package] = None
def run(self) -> None:
"""Run the tool."""
@@ -264,39 +187,29 @@ class App:
raise CleanError('"make prereqs" check failed. '
'Install missing requirements and try again.')
- self._load_cache()
+ self._load_state()
if len(sys.argv) < 2:
print(f'{CLRRED}You must provide one or more arguments.{CLREND}')
- self.do_misc_command(['help'])
+ self.run_command(['help'])
raise CleanError()
- cmd = sys.argv[1]
- if cmd == CMD_LOGIN:
- self.do_login()
- elif cmd == CMD_LOGOUT:
- self.do_logout()
- elif (cmd == CMD_ASSETPACK and len(sys.argv) > 2
- and sys.argv[2] == 'put'):
- self.do_assetpack_put()
- else:
- # For all other commands, simply pass them to the server verbatim.
- self.do_misc_command(sys.argv[1:])
+ # Simply pass all args to the server and let it do the thing.
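+        # (e.g. a hypothetical 'cloudtool login <code>' invocation just
+        # becomes a 'toplevel' server call with args ['login', '<code>'])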
+ self.run_command(sys.argv[1:])
- self._save_cache()
+ self._save_state()
- def _load_cache(self) -> None:
+ def _load_state(self) -> None:
if not os.path.exists(CACHE_DATA_PATH):
return
try:
with open(CACHE_DATA_PATH, 'r') as infile:
self._state = StateData(**json.loads(infile.read()))
except Exception:
- print(CLRRED +
- f'Error loading {TOOL_NAME} data; resetting to defaults.' +
- CLREND)
+ print(f'{CLRRED}Error loading {TOOL_NAME} data;'
+ f' resetting to defaults.{CLREND}')
- def _save_cache(self) -> None:
+ def _save_state(self) -> None:
if not CACHE_DIR.exists():
CACHE_DIR.mkdir(parents=True, exist_ok=True)
with open(CACHE_DATA_PATH, 'w') as outfile:
@@ -321,16 +234,16 @@ class App:
response_raw_2.raise_for_status() # Except if anything went wrong.
assert isinstance(response_raw_2.content, bytes)
output = json.loads(response_raw_2.content.decode())
- assert isinstance(output, dict)
- assert isinstance(output['m'], (str, type(None)))
- assert isinstance(output['e'], (str, type(None)))
- assert 'd' in output
- response = Response(message=output['m'],
- data=output['d'],
- error=output['e'])
- # Handle errors and print messages;
- # (functionality common to all command types).
+ # Create a default Response and fill in only attrs we're aware of.
+ # (server may send attrs unknown to older clients)
+ response = Response()
+ for key, val in output.items():
+ if hasattr(response, key):
+ setattr(response, key, val)
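+        # (the hasattr() check is what quietly drops any fields this older
+        # client doesn't know about)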
+
+ # Handle common responses (can move these out of here at some point)
+
if response.error is not None:
raise CleanError(response.error)
@@ -339,93 +252,93 @@ class App:
return response
- def do_login(self) -> None:
- """Run the login command."""
+ def _upload_file(self, filename: str, call: str, args: Dict) -> None:
+ print(f'{CLRBLU}Uploading {filename}{CLREND}', flush=True)
+ assert self._package is not None
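+        # Gzip the file into a temp dir before uploading (presumably to cut
+        # transfer size; the server side is assumed to decompress it).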
+ with tempfile.TemporaryDirectory() as tempdir:
+ srcpath = Path(self._package.path, filename)
+ gzpath = Path(tempdir, 'file.gz')
+ subprocess.run(f'gzip --stdout "{srcpath}" > "{gzpath}"',
+ shell=True,
+ check=True)
+ with open(gzpath, 'rb') as infile:
+ putfiles: Dict = {'file': infile}
+ _response = self._servercmd(
+ call,
+ args,
+ files=putfiles,
+ )
- if len(sys.argv) != 3:
- raise CleanError('Expected a login code.')
+ def _handle_loadpackage_response(
+ self, response: Response) -> Optional[Tuple[str, Dict]]:
+ assert response.loadpackage is not None
+ assert len(response.loadpackage) == 4
+ (packagepath, callname, callargs, indexfile) = response.loadpackage
+ assert isinstance(packagepath, str)
+ assert isinstance(callname, str)
+ assert isinstance(callargs, dict)
+ assert isinstance(indexfile, str)
+ self._package = Package.load_from_disk(Path(packagepath))
- login_code = sys.argv[2]
- response = self._servercmd('login', {'c': login_code})
+ # Make the remote call they gave us with the package
+ # manifest added in.
+ with Path(self._package.path, indexfile).open() as infile:
+ index = infile.read()
+ callargs['manifest'] = {
+ 'index': index,
+ 'files': {
+ key: asdict(val)
+ for key, val in self._package.files.items()
+ }
+ }
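+        # The manifest ends up looking roughly like this (hypothetical
+        # values, for illustration only):
+        #   {'index': '<index file contents>',
+        #    'files': {'maps/foo.png': {'filehash': '1a2b...',
+        #                               'filesize': 123}}}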
+ return callname, callargs
- # If the command returned cleanly, we should have a token we can use
- # to log in.
- token = response.data['logintoken']
- assert isinstance(token, str)
+ def _handle_upload_response(
+ self, response: Response) -> Optional[Tuple[str, Dict]]:
+ from concurrent.futures import ThreadPoolExecutor
+ assert response.upload is not None
+ assert self._package is not None
+ assert len(response.upload) == 5
+ (filenames, uploadcmd, uploadargs, completecmd,
+ completeargs) = response.upload
+ assert isinstance(filenames, list)
+ assert isinstance(uploadcmd, str)
+ assert isinstance(uploadargs, dict)
+ assert isinstance(completecmd, str)
+ assert isinstance(completeargs, dict)
- aname = response.data['accountname']
- assert isinstance(aname, str)
+ def _do_filename(filename: str) -> None:
+ self._upload_file(filename, uploadcmd, uploadargs)
- print(f'{CLRGRN}Now logged in as {aname}.{CLREND}')
- self._state.login_token = token
+ # Here we can run uploads concurrently if that goes faster...
+        # (should keep an eye on this to make sure it's thread-safe
+ # and behaves itself)
+ with ThreadPoolExecutor(max_workers=4) as executor:
+ # Convert the generator to a list to trigger any
+ # exceptions that occurred.
+ list(executor.map(_do_filename, filenames))
- def do_logout(self) -> None:
- """Run the logout command."""
- self._state.login_token = None
- print(f'{CLRGRN}Cloudtool is now logged out.{CLREND}')
+ # Lastly, run the 'upload complete' command we were passed.
+ return completecmd, completeargs
- def do_assetpack_put(self) -> None:
- """Run an assetpackput command."""
+ def run_command(self, args: List[str]) -> None:
+ """Run a command to completion."""
- if len(sys.argv) != 4:
- raise CleanError('Expected a path to an assetpackage directory.')
+ nextcall: Optional[Tuple[str, Dict]] = ('toplevel', {'a': args})
- path = Path(sys.argv[3])
- package = AssetPackage.load_from_disk(path)
+ # Now talk to the server in a loop until they are done with us.
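+        # (a typical exchange might go: toplevel -> loadpackage follow-up
+        # -> upload follow-up -> completion call -> done, though the exact
+        # sequence is up to the server)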
+ while nextcall is not None:
+ response = self._servercmd(*nextcall)
+ nextcall = None
- # Send the server a manifest of everything we've got locally.
- manifest = package.get_manifest()
- response = self._servercmd('assetpackputmanifest', {'m': manifest})
-
- # The server should give us a version and a set of files it wants.
- # Upload each of those.
- upload_files: List[str] = response.data['upload_files']
- assert isinstance(upload_files, list)
- assert all(isinstance(f, str) for f in upload_files)
- version = response.data['package_version']
- assert isinstance(version, str)
- self._assetpack_put_upload(package, version, upload_files)
-
- # Lastly, send a 'finish' command - this will prompt a response
- # with info about the completed package.
- _response = self._servercmd('assetpackputfinish', {
- 'packageversion': version,
- })
-
- def _assetpack_put_upload(self, package: AssetPackage, version: str,
- files: List[str]) -> None:
-
- # Upload the files one at a time.
- # (we can potentially do this in parallel in the future).
- for fnum, fname in enumerate(files):
- print(
- f'{CLRBLU}Uploading file {fnum+1} of {len(files)}: '
- f'{fname}{CLREND}',
- flush=True)
- with tempfile.TemporaryDirectory() as tempdir:
- asset = package.assets[fname]
- srcpath = Path(asset.filepath)
- gzpath = Path(tempdir, 'file.gz')
- subprocess.run(f'gzip --stdout "{srcpath}" > "{gzpath}"',
- shell=True,
- check=True)
- with open(gzpath, 'rb') as infile:
- putfiles: Dict = {'file': infile}
- _response = self._servercmd(
- 'assetpackputupload',
- {
- 'packageversion': version,
- 'path': asset.path
- },
- files=putfiles,
- )
-
- def do_misc_command(self, args: List[str]) -> None:
- """Run a miscellaneous command."""
-
- # We don't do anything special with the response here; the normal
- # error-handling/message-printing is all that happens.
- self._servercmd('misc', {'a': args})
+ if response.loadpackage is not None:
+ nextcall = self._handle_loadpackage_response(response)
+ if response.upload is not None:
+ nextcall = self._handle_upload_response(response)
+ if response.login is not None:
+ self._state.login_token = response.login
+ if response.logout:
+ self._state.login_token = None
if __name__ == '__main__':
diff --git a/tools/snippets b/tools/snippets
index 2d1dc1a2..b4e23adc 100755
--- a/tools/snippets
+++ b/tools/snippets
@@ -327,7 +327,7 @@ def gen_fulltest_buildfile_linux() -> None:
def resize_image() -> None:
- """Resize an image and saves it to a new location.
+ """Resize an image and save it to a new location.
args: xres, yres, src, dst
"""