Mirror of https://github.com/RYDE-WORK/ballistica.git (synced 2026-02-02 13:23:27 +08:00)
latest bacloud work
parent 53221397ba
commit 9eff8dff65

tools/bacloud (122 changed lines)
@@ -86,6 +86,8 @@ class Response:
         be written to the client. This should only be used for relatively
         small files as they are all included inline as part of the response.
       deletes: If present, file paths that should be deleted on the client.
+      dirpruneempty: If present, all empty dirs under this one should be
+        removed.
       endmessage: If present, a message that should be printed after all other
         response processing is done.
       endcommand: If present, this command is run with these args at the end
@@ -100,6 +102,7 @@ class Response:
     uploads_inline: Optional[List[str]] = None
     downloads_inline: Optional[Dict[str, str]] = None
     deletes: Optional[List[str]] = None
+    dirpruneempty: Optional[str] = None
    endmessage: Optional[str] = None
    endcommand: Optional[Tuple[str, Dict]] = None
 
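
For orientation, here is a minimal sketch (not taken from the repo) of how a server reply might populate these fields; the Response dataclass below is trimmed to just the attributes visible in this hunk, and the sample reply values are invented.

from dataclasses import dataclass
from typing import Dict, List, Optional, Tuple


@dataclass
class Response:
    uploads_inline: Optional[List[str]] = None
    downloads_inline: Optional[Dict[str, str]] = None
    deletes: Optional[List[str]] = None
    dirpruneempty: Optional[str] = None
    endmessage: Optional[str] = None
    endcommand: Optional[Tuple[str, Dict]] = None


# Invented sample reply; absent fields simply stay None.
reply = {'deletes': ['old/cache.bin'],
         'dirpruneempty': 'old',
         'endmessage': 'Cleanup complete.'}
response = Response(**reply)
assert response.dirpruneempty == 'old'
assert response.uploads_inline is None
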
@@ -119,41 +122,44 @@ def get_tz_offset_seconds() -> float:
 
 
 @dataclass
-class PackageFile:
-    """Represents a single file within a Package."""
+class DirManifestFile:
+    """Represents a single file within a DirManifest."""
     filehash: str
     filesize: int
 
 
-class Package:
+class DirManifest:
     """Represents a directory of files with some common purpose."""
 
     def __init__(self) -> None:
         self.path = Path('')
-        self.files: Dict[str, PackageFile] = {}
+        self.files: Dict[str, DirManifestFile] = {}
 
     @classmethod
-    def load_from_disk(cls, path: Path) -> Package:
+    def load_from_disk(cls, path: Path) -> DirManifest:
         """Create a package populated from a directory on disk."""
-        package = Package()
-        if not path.is_dir():
-            raise CleanError(f'Directory not found: "{path}"')
+        package = DirManifest()
         package.path = path
         packagepathstr = str(path)
         paths: List[str] = []
 
-        # Build the full list of package-relative paths.
-        for basename, _dirnames, filenames in os.walk(path):
-            for filename in filenames:
-                fullname = os.path.join(basename, filename)
-                assert fullname.startswith(packagepathstr)
-                paths.append(fullname[len(packagepathstr) + 1:])
+        # Simply return empty manifests if the given path isn't a dir.
+        # (the server may intend to create it and is just asking what's
+        # there already)
+        if path.is_dir():
+            # Build the full list of package-relative paths.
+            for basename, _dirnames, filenames in os.walk(path):
+                for filename in filenames:
+                    fullname = os.path.join(basename, filename)
+                    assert fullname.startswith(packagepathstr)
+                    paths.append(fullname[len(packagepathstr) + 1:])
 
         import hashlib
         from concurrent.futures import ThreadPoolExecutor
         from multiprocessing import cpu_count
 
-        def _get_file_info(filepath: str) -> Tuple[str, PackageFile]:
+        def _get_file_info(filepath: str) -> Tuple[str, DirManifestFile]:
            sha = hashlib.sha256()
            fullfilepath = os.path.join(packagepathstr, filepath)
            if not os.path.isfile(fullfilepath):
@@ -163,7 +169,8 @@ class Package:
             filesize = len(filebytes)
             sha.update(filebytes)
             return (filepath,
-                    PackageFile(filehash=sha.hexdigest(), filesize=filesize))
+                    DirManifestFile(filehash=sha.hexdigest(),
+                                    filesize=filesize))
 
         # Now use all procs to hash the files efficiently.
         with ThreadPoolExecutor(max_workers=cpu_count()) as executor:
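
As an aside, the work that the renamed DirManifest.load_from_disk() performs can be pictured with this standalone sketch: hash every file under a directory and record its size, returning an empty result when the directory is missing. The helper name build_manifest and its return shape are illustrative only, not part of the diff.

import hashlib
import os
from pathlib import Path
from typing import Dict, Tuple


def build_manifest(path: Path) -> Dict[str, Tuple[str, int]]:
    """Return {relative_path: (sha256_hex, size)} for files under path."""
    out: Dict[str, Tuple[str, int]] = {}
    if not path.is_dir():
        # Mirrors the new behavior: a missing dir yields an empty manifest.
        return out
    root = str(path)
    for basename, _dirnames, filenames in os.walk(path):
        for filename in filenames:
            fullname = os.path.join(basename, filename)
            with open(fullname, 'rb') as infile:
                data = infile.read()
            relpath = fullname[len(root) + 1:]
            out[relpath] = (hashlib.sha256(data).hexdigest(), len(data))
    return out


print(build_manifest(Path('.')))
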
@@ -287,49 +294,24 @@ class App:
             files=putfiles,
         )
 
-    def _handle_dirmanifest_response(self, response: Response) -> None:
+    def _handle_dirmanifest_response(self, dirmanifest: str) -> None:
         from dataclasses import asdict
-        assert response.dirmanifest is not None
-        # assert len(response.dirmanifest) == 2
-        # (packagepath, indexfile) = response.dirmanifest
-        assert isinstance(response.dirmanifest, str)
-        # assert isinstance(callname, str)
-        # assert isinstance(callargs, dict)
-        # assert indexfile is None or isinstance(indexfile, str)
-        package = Package.load_from_disk(Path(response.dirmanifest))
-
-        # Make the remote call they gave us with the package
-        # manifest added in.
-        # if indexfile is not None:
-        #     with Path(package.path, indexfile).open() as infile:
-        #         index = infile.read()
-        # else:
-        #     index = ''
-        # callargs['manifest'] = {
-        #     'index': index,
-        #     'files': {key: asdict(val)
-        #               for key, val in package.files.items()}
-        # }
+        manifest = DirManifest.load_from_disk(Path(dirmanifest))
 
         # Store the manifest to be included with our next called command.
         self._end_command_args['manifest'] = {
             'files': {key: asdict(val)
-                      for key, val in package.files.items()}
+                      for key, val in manifest.files.items()}
         }
-        # return callname, callargs
 
-    def _handle_uploads(self, response: Response) -> None:
+    def _handle_uploads(self, uploads: Tuple[List[str], str, Dict]) -> None:
         from concurrent.futures import ThreadPoolExecutor
-        assert response.uploads is not None
-        assert len(response.uploads) == 3
-        filenames, uploadcmd, uploadargs = response.uploads
+        assert len(uploads) == 3
+        filenames, uploadcmd, uploadargs = uploads
         assert isinstance(filenames, list)
         assert isinstance(uploadcmd, str)
         assert isinstance(uploadargs, dict)
 
-        # assert isinstance(completecmd, str)
-        # assert isinstance(completeargs, dict)
-
         def _do_filename(filename: str) -> None:
             self._upload_file(filename, uploadcmd, uploadargs)
 
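
The manifest ends up attached to the next called command via dataclasses.asdict(). A small illustrative example of that serialization step, using a made-up file entry; only the DirManifestFile shape comes from the diff.

import json
from dataclasses import asdict, dataclass


@dataclass
class DirManifestFile:
    filehash: str
    filesize: int


# Invented sample entry; real hashes come from DirManifest.load_from_disk().
files = {'README.md': DirManifestFile(filehash='ab12cd34', filesize=512)}
payload = {'manifest': {'files': {key: asdict(val)
                                  for key, val in files.items()}}}
print(json.dumps(payload))
# {"manifest": {"files": {"README.md": {"filehash": "ab12cd34", "filesize": 512}}}}
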
@@ -341,33 +323,29 @@ class App:
            # exceptions that occurred.
            list(executor.map(_do_filename, filenames))
 
-        # Lastly, run the 'upload complete' command we were passed.
-        # return completecmd, completeargs
-
-    def _handle_downloads_inline(self, response: Response) -> None:
+    def _handle_downloads_inline(self, downloads_inline: Dict[str,
+                                                               str]) -> None:
         """Handle inline file data to be saved to the client."""
         import base64
         import zlib
-        assert response.downloads_inline is not None
-        for fname, fdata in response.downloads_inline.items():
+        for fname, fdata in downloads_inline.items():
+            os.makedirs(os.path.dirname(fname), exist_ok=True)
             data_zipped = base64.b64decode(fdata)
             data = zlib.decompress(data_zipped)
             with open(fname, 'wb') as outfile:
                 outfile.write(data)
 
-    def _handle_deletes(self, response: Response) -> None:
+    def _handle_deletes(self, deletes: List[str]) -> None:
         """Handle file deletes."""
-        assert response.deletes is not None
-        for fname in response.deletes:
+        for fname in deletes:
             os.unlink(fname)
 
-    def _handle_uploads_inline(self, response: Response) -> None:
+    def _handle_uploads_inline(self, uploads_inline: List[str]) -> None:
         """Handle uploading files inline."""
         import base64
         import zlib
-        assert response.uploads_inline is not None
         files: Dict[str, str] = {}
-        for filepath in response.uploads_inline:
+        for filepath in uploads_inline:
            if not os.path.exists(filepath):
                raise CleanError(f'File not found: {filepath}')
            with open(filepath, 'rb') as infile:
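
The downloads_inline handler implies a zlib-then-base64 wire format. Here is a minimal round-trip sketch under that assumption; the encode side is not shown in the diff and is assumed here, and the file names are invented.

import base64
import os
import zlib

# "Server" side (assumed): compress then base64-encode the file bytes.
original = b'hello bacloud\n'
wire = base64.b64encode(zlib.compress(original)).decode()

# "Client" side, matching the handler above: make parent dirs, decode, write.
fname = os.path.join('tmp_downloads', 'sub', 'hello.txt')
os.makedirs(os.path.dirname(fname), exist_ok=True)
with open(fname, 'wb') as outfile:
    outfile.write(zlib.decompress(base64.b64decode(wire)))

with open(fname, 'rb') as infile:
    assert infile.read() == original
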
@@ -377,6 +355,20 @@ class App:
             files[filepath] = data_base64
         self._end_command_args['uploads_inline'] = files
 
+    def _handle_dirpruneempty(self, prunedir: str) -> None:
+        """Handle pruning empty directories."""
+        # Walk the tree bottom-up so we can properly kill recursive empty dirs.
+        for basename, dirnames, filenames in os.walk(prunedir, topdown=False):
+            # It seems that child dirs we kill during the walk are still
+            # listed when the parent dir is visited, so lets make sure
+            # to only acknowledge still-existing ones.
+            dirnames = [
+                d for d in dirnames
+                if os.path.exists(os.path.join(basename, d))
+            ]
+            if not dirnames and not filenames and basename != prunedir:
+                os.rmdir(basename)
+
     def run_user_command(self, args: List[str]) -> None:
         """Run a single user command to completion."""
 
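
The new _handle_dirpruneempty() walk can be exercised on its own. This self-contained sketch mirrors its bottom-up pruning logic; the directory and file names are invented for the demo.

import os

# Build a tiny tree: empty nested dirs plus one file that should survive.
os.makedirs('tmp_prune/a/b/c', exist_ok=True)
with open('tmp_prune/keep.txt', 'w') as outfile:
    outfile.write('keep me\n')

prunedir = 'tmp_prune'
for basename, dirnames, filenames in os.walk(prunedir, topdown=False):
    # Dirs removed earlier in the bottom-up walk can still be listed here,
    # so only count ones that still exist (same guard as the handler).
    dirnames = [d for d in dirnames
                if os.path.exists(os.path.join(basename, d))]
    if not dirnames and not filenames and basename != prunedir:
        os.rmdir(basename)

assert os.listdir(prunedir) == ['keep.txt']
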
@@ -392,15 +384,17 @@ class App:
         if response.logout:
             self._state.login_token = None
         if response.dirmanifest is not None:
-            self._handle_dirmanifest_response(response)
+            self._handle_dirmanifest_response(response.dirmanifest)
         if response.uploads_inline is not None:
-            self._handle_uploads_inline(response)
+            self._handle_uploads_inline(response.uploads_inline)
         if response.uploads is not None:
-            self._handle_uploads(response)
+            self._handle_uploads(response.uploads)
         if response.downloads_inline:
-            self._handle_downloads_inline(response)
+            self._handle_downloads_inline(response.downloads_inline)
         if response.deletes:
-            self._handle_deletes(response)
+            self._handle_deletes(response.deletes)
+        if response.dirpruneempty:
+            self._handle_dirpruneempty(response.dirpruneempty)
         if response.endmessage is not None:
             print(response.endmessage, flush=True)
         if response.endcommand is not None:
@@ -47,8 +47,8 @@ def build_apple(arch: str, debug: bool = False) -> None:
     os.chdir(builddir)
 
     # TEMP: Check out a particular commit while the branch head is broken.
-    efrotools.run('git checkout 1a9c71dca298c03517e8236b81cf1d9c8c521cbf')
-    # efrotools.run(f'git checkout {PYTHON_VERSION_MAJOR}')
+    # efrotools.run('git checkout 1a9c71dca298c03517e8236b81cf1d9c8c521cbf')
+    efrotools.run(f'git checkout {PYTHON_VERSION_MAJOR}')
 
     # On mac we currently have to add the _scproxy module or urllib will
     # fail.