added testing setup

This commit is contained in:
Eric Froemling 2019-12-13 16:55:31 -08:00
parent 0124e74ab5
commit 57b3552d96
16 changed files with 310 additions and 195 deletions

1
.gitignore vendored
View File

@ -17,6 +17,7 @@ local.properties
.dmypy.json .dmypy.json
.cache .cache
.mypy_cache .mypy_cache
.pytest_cache
.mypy.ini .mypy.ini
.pycheckers .pycheckers
.flycheck-dir-locals.el .flycheck-dir-locals.el

View File

@ -60,11 +60,9 @@
<excludePattern pattern=".spinoffdata" /> <excludePattern pattern=".spinoffdata" />
<excludePattern pattern=".asset_manifest_*.json" /> <excludePattern pattern=".asset_manifest_*.json" />
<excludePattern pattern=".efrocachemap" /> <excludePattern pattern=".efrocachemap" />
<excludePattern pattern=".pytest_cache" />
</content> </content>
<orderEntry type="jdk" jdkName="Python 3.7" jdkType="Python SDK" /> <orderEntry type="jdk" jdkName="Python 3.7" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />
</component> </component>
<component name="TestRunnerService">
<option name="PROJECT_TEST_RUNNER" value="Unittests" />
</component>
</module> </module>

View File

@ -336,6 +336,7 @@
<w>dataclassutils</w> <w>dataclassutils</w>
<w>datamodule</w> <w>datamodule</w>
<w>dataname</w> <w>dataname</w>
<w>dataval</w>
<w>datetimemodule</w> <w>datetimemodule</w>
<w>datetimes</w> <w>datetimes</w>
<w>daynum</w> <w>daynum</w>
@ -1250,6 +1251,9 @@
<w>pushlist</w> <w>pushlist</w>
<w>putasset</w> <w>putasset</w>
<w>putassetmanifest</w> <w>putassetmanifest</w>
<w>putassetpack</w>
<w>putassetpackmanifest</w>
<w>putassetpackupload</w>
<w>putassetupload</w> <w>putassetupload</w>
<w>putfiles</w> <w>putfiles</w>
<w>pval</w> <w>pval</w>
@ -1291,6 +1295,7 @@
<w>pyoffs</w> <w>pyoffs</w>
<w>pypaths</w> <w>pypaths</w>
<w>pysources</w> <w>pysources</w>
<w>pytest</w>
<w>pythonpath</w> <w>pythonpath</w>
<w>pythonw</w> <w>pythonw</w>
<w>pytree</w> <w>pytree</w>
@ -1575,6 +1580,7 @@
<w>testcapimodule</w> <w>testcapimodule</w>
<w>testclass</w> <w>testclass</w>
<w>testfoo</w> <w>testfoo</w>
<w>testfooooo</w>
<w>testhelpers</w> <w>testhelpers</w>
<w>testimportmultiple</w> <w>testimportmultiple</w>
<w>testm</w> <w>testm</w>

View File

@ -320,6 +320,19 @@ pycharmfull: prereqs
mypyfull pycharm pycharmfull mypyfull pycharm pycharmfull
################################################################################
# #
# Testing #
# #
################################################################################
# Run all tests.
# Note: need to disable bytecode writing so we don't cause errors due to
# unexpected __pycache__ dirs popping up.
test: prereqs
@tools/snippets pytest tests
################################################################################ ################################################################################
# # # #
# Updating / Preflighting # # Updating / Preflighting #

View File

@ -91,12 +91,17 @@ class BaseField(DataHandler):
# more than a single field entry so this is unused) # more than a single field entry so this is unused)
self.d_key = d_key self.d_key = d_key
# IMPORTANT: this method should only be overridden in the eyes of the
# type-checker (to specify exact return types). Subclasses should instead
# override get_with_data() for doing the actual work, since that method
# may sometimes be called explicitly instead of through __get__
def __get__(self, obj: Any, type_in: Any = None) -> Any: def __get__(self, obj: Any, type_in: Any = None) -> Any:
if obj is None: if obj is None:
# when called on the type, we return the field # when called on the type, we return the field
return self return self
return self.get_with_data(obj.d_data) return self.get_with_data(obj.d_data)
# IMPORTANT: same deal as __get__() (see note above)
def __set__(self, obj: Any, value: Any) -> None: def __set__(self, obj: Any, value: Any) -> None:
assert obj is not None assert obj is not None
self.set_with_data(obj.d_data, value, error=True) self.set_with_data(obj.d_data, value, error=True)

View File

@ -71,7 +71,7 @@ class Field(BaseField, Generic[T]):
# Use default runtime get/set but let type-checker know our types. # Use default runtime get/set but let type-checker know our types.
# Note: we actually return a bound-field when accessed on # Note: we actually return a bound-field when accessed on
# a type instead of an instance, but we don't reflect that here yet # a type instead of an instance, but we don't reflect that here yet
# (need to write a mypy plugin so sub-field access works first) # (would need to write a mypy plugin so sub-field access works first)
def __get__(self, obj: Any, cls: Any = None) -> T: def __get__(self, obj: Any, cls: Any = None) -> T:
... ...
@ -125,46 +125,38 @@ class CompoundField(BaseField, Generic[TC]):
def __set__(self: CompoundField[TC], obj: Any, value: TC) -> None: def __set__(self: CompoundField[TC], obj: Any, value: TC) -> None:
... ...
else: def get_with_data(self, data: Any) -> Any:
assert self.d_key in data
return BoundCompoundValue(self.d_value, data[self.d_key])
def __get__(self, obj, cls=None): def set_with_data(self, data: Any, value: Any, error: bool) -> Any:
if obj is None: from bafoundation.entity._value import CompoundValue
# when called on the type, we return the field
return self
# (this is only ever called on entity root fields
# so no need to worry about custom d_key case)
assert self.d_key in obj.d_data
return BoundCompoundValue(self.d_value, obj.d_data[self.d_key])
def __set__(self, obj, value): # Ok here's the deal: our type checking above allows any subtype
from bafoundation.entity._value import CompoundValue # of our CompoundValue in here, but we want to be more picky than
# that. Let's check fields for equality. This way we'll allow
# assigning something like a Carentity to a Car field
# (where the data is the same), but won't allow assigning a Car
# to a Vehicle field (as Car probably adds more fields).
value1: CompoundValue
if isinstance(value, BoundCompoundValue):
value1 = value.d_value
elif isinstance(value, CompoundValue):
value1 = value
else:
raise ValueError(f"Can't assign from object type {type(value)}")
dataval = getattr(value, 'd_data', None)
if dataval is None:
raise ValueError(f"Can't assign from unbound object {value}")
if self.d_value.get_fields() != value1.get_fields():
raise ValueError(f"Can't assign to {self.d_value} from"
f" incompatible type {value.d_value}; "
f"sub-fields do not match.")
# Ok here's the deal: our type checking above allows any subtype # If we're allowing this to go through, we can simply copy the
# of our CompoundValue in here, but we want to be more picky than # data from the passed in value. The fields match so it should
# that. Let's check fields for equality. This way we'll allow # be in a valid state already.
# assigning something like a Carentity to a Car field data[self.d_key] = copy.deepcopy(dataval)
# (where the data is the same), but won't allow assigning a Car
# to a Vehicle field (as Car probably adds more fields).
value1: CompoundValue
if isinstance(value, BoundCompoundValue):
value1 = value.d_value
elif isinstance(value, CompoundValue):
value1 = value
else:
raise ValueError(
f"Can't assign from object type {type(value)}")
data = getattr(value, 'd_data', None)
if data is None:
raise ValueError(f"Can't assign from unbound object {value}")
if self.d_value.get_fields() != value1.get_fields():
raise ValueError(f"Can't assign to {self.d_value} from"
f" incompatible type {value.d_value}; "
f"sub-fields do not match.")
# If we're allowing this to go through, we can simply copy the
# data from the passed in value. The fields match so it should
# be in a valid state already.
obj.d_data[self.d_key] = copy.deepcopy(data)
class ListField(BaseField, Generic[T]): class ListField(BaseField, Generic[T]):
@ -199,25 +191,25 @@ class ListField(BaseField, Generic[T]):
# When accessed on a FieldInspector we return a sub-field FieldInspector. # When accessed on a FieldInspector we return a sub-field FieldInspector.
# When accessed on an instance we return a BoundListField. # When accessed on an instance we return a BoundListField.
@overload
def __get__(self, obj: None, cls: Any = None) -> FieldInspector:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundListField[T]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
if obj is None:
# When called on the type, we return the field.
return self
return BoundListField(self, obj.d_data[self.d_key])
if TYPE_CHECKING: if TYPE_CHECKING:
@overload
def __get__(self, obj: None, cls: Any = None) -> FieldInspector:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundListField[T]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
def __set__(self, obj: Any, value: List[T]) -> None: def __set__(self, obj: Any, value: List[T]) -> None:
... ...
def get_with_data(self, data: Any) -> Any:
return BoundListField(self, data[self.d_key])
class DictField(BaseField, Generic[TK, T]): class DictField(BaseField, Generic[TK, T]):
"""A field of values in a dict with a specified index type.""" """A field of values in a dict with a specified index type."""
@ -258,25 +250,25 @@ class DictField(BaseField, Generic[TK, T]):
# change the dict, but we can prune completely if empty (and allowed) # change the dict, but we can prune completely if empty (and allowed)
return not data and not self._store_default return not data and not self._store_default
@overload
def __get__(self, obj: None, cls: Any = None) -> DictField[TK, T]:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundDictField[TK, T]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
if obj is None:
# When called on the type, we return the field.
return self
return BoundDictField(self._keytype, self, obj.d_data[self.d_key])
if TYPE_CHECKING: if TYPE_CHECKING:
@overload
def __get__(self, obj: None, cls: Any = None) -> DictField[TK, T]:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundDictField[TK, T]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
def __set__(self, obj: Any, value: Dict[TK, T]) -> None: def __set__(self, obj: Any, value: Dict[TK, T]) -> None:
... ...
def get_with_data(self, data: Any) -> Any:
return BoundDictField(self._keytype, self, data[self.d_key])
class CompoundListField(BaseField, Generic[TC]): class CompoundListField(BaseField, Generic[TC]):
"""A field consisting of repeated instances of a compound-value. """A field consisting of repeated instances of a compound-value.
@ -323,49 +315,47 @@ class CompoundListField(BaseField, Generic[TC]):
# We can also optionally prune the whole list if empty and allowed. # We can also optionally prune the whole list if empty and allowed.
return not data and not self._store_default return not data and not self._store_default
@overload
def __get__(self, obj: None, cls: Any = None) -> CompoundListField[TC]:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundCompoundListField[TC]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
# On access we simply provide a version of ourself
# bound to our corresponding sub-data.
if obj is None:
# when called on the type, we return the field
return self
assert self.d_key in obj.d_data
return BoundCompoundListField(self, obj.d_data[self.d_key])
# Note:
# When setting the list, we tell the type-checker that we accept
# a raw list of CompoundValue objects, but at runtime we actually
# deal with BoundCompoundValue objects (see note in BoundCompoundListField)
if TYPE_CHECKING: if TYPE_CHECKING:
@overload
def __get__(self, obj: None, cls: Any = None) -> CompoundListField[TC]:
...
@overload
def __get__(self,
obj: Any,
cls: Any = None) -> BoundCompoundListField[TC]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
# Note:
# When setting the list, we tell the type-checker that we accept
# a raw list of CompoundValue objects, but at runtime we actually
# deal with BoundCompoundValue objects (see note in
# BoundCompoundListField)
def __set__(self, obj: Any, value: List[TC]) -> None: def __set__(self, obj: Any, value: List[TC]) -> None:
... ...
else: def get_with_data(self, data: Any) -> Any:
assert self.d_key in data
return BoundCompoundListField(self, data[self.d_key])
def __set__(self, obj, value): def set_with_data(self, data: Any, value: Any, error: bool) -> Any:
if not isinstance(value, list): if not isinstance(value, list):
raise TypeError( raise TypeError('CompoundListField expected list value on set.')
'CompoundListField expected list value on set.')
# Allow assigning only from a sequence of our existing children. # Allow assigning only from a sequence of our existing children.
# (could look into expanding this to other children if we can # (could look into expanding this to other children if we can
# be sure the underlying data will line up; for example two # be sure the underlying data will line up; for example two
# CompoundListFields with different child_field values should not # CompoundListFields with different child_field values should not
# be inter-assignable.) # be inter-assignable.)
if (not all(isinstance(i, BoundCompoundValue) for i in value) if (not all(isinstance(i, BoundCompoundValue) for i in value)
or not all(i.d_value is self.d_value for i in value)): or not all(i.d_value is self.d_value for i in value)):
raise ValueError('CompoundListField assignment must be a ' raise ValueError('CompoundListField assignment must be a '
'list containing only its existing children.') 'list containing only its existing children.')
obj.d_data[self.d_key] = [i.d_data for i in value] data[self.d_key] = [i.d_data for i in value]
class CompoundDictField(BaseField, Generic[TK, TC]): class CompoundDictField(BaseField, Generic[TK, TC]):
@ -420,54 +410,45 @@ class CompoundDictField(BaseField, Generic[TK, TC]):
# We can also optionally prune the whole list if empty and allowed. # We can also optionally prune the whole list if empty and allowed.
return not data and not self._store_default return not data and not self._store_default
@overload # ONLY overriding these in type-checker land to clarify types.
def __get__(self, obj: None, cls: Any = None) -> CompoundDictField[TK, TC]: # (see note in BaseField)
...
@overload
def __get__(self,
obj: Any,
cls: Any = None) -> BoundCompoundDictField[TK, TC]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
# On access we simply provide a version of ourself
# bound to our corresponding sub-data.
if obj is None:
# when called on the type, we return the field
return self
assert self.d_key in obj.d_data
return BoundCompoundDictField(self, obj.d_data[self.d_key])
# In the type-checker's eyes we take CompoundValues but at runtime
# we actually take BoundCompoundValues (see note in BoundCompoundDictField)
if TYPE_CHECKING: if TYPE_CHECKING:
@overload
def __get__(self,
obj: None,
cls: Any = None) -> CompoundDictField[TK, TC]:
...
@overload
def __get__(self,
obj: Any,
cls: Any = None) -> BoundCompoundDictField[TK, TC]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
def __set__(self, obj: Any, value: Dict[TK, TC]) -> None: def __set__(self, obj: Any, value: Dict[TK, TC]) -> None:
... ...
else: def get_with_data(self, data: Any) -> Any:
assert self.d_key in data
return BoundCompoundDictField(self, data[self.d_key])
def __set__(self, obj, value): def set_with_data(self, data: Any, value: Any, error: bool) -> Any:
if not isinstance(value, dict): if not isinstance(value, dict):
raise TypeError( raise TypeError('CompoundDictField expected dict value on set.')
'CompoundDictField expected dict value on set.')
# Allow assigning only from a sequence of our existing children. # Allow assigning only from a sequence of our existing children.
# (could look into expanding this to other children if we can # (could look into expanding this to other children if we can
# be sure the underlying data will line up; for example two # be sure the underlying data will line up; for example two
# CompoundListFields with different child_field values should not # CompoundListFields with different child_field values should not
# be inter-assignable.) # be inter-assignable.)
print('val', value) if (not all(isinstance(i, self.d_keytype) for i in value.keys())
if (not all(isinstance(i, self.d_keytype) for i in value.keys()) or not all(
or not all( isinstance(i, BoundCompoundValue) for i in value.values())
isinstance(i, BoundCompoundValue) or not all(i.d_value is self.d_value for i in value.values())):
for i in value.values()) raise ValueError('CompoundDictField assignment must be a '
or not all(i.d_value is self.d_value 'dict containing only its existing children.')
for i in value.values())): data[self.d_key] = {key: val.d_data for key, val in value.items()}
raise ValueError('CompoundDictField assignment must be a '
'dict containing only its existing children.')
obj.d_data[self.d_key] = {
key: val.d_data
for key, val in value.items()
}

View File

@ -59,10 +59,10 @@ class BoundCompoundValue:
return compound_eq(self, other) return compound_eq(self, other)
def __getattr__(self, name: str, default: Any = None) -> Any: def __getattr__(self, name: str, default: Any = None) -> Any:
# if this attribute corresponds to a field on our compound value's # If this attribute corresponds to a field on our compound value's
# unbound type, ask it to give us a value using our data # unbound type, ask it to give us a value using our data
field = getattr(type(object.__getattribute__(self, 'd_value')), name, d_value = type(object.__getattribute__(self, 'd_value'))
None) field = getattr(d_value, name, None)
if isinstance(field, BaseField): if isinstance(field, BaseField):
return field.get_with_data(self.d_data) return field.get_with_data(self.d_data)
raise AttributeError raise AttributeError

View File

@ -37,6 +37,7 @@
"assets/src/data/scripts", "assets/src/data/scripts",
"assets/src/server", "assets/src/server",
"src/generated_src", "src/generated_src",
"tools" "tools",
"tests"
] ]
} }

View File

@ -1,6 +1,6 @@
<!-- THIS FILE IS AUTO GENERATED; DO NOT EDIT BY HAND --> <!-- THIS FILE IS AUTO GENERATED; DO NOT EDIT BY HAND -->
<!--DOCSHASH=f60857a13d4c5fd4ba30988f084e00a4--> <!--DOCSHASH=b06760caff5d35273e974c2601857348-->
<h4><em>last updated on 2019-11-29 for Ballistica version 1.5.0 build 20001</em></h4> <h4><em>last updated on 2019-12-13 for Ballistica version 1.5.0 build 20001</em></h4>
<p>This page documents the Python classes and functions in the 'ba' module, <p>This page documents the Python classes and functions in the 'ba' module,
which are the ones most relevant to modding in Ballistica. If you come across something you feel should be included here or could be better explained, please <a href="mailto:support@froemling.net">let me know</a>. Happy modding!</p> which are the ones most relevant to modding in Ballistica. If you come across something you feel should be included here or could be better explained, please <a href="mailto:support@froemling.net">let me know</a>. Happy modding!</p>
<hr> <hr>

View File

@ -0,0 +1,20 @@
# Copyright (c) 2011-2019 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------

View File

@ -0,0 +1,38 @@
# Copyright (c) 2011-2019 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Testing tests."""
def inc(x: int) -> int:
"""Testing inc."""
return x + 1
def test_answer() -> None:
"""Testing answer."""
import bafoundation
print('testfooooo', dir(bafoundation))
assert inc(3) == 4
def test_answer2() -> None:
"""Testing answer."""
assert inc(3) == 4

View File

@ -62,9 +62,12 @@ CLREND = '\033[0m' # End.
CMD_LOGIN = 'login' CMD_LOGIN = 'login'
CMD_LOGOUT = 'logout' CMD_LOGOUT = 'logout'
CMD_PUTASSET = 'putasset' CMD_PUTASSETPACK = 'putassetpack'
CMD_HELP = 'help' CMD_HELP = 'help'
ASSET_PACKAGE_NAME_VALID_CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789_'
ASSET_PACKAGE_NAME_MAX_LENGTH = 32
ASSET_PATH_VALID_CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789_' ASSET_PATH_VALID_CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789_'
ASSET_PATH_MAX_LENGTH = 128 ASSET_PATH_MAX_LENGTH = 128
@ -104,8 +107,30 @@ class Asset:
self.filepath = os.path.join(package.path, path + exts[assettype]) self.filepath = os.path.join(package.path, path + exts[assettype])
# Note to self: keep this synced with server-side validation func...
def validate_asset_package_name(name: str) -> None:
"""Throw an exception on an invalid asset-package name."""
if len(name) > ASSET_PACKAGE_NAME_MAX_LENGTH:
raise CleanError(f'Asset package name is too long: "{name}"')
if not name:
raise CleanError(f'Asset package name cannot be empty.')
if name[0] == '_' or name[-1] == '_':
raise CleanError(
f'Asset package name cannot start or end with underscore.')
if '__' in name:
raise CleanError(
f'Asset package name cannot contain sequential underscores.')
for char in name:
if char not in ASSET_PACKAGE_NAME_VALID_CHARS:
raise CleanError(
f'Found invalid char "{char}" in asset package name "{name}".')
# Note to self: keep this synced with server-side validation func...
def validate_asset_path(path: str) -> None: def validate_asset_path(path: str) -> None:
"""Throw an exception on an invalid asset path.""" """Throw an exception on an invalid asset path."""
if len(path) > ASSET_PATH_MAX_LENGTH:
raise CleanError(f'Asset path is too long: "{path}"')
names = path.split('/') names = path.split('/')
for name in names: for name in names:
if not name: if not name:
@ -122,6 +147,7 @@ class AssetPackage:
def __init__(self) -> None: def __init__(self) -> None:
self.assets: Dict[str, Asset] = {} self.assets: Dict[str, Asset] = {}
self.path = Path('') self.path = Path('')
self.name = 'untitled'
@classmethod @classmethod
def load_from_disk(cls, path: Path) -> AssetPackage: def load_from_disk(cls, path: Path) -> AssetPackage:
@ -137,13 +163,16 @@ class AssetPackage:
index = yaml.safe_load(infile) index = yaml.safe_load(infile)
if not isinstance(index, dict): if not isinstance(index, dict):
raise CleanError(f'Root dict not found in {indexfilename}') raise CleanError(f'Root dict not found in {indexfilename}')
name = index.get('name')
if not isinstance(name, str):
raise CleanError(f'No "name" str found in {indexfilename}')
validate_asset_package_name(name)
package.name = name
assets = index.get('assets') assets = index.get('assets')
if not isinstance(assets, dict): if not isinstance(assets, dict):
raise CleanError(f'No "assets" dict found in {indexfilename}') raise CleanError(f'No "assets" dict found in {indexfilename}')
for assetpath, assetdata in assets.items(): for assetpath, assetdata in assets.items():
validate_asset_path(assetpath) validate_asset_path(assetpath)
if len(assetpath) > ASSET_PATH_MAX_LENGTH:
raise CleanError(f'Asset path is too long: "{assetpath}"')
if not isinstance(assetdata, dict): if not isinstance(assetdata, dict):
raise CleanError( raise CleanError(
f'Invalid asset data for {assetpath} in {indexfilename}') f'Invalid asset data for {assetpath} in {indexfilename}')
@ -159,12 +188,12 @@ class AssetPackage:
return package return package
def get_manifest(self) -> Dict: def get_manifest(self) -> Dict:
"""Build a manifest of hashes and other info for files on disk.""" """Build a manifest of hashes and other info for the package."""
import hashlib import hashlib
from concurrent.futures import ThreadPoolExecutor from concurrent.futures import ThreadPoolExecutor
from multiprocessing import cpu_count from multiprocessing import cpu_count
manifest: Dict = {'files': {}} manifest: Dict = {'name': self.name, 'files': {}}
def _get_asset_info(iasset: Asset) -> Tuple[Asset, Dict]: def _get_asset_info(iasset: Asset) -> Tuple[Asset, Dict]:
sha = hashlib.sha256() sha = hashlib.sha256()
@ -217,8 +246,8 @@ class App:
self.do_login() self.do_login()
elif cmd == CMD_LOGOUT: elif cmd == CMD_LOGOUT:
self.do_logout() self.do_logout()
elif cmd == CMD_PUTASSET: elif cmd == CMD_PUTASSETPACK:
self.do_putasset() self.do_putassetpack()
else: else:
# For all other commands, simply pass them to the server verbatim. # For all other commands, simply pass them to the server verbatim.
self.do_misc_command() self.do_misc_command()
@ -303,8 +332,8 @@ class App:
self._state.login_token = None self._state.login_token = None
print(f'{CLRGRN}Cloudtool is now logged out.{CLREND}') print(f'{CLRGRN}Cloudtool is now logged out.{CLREND}')
def do_putasset(self) -> None: def do_putassetpack(self) -> None:
"""Run a putasset command.""" """Run a putassetpack command."""
if len(sys.argv) != 3: if len(sys.argv) != 3:
raise CleanError('Expected a path to an assetpackage directory.') raise CleanError('Expected a path to an assetpackage directory.')
@ -315,19 +344,19 @@ class App:
# Send the server a manifest of everything we've got locally. # Send the server a manifest of everything we've got locally.
manifest = package.get_manifest() manifest = package.get_manifest()
print('SENDING PACKAGE MANIFEST:', manifest) print('SENDING PACKAGE MANIFEST:', manifest)
response = self._servercmd('putassetmanifest', {'m': manifest}) response = self._servercmd('putassetpackmanifest', {'m': manifest})
# The server should give us an upload id and a set of files it wants. # The server should give us an upload id and a set of files it wants.
# Upload each of those. # Upload each of those.
upload_files: List[str] = response.data['upload_files'] upload_files: List[str] = response.data['upload_files']
assert isinstance(upload_files, list) assert isinstance(upload_files, list)
assert all(isinstance(f, str) for f in upload_files) assert all(isinstance(f, str) for f in upload_files)
self._putasset_upload(package, upload_files) self._putassetpack_upload(package, upload_files)
print('Asset upload successful!') print('Asset upload successful!')
def _putasset_upload(self, package: AssetPackage, def _putassetpack_upload(self, package: AssetPackage,
files: List[str]) -> None: files: List[str]) -> None:
# Upload the files one at a time. # Upload the files one at a time.
# (we can potentially do this in parallel in the future). # (we can potentially do this in parallel in the future).
@ -345,7 +374,7 @@ class App:
check=True) check=True)
with open(gzpath, 'rb') as infile: with open(gzpath, 'rb') as infile:
putfiles: Dict = {'file': infile} putfiles: Dict = {'file': infile}
_response = self._servercmd('putassetupload', _response = self._servercmd('putassetpackupload',
{'path': asset.path}, {'path': asset.path},
files=putfiles) files=putfiles)

View File

@ -32,6 +32,9 @@ if TYPE_CHECKING:
from typing import Dict, Union, Sequence, Optional, Any from typing import Dict, Union, Sequence, Optional, Any
from typing_extensions import Literal from typing_extensions import Literal
# Python binary assumed by these tools.
PYTHON_BIN = 'python3.7'
MIT_LICENSE = """Copyright (c) 2011-2019 Eric Froemling MIT_LICENSE = """Copyright (c) 2011-2019 Eric Froemling
Permission is hereby granted, free of charge, to any person obtaining a copy Permission is hereby granted, free of charge, to any person obtaining a copy

View File

@ -510,8 +510,9 @@ def runmypy(filenames: List[str],
full: bool = False, full: bool = False,
check: bool = True) -> None: check: bool = True) -> None:
"""Run MyPy on provided filenames.""" """Run MyPy on provided filenames."""
from efrotools import PYTHON_BIN
args = [ args = [
'python3.7', '-m', 'mypy', '--pretty', '--no-error-summary', PYTHON_BIN, '-m', 'mypy', '--pretty', '--no-error-summary',
'--config-file', '.mypy.ini' '--config-file', '.mypy.ini'
] + filenames ] + filenames
if full: if full:

View File

@ -418,6 +418,25 @@ def compile_python_files() -> None:
invalidation_mode=mode) invalidation_mode=mode)
def pytest() -> None:
"""Run pytest with project environment set up properly."""
from efrotools import get_config, PYTHON_BIN
# Grab our python paths for the project and stuff them in PYTHONPATH.
pypaths = get_config(PROJROOT).get('python_paths')
if pypaths is None:
raise CleanError('python_paths not found in project config.')
os.environ['PYTHONPATH'] = ':'.join(pypaths)
# Also tell Python interpreters not to write __pycache__ dirs everywhere
# which can screw up our builds.
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
# Do the thing.
subprocess.run([PYTHON_BIN, '-m', 'pytest'] + sys.argv[2:], check=True)
def makefile_target_list() -> None: def makefile_target_list() -> None:
"""Prints targets in a makefile. """Prints targets in a makefile.

View File

@ -45,7 +45,7 @@ from efrotools.snippets import ( # pylint: disable=unused-import
PROJROOT, CleanError, snippets_main, formatcode, formatscripts, PROJROOT, CleanError, snippets_main, formatcode, formatscripts,
formatmakefile, cpplint, pylint, mypy, tool_config_install, sync, sync_all, formatmakefile, cpplint, pylint, mypy, tool_config_install, sync, sync_all,
scriptfiles, pycharm, clioncode, androidstudiocode, makefile_target_list, scriptfiles, pycharm, clioncode, androidstudiocode, makefile_target_list,
spelling, spelling_all, compile_python_files) spelling, spelling_all, compile_python_files, pytest)
if TYPE_CHECKING: if TYPE_CHECKING:
from typing import Optional, List, Sequence from typing import Optional, List, Sequence
@ -53,7 +53,7 @@ if TYPE_CHECKING:
# Parts of full-tests suite we only run on particular days. # Parts of full-tests suite we only run on particular days.
# (This runs in listed order so should be randomized by hand to avoid # (This runs in listed order so should be randomized by hand to avoid
# clustering similar tests too much) # clustering similar tests too much)
SPARSE_TESTS: List[List[str]] = [ SPARSE_TEST_BUILDS: List[List[str]] = [
['ios.pylibs.debug', 'android.pylibs.arm'], ['ios.pylibs.debug', 'android.pylibs.arm'],
['linux.package', 'android.pylibs.arm64'], ['linux.package', 'android.pylibs.arm64'],
['windows.package', 'mac.pylibs'], ['windows.package', 'mac.pylibs'],
@ -71,7 +71,7 @@ SPARSE_TESTS: List[List[str]] = [
# Currently only doing sparse-tests in core; not spinoffs. # Currently only doing sparse-tests in core; not spinoffs.
# (whole word will get subbed out in spinoffs so this will be false) # (whole word will get subbed out in spinoffs so this will be false)
DO_SPARSE_TESTS = 'ballistica' + 'core' == 'ballisticacore' DO_SPARSE_TEST_BUILDS = 'ballistica' + 'core' == 'ballisticacore'
# Python modules we require for this project. # Python modules we require for this project.
# (module name, required version, pip package (if it differs from module name)) # (module name, required version, pip package (if it differs from module name))
@ -83,6 +83,7 @@ REQUIRED_PYTHON_MODULES = [
('pytz', None, None), ('pytz', None, None),
('yaml', None, 'PyYAML'), ('yaml', None, 'PyYAML'),
('requests', None, None), ('requests', None, None),
('pytest', None, None),
] ]
@ -152,8 +153,8 @@ def gen_fulltest_buildfile_android() -> None:
' nice -n 15 make android-build') ' nice -n 15 make android-build')
# Now add sparse tests that land on today. # Now add sparse tests that land on today.
if DO_SPARSE_TESTS: if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)] extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('android.')] extras = [e for e in extras if e.startswith('android.')]
for extra in extras: for extra in extras:
if extra == 'android.pylibs.arm': if extra == 'android.pylibs.arm':
@ -212,8 +213,8 @@ def gen_fulltest_buildfile_windows() -> None:
f'WINDOWS_CONFIGURATION={cfg3} make windows-build') f'WINDOWS_CONFIGURATION={cfg3} make windows-build')
# Now add sparse tests that land on today. # Now add sparse tests that land on today.
if DO_SPARSE_TESTS: if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)] extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('windows.')] extras = [e for e in extras if e.startswith('windows.')]
for extra in extras: for extra in extras:
if extra == 'windows.package': if extra == 'windows.package':
@ -245,8 +246,8 @@ def gen_fulltest_buildfile_apple() -> None:
# iOS stuff # iOS stuff
lines.append('nice -n 18 make ios-build') lines.append('nice -n 18 make ios-build')
lines.append('nice -n 18 make ios-new-build') lines.append('nice -n 18 make ios-new-build')
if DO_SPARSE_TESTS: if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)] extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('ios.')] extras = [e for e in extras if e.startswith('ios.')]
for extra in extras: for extra in extras:
if extra == 'ios.pylibs': if extra == 'ios.pylibs':
@ -258,8 +259,8 @@ def gen_fulltest_buildfile_apple() -> None:
# tvOS stuff # tvOS stuff
lines.append('nice -n 18 make tvos-build') lines.append('nice -n 18 make tvos-build')
if DO_SPARSE_TESTS: if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)] extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('tvos.')] extras = [e for e in extras if e.startswith('tvos.')]
for extra in extras: for extra in extras:
if extra == 'tvos.pylibs': if extra == 'tvos.pylibs':
@ -276,8 +277,8 @@ def gen_fulltest_buildfile_apple() -> None:
lines.append('nice -n 18 make mac-new-build') lines.append('nice -n 18 make mac-new-build')
lines.append('nice -n 18 make mac-server-build') lines.append('nice -n 18 make mac-server-build')
lines.append('nice -n 18 make cmake-build') lines.append('nice -n 18 make cmake-build')
if DO_SPARSE_TESTS: if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)] extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('mac.')] extras = [e for e in extras if e.startswith('mac.')]
for extra in extras: for extra in extras:
if extra == 'mac.package': if extra == 'mac.package':
@ -310,8 +311,8 @@ def gen_fulltest_buildfile_linux() -> None:
for target in targets: for target in targets:
lines.append(f'{linflav} make linux-{target}') lines.append(f'{linflav} make linux-{target}')
if DO_SPARSE_TESTS: if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)] extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('linux.')] extras = [e for e in extras if e.startswith('linux.')]
for extra in extras: for extra in extras:
if extra == 'linux.package': if extra == 'linux.package':
@ -716,39 +717,38 @@ def pip_req_list() -> None:
def checkenv() -> None: def checkenv() -> None:
"""Check for tools necessary to build and run the app.""" """Check for tools necessary to build and run the app."""
from efrotools import PYTHON_BIN
print('Checking environment...', flush=True) print('Checking environment...', flush=True)
python_bin = 'python3.7'
# Make sure they've got our target python version. # Make sure they've got our target python version.
if subprocess.run(['which', python_bin], check=False, if subprocess.run(['which', PYTHON_BIN], check=False,
capture_output=True).returncode != 0: capture_output=True).returncode != 0:
raise CleanError(f'{python_bin} is required.') raise CleanError(f'{PYTHON_BIN} is required.')
# Make sure they've got pip for that python version. # Make sure they've got pip for that python version.
if subprocess.run(f"{python_bin} -m pip --version", if subprocess.run(f"{PYTHON_BIN} -m pip --version",
shell=True, shell=True,
check=False, check=False,
capture_output=True).returncode != 0: capture_output=True).returncode != 0:
raise CleanError('pip (for {python_bin}) is required.') raise CleanError('pip (for {PYTHON_BIN}) is required.')
# Check for some required python modules. # Check for some required python modules.
for modname, minver, packagename in REQUIRED_PYTHON_MODULES: for modname, minver, packagename in REQUIRED_PYTHON_MODULES:
if packagename is None: if packagename is None:
packagename = modname packagename = modname
if minver is not None: if minver is not None:
results = subprocess.run(f'{python_bin} -m {modname} --version', results = subprocess.run(f'{PYTHON_BIN} -m {modname} --version',
shell=True, shell=True,
check=False, check=False,
capture_output=True) capture_output=True)
else: else:
results = subprocess.run(f'{python_bin} -c "import {modname}"', results = subprocess.run(f'{PYTHON_BIN} -c "import {modname}"',
shell=True, shell=True,
check=False, check=False,
capture_output=True) capture_output=True)
if results.returncode != 0: if results.returncode != 0:
raise CleanError(f'{packagename} (for {python_bin}) is required.\n' raise CleanError(f'{packagename} (for {PYTHON_BIN}) is required.\n'
f'To install it, try: "{python_bin}' f'To install it, try: "{PYTHON_BIN}'
f' -m pip install {packagename}"') f' -m pip install {packagename}"')
if minver is not None: if minver is not None:
ver_line = results.stdout.decode().splitlines()[0] ver_line = results.stdout.decode().splitlines()[0]