added testing setup

Eric Froemling 2019-12-13 16:55:31 -08:00
parent 0124e74ab5
commit 57b3552d96
16 changed files with 310 additions and 195 deletions

.gitignore vendored

@ -17,6 +17,7 @@ local.properties
.dmypy.json
.cache
.mypy_cache
.pytest_cache
.mypy.ini
.pycheckers
.flycheck-dir-locals.el


@ -60,11 +60,9 @@
<excludePattern pattern=".spinoffdata" />
<excludePattern pattern=".asset_manifest_*.json" />
<excludePattern pattern=".efrocachemap" />
<excludePattern pattern=".pytest_cache" />
</content>
<orderEntry type="jdk" jdkName="Python 3.7" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TestRunnerService">
<option name="PROJECT_TEST_RUNNER" value="Unittests" />
</component>
</module>


@ -336,6 +336,7 @@
<w>dataclassutils</w>
<w>datamodule</w>
<w>dataname</w>
<w>dataval</w>
<w>datetimemodule</w>
<w>datetimes</w>
<w>daynum</w>
@ -1250,6 +1251,9 @@
<w>pushlist</w>
<w>putasset</w>
<w>putassetmanifest</w>
<w>putassetpack</w>
<w>putassetpackmanifest</w>
<w>putassetpackupload</w>
<w>putassetupload</w>
<w>putfiles</w>
<w>pval</w>
@ -1291,6 +1295,7 @@
<w>pyoffs</w>
<w>pypaths</w>
<w>pysources</w>
<w>pytest</w>
<w>pythonpath</w>
<w>pythonw</w>
<w>pytree</w>
@ -1575,6 +1580,7 @@
<w>testcapimodule</w>
<w>testclass</w>
<w>testfoo</w>
<w>testfooooo</w>
<w>testhelpers</w>
<w>testimportmultiple</w>
<w>testm</w>


@ -320,6 +320,19 @@ pycharmfull: prereqs
mypyfull pycharm pycharmfull
################################################################################
# #
# Testing #
# #
################################################################################
# Run all tests.
# Note: need to disable bytecode writing so we don't cause errors due to
# unexpected __pycache__ dirs popping up.
test: prereqs
@tools/snippets pytest tests
################################################################################
# #
# Updating / Preflighting #


@ -91,12 +91,17 @@ class BaseField(DataHandler):
# more than a single field entry so this is unused)
self.d_key = d_key
# IMPORTANT: this method should only be overridden in the eyes of the
# type-checker (to specify exact return types). Subclasses should instead
# override get_with_data() for doing the actual work, since that method
# may sometimes be called explicitly instead of through __get__
def __get__(self, obj: Any, type_in: Any = None) -> Any:
if obj is None:
# when called on the type, we return the field
return self
return self.get_with_data(obj.d_data)
# IMPORTANT: same deal as __get__() (see note above)
def __set__(self, obj: Any, value: Any) -> None:
assert obj is not None
self.set_with_data(obj.d_data, value, error=True)
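For reference, here is a minimal standalone sketch of that split (hypothetical SketchField/SketchEntity names, not the actual bafoundation classes): the descriptor hooks stay thin and everything funnels through get_with_data()/set_with_data(), which can also be called directly when no attribute access is involved.

# A minimal sketch of the split described above (hypothetical
# SketchField/SketchEntity names, not the real bafoundation classes).
from typing import Any, Dict

class SketchField:
    def __init__(self, d_key: str) -> None:
        self.d_key = d_key

    def get_with_data(self, data: Dict[str, Any]) -> Any:
        # All real work lives here (and in set_with_data below), so it
        # can also be called explicitly without going through __get__.
        return data[self.d_key]

    def set_with_data(self, data: Dict[str, Any], value: Any,
                      error: bool) -> None:
        data[self.d_key] = value

    def __get__(self, obj: Any, type_in: Any = None) -> Any:
        if obj is None:
            return self  # Accessed on the class: return the field itself.
        return self.get_with_data(obj.d_data)

    def __set__(self, obj: Any, value: Any) -> None:
        self.set_with_data(obj.d_data, value, error=True)

class SketchEntity:
    name = SketchField('name')

    def __init__(self) -> None:
        self.d_data: Dict[str, Any] = {}

ent = SketchEntity()
ent.name = 'hello'          # Routed through __set__ -> set_with_data().
assert ent.name == 'hello'  # Routed through __get__ -> get_with_data().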


@ -71,7 +71,7 @@ class Field(BaseField, Generic[T]):
# Use default runtime get/set but let type-checker know our types.
# Note: we actually return a bound-field when accessed on
# a type instead of an instance, but we don't reflect that here yet
# (need to write a mypy plugin so sub-field access works first)
# (would need to write a mypy plugin so sub-field access works first)
def __get__(self, obj: Any, cls: Any = None) -> T:
...
@ -125,46 +125,38 @@ class CompoundField(BaseField, Generic[TC]):
def __set__(self: CompoundField[TC], obj: Any, value: TC) -> None:
...
else:
def get_with_data(self, data: Any) -> Any:
assert self.d_key in data
return BoundCompoundValue(self.d_value, data[self.d_key])
def __get__(self, obj, cls=None):
if obj is None:
# when called on the type, we return the field
return self
# (this is only ever called on entity root fields
# so no need to worry about custom d_key case)
assert self.d_key in obj.d_data
return BoundCompoundValue(self.d_value, obj.d_data[self.d_key])
def set_with_data(self, data: Any, value: Any, error: bool) -> Any:
from bafoundation.entity._value import CompoundValue
def __set__(self, obj, value):
from bafoundation.entity._value import CompoundValue
# Ok here's the deal: our type checking above allows any subtype
# of our CompoundValue in here, but we want to be more picky than
# that. Let's check fields for equality. This way we'll allow
# assigning something like a Carentity to a Car field
# (where the data is the same), but won't allow assigning a Car
# to a Vehicle field (as Car probably adds more fields).
value1: CompoundValue
if isinstance(value, BoundCompoundValue):
value1 = value.d_value
elif isinstance(value, CompoundValue):
value1 = value
else:
raise ValueError(f"Can't assign from object type {type(value)}")
dataval = getattr(value, 'd_data', None)
if dataval is None:
raise ValueError(f"Can't assign from unbound object {value}")
if self.d_value.get_fields() != value1.get_fields():
raise ValueError(f"Can't assign to {self.d_value} from"
f" incompatible type {value.d_value}; "
f"sub-fields do not match.")
# Ok here's the deal: our type checking above allows any subtype
# of our CompoundValue in here, but we want to be more picky than
# that. Let's check fields for equality. This way we'll allow
# assigning something like a Carentity to a Car field
# (where the data is the same), but won't allow assigning a Car
# to a Vehicle field (as Car probably adds more fields).
value1: CompoundValue
if isinstance(value, BoundCompoundValue):
value1 = value.d_value
elif isinstance(value, CompoundValue):
value1 = value
else:
raise ValueError(
f"Can't assign from object type {type(value)}")
data = getattr(value, 'd_data', None)
if data is None:
raise ValueError(f"Can't assign from unbound object {value}")
if self.d_value.get_fields() != value1.get_fields():
raise ValueError(f"Can't assign to {self.d_value} from"
f" incompatible type {value.d_value}; "
f"sub-fields do not match.")
# If we're allowing this to go through, we can simply copy the
# data from the passed in value. The fields match so it should
# be in a valid state already.
obj.d_data[self.d_key] = copy.deepcopy(data)
# If we're allowing this to go through, we can simply copy the
# data from the passed in value. The fields match so it should
# be in a valid state already.
data[self.d_key] = copy.deepcopy(dataval)
class ListField(BaseField, Generic[T]):
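In other words, assignment compatibility is decided by comparing the two compound values' field sets for exact equality. A rough standalone illustration of that rule (hypothetical Vehicle/Car field sets, not the real entity classes):

# Rough illustration of the "sub-fields must match exactly" rule above
# (hypothetical field sets; the real check compares get_fields() results).
from typing import Set

vehicle_fields: Set[str] = {'wheels', 'color'}
car_fields: Set[str] = {'wheels', 'color', 'trunk_size'}  # Car adds a field.

def can_assign(target_fields: Set[str], source_fields: Set[str]) -> bool:
    # Car data into a Car field is fine (identical field sets), but Car
    # data into a Vehicle field is rejected since the extra 'trunk_size'
    # entry has no corresponding field on the target.
    return target_fields == source_fields

assert can_assign(car_fields, car_fields)
assert not can_assign(vehicle_fields, car_fields)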
@ -199,25 +191,25 @@ class ListField(BaseField, Generic[T]):
# When accessed on a FieldInspector we return a sub-field FieldInspector.
# When accessed on an instance we return a BoundListField.
@overload
def __get__(self, obj: None, cls: Any = None) -> FieldInspector:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundListField[T]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
if obj is None:
# When called on the type, we return the field.
return self
return BoundListField(self, obj.d_data[self.d_key])
if TYPE_CHECKING:
@overload
def __get__(self, obj: None, cls: Any = None) -> FieldInspector:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundListField[T]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
def __set__(self, obj: Any, value: List[T]) -> None:
...
def get_with_data(self, data: Any) -> Any:
return BoundListField(self, data[self.d_key])
class DictField(BaseField, Generic[TK, T]):
"""A field of values in a dict with a specified index type."""
@ -258,25 +250,25 @@ class DictField(BaseField, Generic[TK, T]):
# change the dict, but we can prune completely if empty (and allowed)
return not data and not self._store_default
@overload
def __get__(self, obj: None, cls: Any = None) -> DictField[TK, T]:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundDictField[TK, T]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
if obj is None:
# When called on the type, we return the field.
return self
return BoundDictField(self._keytype, self, obj.d_data[self.d_key])
if TYPE_CHECKING:
@overload
def __get__(self, obj: None, cls: Any = None) -> DictField[TK, T]:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundDictField[TK, T]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
def __set__(self, obj: Any, value: Dict[TK, T]) -> None:
...
def get_with_data(self, data: Any) -> Any:
return BoundDictField(self._keytype, self, data[self.d_key])
class CompoundListField(BaseField, Generic[TC]):
"""A field consisting of repeated instances of a compound-value.
@ -323,49 +315,47 @@ class CompoundListField(BaseField, Generic[TC]):
# We can also optionally prune the whole list if empty and allowed.
return not data and not self._store_default
@overload
def __get__(self, obj: None, cls: Any = None) -> CompoundListField[TC]:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundCompoundListField[TC]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
# On access we simply provide a version of ourself
# bound to our corresponding sub-data.
if obj is None:
# when called on the type, we return the field
return self
assert self.d_key in obj.d_data
return BoundCompoundListField(self, obj.d_data[self.d_key])
# Note:
# When setting the list, we tell the type-checker that we accept
# a raw list of CompoundValue objects, but at runtime we actually
# deal with BoundCompoundValue objects (see note in BoundCompoundListField)
if TYPE_CHECKING:
@overload
def __get__(self, obj: None, cls: Any = None) -> CompoundListField[TC]:
...
@overload
def __get__(self,
obj: Any,
cls: Any = None) -> BoundCompoundListField[TC]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
# Note:
# When setting the list, we tell the type-checker that we accept
# a raw list of CompoundValue objects, but at runtime we actually
# deal with BoundCompoundValue objects (see note in
# BoundCompoundListField)
def __set__(self, obj: Any, value: List[TC]) -> None:
...
else:
def get_with_data(self, data: Any) -> Any:
assert self.d_key in data
return BoundCompoundListField(self, data[self.d_key])
def __set__(self, obj, value):
if not isinstance(value, list):
raise TypeError(
'CompoundListField expected list value on set.')
def set_with_data(self, data: Any, value: Any, error: bool) -> Any:
if not isinstance(value, list):
raise TypeError('CompoundListField expected list value on set.')
# Allow assigning only from a sequence of our existing children.
# (could look into expanding this to other children if we can
# be sure the underlying data will line up; for example two
# CompoundListFields with different child_field values should not
# be inter-assignable.
if (not all(isinstance(i, BoundCompoundValue) for i in value)
or not all(i.d_value is self.d_value for i in value)):
raise ValueError('CompoundListField assignment must be a '
'list containing only its existing children.')
obj.d_data[self.d_key] = [i.d_data for i in value]
# Allow assigning only from a sequence of our existing children.
# (could look into expanding this to other children if we can
# be sure the underlying data will line up; for example two
# CompoundListFields with different child_field values should not
# be inter-assignable.
if (not all(isinstance(i, BoundCompoundValue) for i in value)
or not all(i.d_value is self.d_value for i in value)):
raise ValueError('CompoundListField assignment must be a '
'list containing only its existing children.')
data[self.d_key] = [i.d_data for i in value]
class CompoundDictField(BaseField, Generic[TK, TC]):
@ -420,54 +410,45 @@ class CompoundDictField(BaseField, Generic[TK, TC]):
# We can also optionally prune the whole list if empty and allowed.
return not data and not self._store_default
@overload
def __get__(self, obj: None, cls: Any = None) -> CompoundDictField[TK, TC]:
...
@overload
def __get__(self,
obj: Any,
cls: Any = None) -> BoundCompoundDictField[TK, TC]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
# On access we simply provide a version of ourself
# bound to our corresponding sub-data.
if obj is None:
# when called on the type, we return the field
return self
assert self.d_key in obj.d_data
return BoundCompoundDictField(self, obj.d_data[self.d_key])
# In the type-checker's eyes we take CompoundValues but at runtime
# we actually take BoundCompoundValues (see note in BoundCompoundDictField)
# ONLY overriding these in type-checker land to clarify types.
# (see note in BaseField)
if TYPE_CHECKING:
@overload
def __get__(self,
obj: None,
cls: Any = None) -> CompoundDictField[TK, TC]:
...
@overload
def __get__(self,
obj: Any,
cls: Any = None) -> BoundCompoundDictField[TK, TC]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
def __set__(self, obj: Any, value: Dict[TK, TC]) -> None:
...
else:
def get_with_data(self, data: Any) -> Any:
assert self.d_key in data
return BoundCompoundDictField(self, data[self.d_key])
def __set__(self, obj, value):
if not isinstance(value, dict):
raise TypeError(
'CompoundDictField expected dict value on set.')
def set_with_data(self, data: Any, value: Any, error: bool) -> Any:
if not isinstance(value, dict):
raise TypeError('CompoundDictField expected dict value on set.')
# Allow assigning only from a sequence of our existing children.
# (could look into expanding this to other children if we can
# be sure the underlying data will line up; for example two
# CompoundListFields with different child_field values should not
# be inter-assignable.
print('val', value)
if (not all(isinstance(i, self.d_keytype) for i in value.keys())
or not all(
isinstance(i, BoundCompoundValue)
for i in value.values())
or not all(i.d_value is self.d_value
for i in value.values())):
raise ValueError('CompoundDictField assignment must be a '
'dict containing only its existing children.')
obj.d_data[self.d_key] = {
key: val.d_data
for key, val in value.items()
}
# Allow assigning only from a sequence of our existing children.
# (could look into expanding this to other children if we can
# be sure the underlying data will line up; for example two
# CompoundListFields with different child_field values should not
# be inter-assignable.
if (not all(isinstance(i, self.d_keytype) for i in value.keys())
or not all(
isinstance(i, BoundCompoundValue) for i in value.values())
or not all(i.d_value is self.d_value for i in value.values())):
raise ValueError('CompoundDictField assignment must be a '
'dict containing only its existing children.')
data[self.d_key] = {key: val.d_data for key, val in value.items()}


@ -59,10 +59,10 @@ class BoundCompoundValue:
return compound_eq(self, other)
def __getattr__(self, name: str, default: Any = None) -> Any:
# if this attribute corresponds to a field on our compound value's
# If this attribute corresponds to a field on our compound value's
# unbound type, ask it to give us a value using our data
field = getattr(type(object.__getattribute__(self, 'd_value')), name,
None)
d_value = type(object.__getattribute__(self, 'd_value'))
field = getattr(d_value, name, None)
if isinstance(field, BaseField):
return field.get_with_data(self.d_data)
raise AttributeError
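The lookup above is straightforward delegation: find the field object on the unbound value type, then evaluate it against this bound object's data. A compact sketch of the same shape (hypothetical CatalogItem/BoundItem names):

# Compact sketch of the __getattr__ delegation above (hypothetical
# names): look the field up on the type, then evaluate it with our data.
from typing import Any, Dict

class PriceField:
    def get_with_data(self, data: Dict[str, Any]) -> Any:
        return data['price']

class CatalogItem:
    price = PriceField()

class BoundItem:
    def __init__(self, data: Dict[str, Any]) -> None:
        self.d_data = data

    def __getattr__(self, name: str) -> Any:
        field = getattr(CatalogItem, name, None)
        if isinstance(field, PriceField):
            return field.get_with_data(self.d_data)
        raise AttributeError(name)

assert BoundItem({'price': 3}).price == 3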


@ -37,6 +37,7 @@
"assets/src/data/scripts",
"assets/src/server",
"src/generated_src",
"tools"
"tools",
"tests"
]
}


@ -1,6 +1,6 @@
<!-- THIS FILE IS AUTO GENERATED; DO NOT EDIT BY HAND -->
<!--DOCSHASH=f60857a13d4c5fd4ba30988f084e00a4-->
<h4><em>last updated on 2019-11-29 for Ballistica version 1.5.0 build 20001</em></h4>
<!--DOCSHASH=b06760caff5d35273e974c2601857348-->
<h4><em>last updated on 2019-12-13 for Ballistica version 1.5.0 build 20001</em></h4>
<p>This page documents the Python classes and functions in the 'ba' module,
which are the ones most relevant to modding in Ballistica. If you come across something you feel should be included here or could be better explained, please <a href="mailto:support@froemling.net">let me know</a>. Happy modding!</p>
<hr>


@ -0,0 +1,20 @@
# Copyright (c) 2011-2019 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------


@ -0,0 +1,38 @@
# Copyright (c) 2011-2019 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Testing tests."""
def inc(x: int) -> int:
"""Testing inc."""
return x + 1
def test_answer() -> None:
"""Testing answer."""
import bafoundation
print('testfooooo', dir(bafoundation))
assert inc(3) == 4
def test_answer2() -> None:
"""Testing answer."""
assert inc(3) == 4


@ -62,9 +62,12 @@ CLREND = '\033[0m' # End.
CMD_LOGIN = 'login'
CMD_LOGOUT = 'logout'
CMD_PUTASSET = 'putasset'
CMD_PUTASSETPACK = 'putassetpack'
CMD_HELP = 'help'
ASSET_PACKAGE_NAME_VALID_CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789_'
ASSET_PACKAGE_NAME_MAX_LENGTH = 32
ASSET_PATH_VALID_CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789_'
ASSET_PATH_MAX_LENGTH = 128
@ -104,8 +107,30 @@ class Asset:
self.filepath = os.path.join(package.path, path + exts[assettype])
# Note to self: keep this synced with server-side validation func...
def validate_asset_package_name(name: str) -> None:
"""Throw an exception on an invalid asset-package name."""
if len(name) > ASSET_PACKAGE_NAME_MAX_LENGTH:
raise CleanError(f'Asset package name is too long: "{name}"')
if not name:
raise CleanError(f'Asset package name cannot be empty.')
if name[0] == '_' or name[-1] == '_':
raise CleanError(
f'Asset package name cannot start or end with underscore.')
if '__' in name:
raise CleanError(
f'Asset package name cannot contain sequential underscores.')
for char in name:
if char not in ASSET_PACKAGE_NAME_VALID_CHARS:
raise CleanError(
f'Found invalid char "{char}" in asset package name "{name}".')
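Concretely, these rules accept plain lowercase snake_case names up to 32 characters; a quick illustration (assuming validate_asset_package_name and CleanError are importable from this module as-is):

# Quick illustration of the naming rules above (assumes this module's
# validate_asset_package_name and CleanError are importable as-is).
validate_asset_package_name('space_pirates_01')  # Accepted.
for bad in ('SpacePirates',      # Uppercase chars are not allowed.
            '_space_pirates',    # Leading underscore.
            'pirates_',          # Trailing underscore.
            'space__pirates',    # Sequential underscores.
            'x' * 33):           # Longer than 32 chars.
    try:
        validate_asset_package_name(bad)
    except CleanError:
        pass  # Each of these is rejected.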
# Note to self: keep this synced with server-side validation func...
def validate_asset_path(path: str) -> None:
"""Throw an exception on an invalid asset path."""
if len(path) > ASSET_PATH_MAX_LENGTH:
raise CleanError(f'Asset path is too long: "{path}"')
names = path.split('/')
for name in names:
if not name:
@ -122,6 +147,7 @@ class AssetPackage:
def __init__(self) -> None:
self.assets: Dict[str, Asset] = {}
self.path = Path('')
self.name = 'untitled'
@classmethod
def load_from_disk(cls, path: Path) -> AssetPackage:
@ -137,13 +163,16 @@ class AssetPackage:
index = yaml.safe_load(infile)
if not isinstance(index, dict):
raise CleanError(f'Root dict not found in {indexfilename}')
name = index.get('name')
if not isinstance(name, str):
raise CleanError(f'No "name" str found in {indexfilename}')
validate_asset_package_name(name)
package.name = name
assets = index.get('assets')
if not isinstance(assets, dict):
raise CleanError(f'No "assets" dict found in {indexfilename}')
for assetpath, assetdata in assets.items():
validate_asset_path(assetpath)
if len(assetpath) > ASSET_PATH_MAX_LENGTH:
raise CleanError(f'Asset path is too long: "{assetpath}"')
if not isinstance(assetdata, dict):
raise CleanError(
f'Invalid asset data for {assetpath} in {indexfilename}')
@ -159,12 +188,12 @@ class AssetPackage:
return package
def get_manifest(self) -> Dict:
"""Build a manifest of hashes and other info for files on disk."""
"""Build a manifest of hashes and other info for the package."""
import hashlib
from concurrent.futures import ThreadPoolExecutor
from multiprocessing import cpu_count
manifest: Dict = {'files': {}}
manifest: Dict = {'name': self.name, 'files': {}}
def _get_asset_info(iasset: Asset) -> Tuple[Asset, Dict]:
sha = hashlib.sha256()
@ -217,8 +246,8 @@ class App:
self.do_login()
elif cmd == CMD_LOGOUT:
self.do_logout()
elif cmd == CMD_PUTASSET:
self.do_putasset()
elif cmd == CMD_PUTASSETPACK:
self.do_putassetpack()
else:
# For all other commands, simply pass them to the server verbatim.
self.do_misc_command()
@ -303,8 +332,8 @@ class App:
self._state.login_token = None
print(f'{CLRGRN}Cloudtool is now logged out.{CLREND}')
def do_putasset(self) -> None:
"""Run a putasset command."""
def do_putassetpack(self) -> None:
"""Run a putassetpack command."""
if len(sys.argv) != 3:
raise CleanError('Expected a path to an assetpackage directory.')
@ -315,19 +344,19 @@ class App:
# Send the server a manifest of everything we've got locally.
manifest = package.get_manifest()
print('SENDING PACKAGE MANIFEST:', manifest)
response = self._servercmd('putassetmanifest', {'m': manifest})
response = self._servercmd('putassetpackmanifest', {'m': manifest})
# The server should give us an upload id and a set of files it wants.
# Upload each of those.
upload_files: List[str] = response.data['upload_files']
assert isinstance(upload_files, list)
assert all(isinstance(f, str) for f in upload_files)
self._putasset_upload(package, upload_files)
self._putassetpack_upload(package, upload_files)
print('Asset upload successful!')
def _putasset_upload(self, package: AssetPackage,
files: List[str]) -> None:
def _putassetpack_upload(self, package: AssetPackage,
files: List[str]) -> None:
# Upload the files one at a time.
# (we can potentially do this in parallel in the future).
@ -345,7 +374,7 @@ class App:
check=True)
with open(gzpath, 'rb') as infile:
putfiles: Dict = {'file': infile}
_response = self._servercmd('putassetupload',
_response = self._servercmd('putassetpackupload',
{'path': asset.path},
files=putfiles)


@ -32,6 +32,9 @@ if TYPE_CHECKING:
from typing import Dict, Union, Sequence, Optional, Any
from typing_extensions import Literal
# Python binary assumed by these tools.
PYTHON_BIN = 'python3.7'
MIT_LICENSE = """Copyright (c) 2011-2019 Eric Froemling
Permission is hereby granted, free of charge, to any person obtaining a copy


@ -510,8 +510,9 @@ def runmypy(filenames: List[str],
full: bool = False,
check: bool = True) -> None:
"""Run MyPy on provided filenames."""
from efrotools import PYTHON_BIN
args = [
'python3.7', '-m', 'mypy', '--pretty', '--no-error-summary',
PYTHON_BIN, '-m', 'mypy', '--pretty', '--no-error-summary',
'--config-file', '.mypy.ini'
] + filenames
if full:


@ -418,6 +418,25 @@ def compile_python_files() -> None:
invalidation_mode=mode)
def pytest() -> None:
"""Run pytest with project environment set up properly."""
from efrotools import get_config, PYTHON_BIN
# Grab our python paths for the project and stuff them in PYTHONPATH.
pypaths = get_config(PROJROOT).get('python_paths')
if pypaths is None:
raise CleanError('python_paths not found in project config.')
os.environ['PYTHONPATH'] = ':'.join(pypaths)
# Also tell Python interpreters not to write __pycache__ dirs everywhere
# which can screw up our builds.
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
# Do the thing.
subprocess.run([PYTHON_BIN, '-m', 'pytest'] + sys.argv[2:], check=True)
def makefile_target_list() -> None:
"""Prints targets in a makefile.


@ -45,7 +45,7 @@ from efrotools.snippets import ( # pylint: disable=unused-import
PROJROOT, CleanError, snippets_main, formatcode, formatscripts,
formatmakefile, cpplint, pylint, mypy, tool_config_install, sync, sync_all,
scriptfiles, pycharm, clioncode, androidstudiocode, makefile_target_list,
spelling, spelling_all, compile_python_files)
spelling, spelling_all, compile_python_files, pytest)
if TYPE_CHECKING:
from typing import Optional, List, Sequence
@ -53,7 +53,7 @@ if TYPE_CHECKING:
# Parts of full-tests suite we only run on particular days.
# (This runs in listed order so should be randomized by hand to avoid
# clustering similar tests too much)
SPARSE_TESTS: List[List[str]] = [
SPARSE_TEST_BUILDS: List[List[str]] = [
['ios.pylibs.debug', 'android.pylibs.arm'],
['linux.package', 'android.pylibs.arm64'],
['windows.package', 'mac.pylibs'],
@ -71,7 +71,7 @@ SPARSE_TESTS: List[List[str]] = [
# Currently only doing sparse-tests in core; not spinoffs.
# (whole word will get subbed out in spinoffs so this will be false)
DO_SPARSE_TESTS = 'ballistica' + 'core' == 'ballisticacore'
DO_SPARSE_TEST_BUILDS = 'ballistica' + 'core' == 'ballisticacore'
# Python modules we require for this project.
# (module name, required version, pip package (if it differs from module name))
@ -83,6 +83,7 @@ REQUIRED_PYTHON_MODULES = [
('pytz', None, None),
('yaml', None, 'PyYAML'),
('requests', None, None),
('pytest', None, None),
]
@ -152,8 +153,8 @@ def gen_fulltest_buildfile_android() -> None:
' nice -n 15 make android-build')
# Now add sparse tests that land on today.
if DO_SPARSE_TESTS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)]
if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('android.')]
for extra in extras:
if extra == 'android.pylibs.arm':
@ -212,8 +213,8 @@ def gen_fulltest_buildfile_windows() -> None:
f'WINDOWS_CONFIGURATION={cfg3} make windows-build')
# Now add sparse tests that land on today.
if DO_SPARSE_TESTS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)]
if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('windows.')]
for extra in extras:
if extra == 'windows.package':
@ -245,8 +246,8 @@ def gen_fulltest_buildfile_apple() -> None:
# iOS stuff
lines.append('nice -n 18 make ios-build')
lines.append('nice -n 18 make ios-new-build')
if DO_SPARSE_TESTS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)]
if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('ios.')]
for extra in extras:
if extra == 'ios.pylibs':
@ -258,8 +259,8 @@ def gen_fulltest_buildfile_apple() -> None:
# tvOS stuff
lines.append('nice -n 18 make tvos-build')
if DO_SPARSE_TESTS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)]
if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('tvos.')]
for extra in extras:
if extra == 'tvos.pylibs':
@ -276,8 +277,8 @@ def gen_fulltest_buildfile_apple() -> None:
lines.append('nice -n 18 make mac-new-build')
lines.append('nice -n 18 make mac-server-build')
lines.append('nice -n 18 make cmake-build')
if DO_SPARSE_TESTS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)]
if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('mac.')]
for extra in extras:
if extra == 'mac.package':
@ -310,8 +311,8 @@ def gen_fulltest_buildfile_linux() -> None:
for target in targets:
lines.append(f'{linflav} make linux-{target}')
if DO_SPARSE_TESTS:
extras = SPARSE_TESTS[dayoffset % len(SPARSE_TESTS)]
if DO_SPARSE_TEST_BUILDS:
extras = SPARSE_TEST_BUILDS[dayoffset % len(SPARSE_TEST_BUILDS)]
extras = [e for e in extras if e.startswith('linux.')]
for extra in extras:
if extra == 'linux.package':
@ -716,39 +717,38 @@ def pip_req_list() -> None:
def checkenv() -> None:
"""Check for tools necessary to build and run the app."""
from efrotools import PYTHON_BIN
print('Checking environment...', flush=True)
python_bin = 'python3.7'
# Make sure they've got our target python version.
if subprocess.run(['which', python_bin], check=False,
if subprocess.run(['which', PYTHON_BIN], check=False,
capture_output=True).returncode != 0:
raise CleanError(f'{python_bin} is required.')
raise CleanError(f'{PYTHON_BIN} is required.')
# Make sure they've got pip for that python version.
if subprocess.run(f"{python_bin} -m pip --version",
if subprocess.run(f"{PYTHON_BIN} -m pip --version",
shell=True,
check=False,
capture_output=True).returncode != 0:
raise CleanError('pip (for {python_bin}) is required.')
raise CleanError(f'pip (for {PYTHON_BIN}) is required.')
# Check for some required python modules.
for modname, minver, packagename in REQUIRED_PYTHON_MODULES:
if packagename is None:
packagename = modname
if minver is not None:
results = subprocess.run(f'{python_bin} -m {modname} --version',
results = subprocess.run(f'{PYTHON_BIN} -m {modname} --version',
shell=True,
check=False,
capture_output=True)
else:
results = subprocess.run(f'{python_bin} -c "import {modname}"',
results = subprocess.run(f'{PYTHON_BIN} -c "import {modname}"',
shell=True,
check=False,
capture_output=True)
if results.returncode != 0:
raise CleanError(f'{packagename} (for {python_bin}) is required.\n'
f'To install it, try: "{python_bin}'
raise CleanError(f'{packagename} (for {PYTHON_BIN}) is required.\n'
f'To install it, try: "{PYTHON_BIN}'
f' -m pip install {packagename}"')
if minver is not None:
ver_line = results.stdout.decode().splitlines()[0]