Syncing latest changes between public/private.

Eric Froemling 2020-03-30 20:43:36 -07:00
parent c4dd59616b
commit 0577029294
14 changed files with 3087 additions and 2478 deletions

View File

@@ -265,6 +265,7 @@
<w>checkroundover</w>
<w>checksums</w>
<w>childnode</w>
<w>chinesetraditional</w>
<w>chipfork</w>
<w>chosenone</w>
<w>chromebooks</w>
@@ -347,6 +348,7 @@
<w>crashlytics</w>
<w>creditslist</w>
<w>cryptmodule</w>
<w>cssclass</w>
<w>cstr</w>
<w>csum</w>
<w>ctest</w>
@@ -752,6 +754,7 @@
<w>hacky</w>
<w>halign</w>
<w>handlemessage</w>
<w>hant</w>
<w>hashlines</w>
<w>hashobj</w>
<w>hashopenssl</w>

View File

@@ -424,6 +424,7 @@
"ba_data/data/languages/arabic.json",
"ba_data/data/languages/belarussian.json",
"ba_data/data/languages/chinese.json",
"ba_data/data/languages/chinesetraditional.json",
"ba_data/data/languages/croatian.json",
"ba_data/data/languages/czech.json",
"ba_data/data/languages/danish.json",

View File

@@ -17719,6 +17719,7 @@ DATA_TARGETS = \
build/ba_data/data/languages/dutch.json \
build/ba_data/data/languages/greek.json \
build/ba_data/data/languages/hindi.json \
build/ba_data/data/languages/chinesetraditional.json \
build/ba_data/data/languages/czech.json \
build/ba_data/data/languages/indonesian.json \
build/ba_data/data/languages/italian.json \

View File

@@ -34,7 +34,7 @@ NOTE: This file was autogenerated by gendummymodule; do not edit by hand.
"""
# (hash we can use to see if this file is out of date)
# SOURCES_HASH=173278787862740619468200739407134121759
# SOURCES_HASH=107308741262112812748560676667362932520
# I'm sorry Pylint. I know this file saddens you. Be strong.
# pylint: disable=useless-suppression

View File

@@ -66,7 +66,7 @@ class Activity(DependencyComponent):
# Annotating attr types at the class level lets us introspect them.
settings: Dict[str, Any]
teams: List[ba.Team]
players: List[_ba.Player]
players: List[ba.Player]
def __init__(self, settings: Dict[str, Any]):
"""Creates an activity in the current ba.Session.

View File

@@ -85,7 +85,8 @@ class App:
"""
# We don't yet support full unicode display on windows or linux :-(.
if (language in ('Chinese', 'Persian', 'Korean', 'Arabic', 'Hindi')
if (language in ('Chinese', 'ChineseTraditional', 'Persian', 'Korean',
'Arabic', 'Hindi')
and self.platform in ('windows', 'linux')):
return False
return True
@@ -119,7 +120,13 @@ class App:
'uk': 'Ukrainian',
'hi': 'Hindi'
}
language = languages.get(self.locale[:2], 'English')
# Special case Chinese: specific variations map to traditional.
# (otherwise will map to 'Chinese' which is simplified)
if self.locale in ('zh_HANT', 'zh_TW'):
language = 'ChineseTraditional'
else:
language = languages.get(self.locale[:2], 'English')
if not self.can_display_language(language):
language = 'English'
return language
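
Taken on its own, the new locale logic above resolves like this. This is a minimal standalone sketch, assuming a plain locale string and only the mapping entries visible in the diff; the resolve_language helper name is made up for illustration:

def resolve_language(locale: str) -> str:
    """Map a locale string such as 'zh_TW' to a language name (sketch)."""
    languages = {'zh': 'Chinese', 'uk': 'Ukrainian', 'hi': 'Hindi'}
    # Traditional-script Chinese locales get their own language; any other
    # 'zh' locale falls through to simplified 'Chinese'.
    if locale in ('zh_HANT', 'zh_TW'):
        return 'ChineseTraditional'
    return languages.get(locale[:2], 'English')

assert resolve_language('zh_TW') == 'ChineseTraditional'
assert resolve_language('zh_CN') == 'Chinese'

In the App code above the result is additionally gated by can_display_language(), falling back to 'English' on platforms that cannot yet display the script.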

View File

@@ -18,7 +18,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Enums generated by tools/update_python_enums_module in core."""
"""Enums generated by tools/update_python_enums_module in ba-internal."""
from enum import Enum

View File

@@ -414,6 +414,12 @@ def get_valid_languages() -> List[str]:
try:
names = os.listdir('ba_data/data/languages')
names = [n.replace('.json', '').capitalize() for n in names]
# FIXME: our simple capitalization fails on multi-word names;
# should handle this in a better way...
for i, name in enumerate(names):
if name == 'Chinesetraditional':
names[i] = 'ChineseTraditional'
except Exception:
from ba import _error
_error.print_exception()
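
One way to address the FIXME above without patching each name inside the loop is a small lookup table for names that plain str.capitalize() gets wrong. This is a minimal sketch, not the project's actual fix; the _SPECIAL_NAMES table, the helper name, and the default path are assumptions:

import os

_SPECIAL_NAMES = {'chinesetraditional': 'ChineseTraditional'}

def list_language_names(path: str = 'ba_data/data/languages') -> list:
    """Return display names for the language json files in path (sketch)."""
    names = []
    for fname in os.listdir(path):
        stem = fname.replace('.json', '')
        # Fall back to simple capitalization for single-word names.
        names.append(_SPECIAL_NAMES.get(stem, stem.capitalize()))
    return sorted(names)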

View File

@@ -33,7 +33,7 @@ if TYPE_CHECKING:
def _get_map_data(name: str) -> Dict[str, Any]:
import json
print('Would get map data', name)
with open('data/data/maps/' + name + '.json') as infile:
with open('ba_data/data/maps/' + name + '.json') as infile:
mapdata = json.loads(infile.read())
assert isinstance(mapdata, dict)
return mapdata

View File

@@ -169,7 +169,7 @@ class CreditsListWindow(ba.OldWindow):
freesound_names = _format_names(names, 90)
try:
with open('data/data/langdata.json') as infile:
with open('ba_data/data/langdata.json') as infile:
translation_contributors = (json.loads(
infile.read())['translation_contributors'])
except Exception:

View File

@@ -249,7 +249,7 @@ class AdvancedSettingsWindow(ba.OldWindow):
# so we don't have to go digging through each full language.
try:
import json
with open('data/data/langdata.json') as infile:
with open('ba_data/data/langdata.json') as infile:
lang_names_translated = (json.loads(
infile.read())['lang_names_translated'])
except Exception:
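
Both windows above follow the same pattern: read a single key out of ba_data/data/langdata.json and fall back gracefully if the read fails. A minimal sketch of that pattern; the helper name and the empty-list fallback are assumptions made for illustration, and the windows above handle the error in their own except blocks:

import json

def read_langdata_key(key: str) -> list:
    """Fetch one key from langdata.json, returning [] on any failure."""
    try:
        with open('ba_data/data/langdata.json') as infile:
            return json.loads(infile.read())[key]
    except Exception:
        # Assumed fallback; the real handlers report the error instead.
        return []

translation_contributors = read_langdata_key('translation_contributors')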

File diff suppressed because it is too large.

View File

@@ -721,6 +721,7 @@ def pycharm(projroot: Path, full: bool, verbose: bool) -> None:
import time
# FIXME: Generalize this to work with at least linux, possibly windows.
cachepath = Path('config/.cache-pycharm')
filenames = get_script_filenames(projroot)
pycharmroot = Path('/Applications/PyCharm CE.app')

View File

@@ -703,15 +703,10 @@ def update_docs_md() -> None:
curhash = get_files_hash(pysources)
# Extract the current embedded hash.
# with open(docs_path) as infile:
# lines = infile.readlines()
# hashlines = [l for l in lines if '<!--DOCSHASH=' in l]
# assert len(hashlines) == 1
# storedhash = hashlines[0][13:-4]
with open(docs_hash_path) as infile:
storedhash = infile.read()
if curhash != storedhash:
if curhash != storedhash or not os.path.exists(docs_path):
if check:
raise CleanError('Docs markdown is out of date.')
@@ -722,10 +717,8 @@ def update_docs_md() -> None:
# bits at the top.
with open('build/docs.html') as infile:
docs = infile.read()
docs = (
'<!-- THIS FILE IS AUTO GENERATED; DO NOT EDIT BY HAND -->\n'
# f'<!--DOCSHASH={curhash}-->\n'
) + docs
docs = ('<!-- THIS FILE IS AUTO GENERATED; DO NOT EDIT BY HAND -->\n'
) + docs
with open(docs_path, 'w') as outfile:
outfile.write(docs)
with open(docs_hash_path, 'w') as outfile:
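
The reworked check above reduces to: regenerate whenever the stored hash no longer matches the current sources, or the markdown file is missing entirely. A minimal self-contained sketch of that condition; the md5-over-file-contents stand-in for get_files_hash is an assumption, not necessarily how the project computes it:

import hashlib
import os

def get_files_hash(filenames) -> str:
    """Assumed stand-in: one digest over the contents of all source files."""
    md5 = hashlib.md5()
    for fname in sorted(filenames):
        with open(fname, 'rb') as infile:
            md5.update(infile.read())
    return md5.hexdigest()

def docs_out_of_date(pysources, docs_path, docs_hash_path) -> bool:
    """True if the docs need regenerating (the condition rebuilt above)."""
    curhash = get_files_hash(pysources)
    try:
        with open(docs_hash_path) as infile:
            storedhash = infile.read()
    except FileNotFoundError:
        # No stored hash yet; treat the docs as out of date.
        return True
    return curhash != storedhash or not os.path.exists(docs_path)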