Mirror of https://github.com/RYDE-WORK/ballistica.git (synced 2026-02-08 16:53:49 +08:00)

Commit 0577029294 (parent c4dd59616b): Syncing latest changes between public/private.

Changed: .idea/dictionaries/ericf.xml (generated) — 3 lines
@@ -265,6 +265,7 @@
 <w>checkroundover</w>
 <w>checksums</w>
 <w>childnode</w>
+<w>chinesetraditional</w>
 <w>chipfork</w>
 <w>chosenone</w>
 <w>chromebooks</w>
@@ -347,6 +348,7 @@
 <w>crashlytics</w>
 <w>creditslist</w>
 <w>cryptmodule</w>
+<w>cssclass</w>
 <w>cstr</w>
 <w>csum</w>
 <w>ctest</w>
@@ -752,6 +754,7 @@
 <w>hacky</w>
 <w>halign</w>
 <w>handlemessage</w>
+<w>hant</w>
 <w>hashlines</w>
 <w>hashobj</w>
 <w>hashopenssl</w>
@@ -424,6 +424,7 @@
 "ba_data/data/languages/arabic.json",
 "ba_data/data/languages/belarussian.json",
 "ba_data/data/languages/chinese.json",
+"ba_data/data/languages/chinesetraditional.json",
 "ba_data/data/languages/croatian.json",
 "ba_data/data/languages/czech.json",
 "ba_data/data/languages/danish.json",
@@ -17719,6 +17719,7 @@ DATA_TARGETS = \
 build/ba_data/data/languages/dutch.json \
 build/ba_data/data/languages/greek.json \
 build/ba_data/data/languages/hindi.json \
+build/ba_data/data/languages/chinesetraditional.json \
 build/ba_data/data/languages/czech.json \
 build/ba_data/data/languages/indonesian.json \
 build/ba_data/data/languages/italian.json \
@@ -34,7 +34,7 @@ NOTE: This file was autogenerated by gendummymodule; do not edit by hand.
 """
 
 # (hash we can use to see if this file is out of date)
-# SOURCES_HASH=173278787862740619468200739407134121759
+# SOURCES_HASH=107308741262112812748560676667362932520
 
 # I'm sorry Pylint. I know this file saddens you. Be strong.
 # pylint: disable=useless-suppression
@@ -66,7 +66,7 @@ class Activity(DependencyComponent):
     # Annotating attr types at the class level lets us introspect them.
     settings: Dict[str, Any]
     teams: List[ba.Team]
-    players: List[_ba.Player]
+    players: List[ba.Player]
 
     def __init__(self, settings: Dict[str, Any]):
         """Creates an activity in the current ba.Session.
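A brief aside on the class-level annotations above: declaring attribute types at class scope makes them visible to typing.get_type_hints(), which is presumably what the "lets us introspect them" comment refers to. The class below is a hypothetical stand-in, not the real ba.Activity; it is a minimal sketch of that introspection only.

# Minimal sketch: introspecting class-level attribute annotations.
# 'Activity' here is a stand-in, not the real ba.Activity.
from typing import Any, Dict, List, get_type_hints


class Activity:
    settings: Dict[str, Any]
    teams: List[str]
    players: List[str]


# get_type_hints resolves the annotations declared at class scope.
for attr, hint in get_type_hints(Activity).items():
    print(attr, '->', hint)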
@@ -85,7 +85,8 @@ class App:
         """
 
         # We don't yet support full unicode display on windows or linux :-(.
-        if (language in ('Chinese', 'Persian', 'Korean', 'Arabic', 'Hindi')
+        if (language in ('Chinese', 'ChineseTraditional', 'Persian', 'Korean',
+                         'Arabic', 'Hindi')
                 and self.platform in ('windows', 'linux')):
             return False
         return True
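To make the widened tuple above easier to check, here is a hedged, self-contained sketch of the same gate as a free function. The real check is a method on ba.App using self.platform; the platform parameter here is an assumption for illustration only.

# Sketch only: mirrors the language/platform gate from the diff above.
def can_display_language(language: str, platform: str) -> bool:
    # Full unicode display is assumed unsupported for these languages
    # on windows/linux, per the comment in the original code.
    if (language in ('Chinese', 'ChineseTraditional', 'Persian', 'Korean',
                     'Arabic', 'Hindi')
            and platform in ('windows', 'linux')):
        return False
    return True


assert can_display_language('ChineseTraditional', 'mac')
assert not can_display_language('ChineseTraditional', 'windows')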
@@ -119,6 +120,12 @@ class App:
             'uk': 'Ukrainian',
             'hi': 'Hindi'
         }
 
-        language = languages.get(self.locale[:2], 'English')
+        # Special case Chinese: specific variations map to traditional.
+        # (otherwise will map to 'Chinese' which is simplified)
+        if self.locale in ('zh_HANT', 'zh_TW'):
+            language = 'ChineseTraditional'
+        else:
+            language = languages.get(self.locale[:2], 'English')
         if not self.can_display_language(language):
             language = 'English'
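The locale handling added above can be sketched as a standalone resolver. The languages dict is trimmed to a few entries and the function name is made up here; treat it as an illustration of the mapping, not the actual ba.App logic.

# Sketch: resolve a locale string to a display language, special-casing
# traditional Chinese locales as in the diff above.
def resolve_language(locale: str) -> str:
    languages = {'zh': 'Chinese', 'uk': 'Ukrainian', 'hi': 'Hindi'}  # trimmed
    if locale in ('zh_HANT', 'zh_TW'):
        return 'ChineseTraditional'
    return languages.get(locale[:2], 'English')


print(resolve_language('zh_TW'))   # ChineseTraditional
print(resolve_language('zh_CN'))   # Chinese (simplified)
print(resolve_language('fr_FR'))   # English (fallback)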
@@ -18,7 +18,7 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 # -----------------------------------------------------------------------------
-"""Enums generated by tools/update_python_enums_module in core."""
+"""Enums generated by tools/update_python_enums_module in ba-internal."""
 
 from enum import Enum
 
@@ -414,6 +414,12 @@ def get_valid_languages() -> List[str]:
     try:
         names = os.listdir('ba_data/data/languages')
         names = [n.replace('.json', '').capitalize() for n in names]
+
+        # FIXME: our simple capitalization fails on multi-word names;
+        # should handle this in a better way...
+        for i, name in enumerate(names):
+            if name == 'Chinesetraditional':
+                names[i] = 'ChineseTraditional'
     except Exception:
         from ba import _error
         _error.print_exception()
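The FIXME above exists because str.capitalize() lowercases everything after the first character, so 'chinesetraditional' becomes 'Chinesetraditional'. Below is a sketch of the failure plus one possible generalisation (an explicit alias map); the alias-map approach is an assumption here, not what the project does, since the diff simply patches the single known multi-word name.

# Why the special case is needed:
print('chinesetraditional'.capitalize())   # -> 'Chinesetraditional'

# Hypothetical generalisation via an alias map:
ALIASES = {'chinesetraditional': 'ChineseTraditional'}


def language_name_from_filename(filename: str) -> str:
    stem = filename.replace('.json', '')
    return ALIASES.get(stem, stem.capitalize())


print(language_name_from_filename('chinesetraditional.json'))  # ChineseTraditional
print(language_name_from_filename('czech.json'))               # Czech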
@@ -33,7 +33,7 @@ if TYPE_CHECKING:
 def _get_map_data(name: str) -> Dict[str, Any]:
     import json
     print('Would get map data', name)
-    with open('data/data/maps/' + name + '.json') as infile:
+    with open('ba_data/data/maps/' + name + '.json') as infile:
         mapdata = json.loads(infile.read())
     assert isinstance(mapdata, dict)
     return mapdata
@@ -169,7 +169,7 @@ class CreditsListWindow(ba.OldWindow):
         freesound_names = _format_names(names, 90)
 
         try:
-            with open('data/data/langdata.json') as infile:
+            with open('ba_data/data/langdata.json') as infile:
                 translation_contributors = (json.loads(
                     infile.read())['translation_contributors'])
         except Exception:
@@ -249,7 +249,7 @@ class AdvancedSettingsWindow(ba.OldWindow):
         # so we don't have to go digging through each full language.
         try:
             import json
-            with open('data/data/langdata.json') as infile:
+            with open('ba_data/data/langdata.json') as infile:
                 lang_names_translated = (json.loads(
                     infile.read())['lang_names_translated'])
         except Exception:
docs/ba_module.md — 5517 lines changed (file diff suppressed because it is too large)
@@ -721,6 +721,7 @@ def pycharm(projroot: Path, full: bool, verbose: bool) -> None:
 
     import time
 
+    # FIXME: Generalize this to work with at least linux, possibly windows.
     cachepath = Path('config/.cache-pycharm')
     filenames = get_script_filenames(projroot)
     pycharmroot = Path('/Applications/PyCharm CE.app')
@@ -703,15 +703,10 @@ def update_docs_md() -> None:
     curhash = get_files_hash(pysources)
 
     # Extract the current embedded hash.
-    # with open(docs_path) as infile:
-    #     lines = infile.readlines()
-    #     hashlines = [l for l in lines if '<!--DOCSHASH=' in l]
-    #     assert len(hashlines) == 1
-    #     storedhash = hashlines[0][13:-4]
     with open(docs_hash_path) as infile:
         storedhash = infile.read()
 
-    if curhash != storedhash:
+    if curhash != storedhash or not os.path.exists(docs_path):
        if check:
             raise CleanError('Docs markdown is out of date.')
 
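The check above regenerates the docs when the hash of the Python sources differs from the stored hash, and now also when the output file is missing. A self-contained sketch of that pattern follows; get_files_hash is the project's own helper, so the hashlib-based stand-in and the function names here are assumptions for illustration.

# Sketch of the "regenerate if sources changed or output missing" pattern.
import hashlib
import os
from typing import List


def files_hash(paths: List[str]) -> str:
    # Stand-in for the project's get_files_hash(); hashes file contents.
    md5 = hashlib.md5()
    for path in sorted(paths):
        with open(path, 'rb') as infile:
            md5.update(infile.read())
    return md5.hexdigest()


def docs_out_of_date(sources: List[str], docs_path: str,
                     docs_hash_path: str) -> bool:
    curhash = files_hash(sources)
    storedhash = ''
    if os.path.exists(docs_hash_path):
        with open(docs_hash_path) as infile:
            storedhash = infile.read()
    return curhash != storedhash or not os.path.exists(docs_path)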
@@ -722,9 +717,7 @@ def update_docs_md() -> None:
     # bits at the top.
     with open('build/docs.html') as infile:
         docs = infile.read()
-    docs = (
-        '<!-- THIS FILE IS AUTO GENERATED; DO NOT EDIT BY HAND -->\n'
-        # f'<!--DOCSHASH={curhash}-->\n'
+    docs = ('<!-- THIS FILE IS AUTO GENERATED; DO NOT EDIT BY HAND -->\n'
     ) + docs
     with open(docs_path, 'w') as outfile:
         outfile.write(docs)