removed efro.entity now that efro.dataclassio has completely replaced it

Eric Froemling 2021-10-01 17:36:52 -05:00
parent 36af7d7154
commit 7ecb16bd7c
No known key found for this signature in database
GPG Key ID: 89C93F0F8D6D5A98
16 changed files with 84 additions and 2683 deletions
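Every migration in this commit follows the same shape: an entity.Entity or entity.CompoundValue subclass declaring entity.Field objects becomes a plain @ioprepped dataclass, with the short storage keys moving into IOAttrs annotations. A minimal sketch of that pattern (the Profile class and its values are hypothetical; the decorators, Annotated/IOAttrs usage, and dataclass_to_json call mirror the conversions in the diffs below):

from dataclasses import dataclass

from typing_extensions import Annotated

from efro.dataclassio import ioprepped, IOAttrs, dataclass_to_json


# Old style (removed in this commit):
#
#   class Profile(entity.Entity):
#       name = entity.Field('n', entity.StringValue())
#       score = entity.Field('s', entity.IntValue(default=0))
#
# New style: a regular dataclass; IOAttrs keeps the short storage keys
# so previously serialized data keeps the same shape.
@ioprepped
@dataclass
class Profile:
    name: Annotated[str, IOAttrs('n')]
    score: Annotated[int, IOAttrs('s')] = 0


print(dataclass_to_json(Profile(name='test', score=12)))
# -> something like: {"n": "test", "s": 12}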

View File

@@ -3937,50 +3937,50 @@
"assets/src/ba_data/python/ba/_generated/__init__.py": "https://files.ballistica.net/cache/ba1/ee/e8/cad05aa531c7faf7ff7b96db7f6e",
"assets/src/ba_data/python/ba/_generated/enums.py": "https://files.ballistica.net/cache/ba1/72/82/86956fae909ac2fe2a1abd84a361",
"ballisticacore-windows/Generic/BallisticaCore.ico": "https://files.ballistica.net/cache/ba1/89/c0/e32c7d2a35dc9aef57cc73b0911a",
"build/prefab/full/linux_arm64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/11/56/91edee26ca9624941cfa5b800711",
"build/prefab/full/linux_arm64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/99/5f/402d8ccede14cf3662b749196a35",
"build/prefab/full/linux_arm64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/74/1d/fc9e33e565475daaac80da5252f0",
"build/prefab/full/linux_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/61/be/99fd74e9a331ce2c3ad926ad77fc",
"build/prefab/full/linux_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/78/e2/bce7116eebed3691a6ff58d4bc54",
"build/prefab/full/linux_x86_64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/04/71/9325a193cfbe7da47a7c7f94ebe1",
"build/prefab/full/linux_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/6b/8b/ebc856f3100c32c790b06facf231",
"build/prefab/full/linux_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/f2/0a/e33dba2bd26d802839993bc9e2d4",
"build/prefab/full/linux_x86_64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/cf/e3/042b1f2d634aa6d61e361eeba41f",
"build/prefab/full/linux_x86_64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/9c/7b/ac1a200be0f37078af0991faca3b",
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/3a/b9/0e46794d391cc9e9f3fac544ed49",
"build/prefab/full/linux_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/6d/a2/5da76344c31c4c8e943a56f8e5f8",
"build/prefab/full/mac_arm64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/53/e4/d1ac8975b8e0a541128f7a6c66ae",
"build/prefab/full/mac_arm64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/02/e2/65829a6d47ad9bfa5d41d08e1c97",
"build/prefab/full/mac_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/b2/0d/efb0ad546c078a38f8081b52e9c7",
"build/prefab/full/mac_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/48/b2/f84775fd3a3062b0b57c522d8089",
"build/prefab/full/mac_x86_64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/46/cc/0b0194a60c85d83312b5a56ec9d9",
"build/prefab/full/mac_x86_64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/b2/8b/5cf5de24316b6f58956f722db868",
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/10/b6/c6ee48d8f08c5ed96f9ca178379b",
"build/prefab/full/mac_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/d2/e6/78abc63f12b3c39100f79633cb22",
"build/prefab/full/windows_x86_gui/debug/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/16/ac/87ba32912d227e31b0d8532109b2",
"build/prefab/full/windows_x86_gui/release/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/56/45/c4e65995536cec438e0270b9b911",
"build/prefab/full/windows_x86_server/debug/dist/BallisticaCoreHeadless.exe": "https://files.ballistica.net/cache/ba1/a7/f1/ec2b457b078b1b96dc056aae30c7",
"build/prefab/full/windows_x86_server/release/dist/BallisticaCoreHeadless.exe": "https://files.ballistica.net/cache/ba1/2a/5a/4eec69d53dfb3ef6c18c03d2f473",
"build/prefab/lib/linux_arm64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/0e/ec/7864925a34d1ac2424c57cd86953",
"build/prefab/lib/linux_arm64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/80/90/bcdcade80c14a94051b50eb4d5e0",
"build/prefab/lib/linux_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/b0/63/c8c7122ded855fcb1bb9260eae98",
"build/prefab/lib/linux_arm64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/2e/eb/504d1a86aafa6630c4f1ce7edb63",
"build/prefab/lib/linux_x86_64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/dc/50/c8f5633877ca74490ec86dff2cd2",
"build/prefab/lib/linux_x86_64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/88/2b/23a0f1e592614342ef89bd4fc24a",
"build/prefab/lib/linux_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/db/6d/657a49d655a1eec135a94d560cd3",
"build/prefab/lib/linux_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/ca/37/f957a2054f8370deb7bfe144db50",
"build/prefab/lib/mac_arm64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/6e/b9/3fb76b4a2b665b9c6fe3248f9585",
"build/prefab/lib/mac_arm64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/21/97/32778aa1e872e65b9c9a20227386",
"build/prefab/lib/mac_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/f8/e7/4a83daa2239ac5bf48bd4bdc98d1",
"build/prefab/lib/mac_arm64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/c0/43/aaea8b918b6d438de7236a3e1dfd",
"build/prefab/lib/mac_x86_64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/04/65/e8b88eca7a996d736b98242b4c90",
"build/prefab/lib/mac_x86_64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/6f/a8/7384b203ae56c2504057748c7e86",
"build/prefab/lib/mac_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/11/56/f3aeecacfa0acfbbb4032f98e75c",
"build/prefab/lib/mac_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/7b/1a/6dc287f1258a427068467e8d5238",
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreGenericInternal.lib": "https://files.ballistica.net/cache/ba1/07/60/5242b82fbcc8ffb69e93802b9b27",
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreGenericInternal.pdb": "https://files.ballistica.net/cache/ba1/07/78/105d0bc438f8fb23f7cc02ba0b4c",
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreHeadlessInternal.lib": "https://files.ballistica.net/cache/ba1/c5/3f/ee71637551395c36f7d14bccb7c4",
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreHeadlessInternal.pdb": "https://files.ballistica.net/cache/ba1/54/5a/ed52f47776a8be311cc532d986ea",
"build/prefab/lib/windows/Release_Win32/BallisticaCoreGenericInternal.lib": "https://files.ballistica.net/cache/ba1/90/d2/0944e4e3fe19a80fe006248ded8b",
"build/prefab/lib/windows/Release_Win32/BallisticaCoreGenericInternal.pdb": "https://files.ballistica.net/cache/ba1/70/c8/a3a65b4e3027d4cd9710f9dcdd7a",
"build/prefab/lib/windows/Release_Win32/BallisticaCoreHeadlessInternal.lib": "https://files.ballistica.net/cache/ba1/0b/47/bfe080496d98dac040b3552edb2d",
"build/prefab/lib/windows/Release_Win32/BallisticaCoreHeadlessInternal.pdb": "https://files.ballistica.net/cache/ba1/fe/37/c82d017d3b9442bf9148fffd0c5b",
"build/prefab/full/linux_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/b6/a7/76a39b05d1db2d49dc3335679573",
"build/prefab/full/linux_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/79/56/a1576a8bee94a47cdd633aa86a3b",
"build/prefab/full/mac_arm64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/2b/8d/4b1084de33fe4ef3a2d0313d6330",
"build/prefab/full/mac_arm64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/a4/27/cb310b4e2eeb51fb9114f6d402ea",
"build/prefab/full/mac_arm64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/8a/97/ad31231721be8515b37bb3cf6fc0",
"build/prefab/full/mac_arm64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/9e/1e/81ad96ec96e0a3c90fb69d24b38e",
"build/prefab/full/mac_x86_64_gui/debug/ballisticacore": "https://files.ballistica.net/cache/ba1/f6/99/b27c665fcf9f8548abf3f162a736",
"build/prefab/full/mac_x86_64_gui/release/ballisticacore": "https://files.ballistica.net/cache/ba1/f6/e2/a712749d266aa8e6c6dade74f08d",
"build/prefab/full/mac_x86_64_server/debug/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/c2/70/3de0ba133d288111ae3fc386fbf5",
"build/prefab/full/mac_x86_64_server/release/dist/ballisticacore_headless": "https://files.ballistica.net/cache/ba1/0a/b8/9e6a5d93f61e30f817dc76fbe2e5",
"build/prefab/full/windows_x86_gui/debug/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/fa/2a/6b1a7111628941ff45a4bb6d941d",
"build/prefab/full/windows_x86_gui/release/BallisticaCore.exe": "https://files.ballistica.net/cache/ba1/6c/3d/6d0a3cbe3ac46e43eb19ed30dcc6",
"build/prefab/full/windows_x86_server/debug/dist/BallisticaCoreHeadless.exe": "https://files.ballistica.net/cache/ba1/59/2c/fd6ddf8cceee5900d68cd3da0435",
"build/prefab/full/windows_x86_server/release/dist/BallisticaCoreHeadless.exe": "https://files.ballistica.net/cache/ba1/a1/08/219321b9f73ebcd7e207587a7c14",
"build/prefab/lib/linux_arm64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/1e/ef/7a8262418694d65b440984bd7a6e",
"build/prefab/lib/linux_arm64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/70/93/b15380a02bb72a52a2b9f655f482",
"build/prefab/lib/linux_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/80/b2/9bb276e33ecbcc44d6a36d7cfcf5",
"build/prefab/lib/linux_arm64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/44/56/44ffa9866e2df7cf3545e2b93ee8",
"build/prefab/lib/linux_x86_64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/5f/b0/4cd12e393365fb35409aa9296d3c",
"build/prefab/lib/linux_x86_64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/3f/64/949588e4db6f5958bcd8f92e1da8",
"build/prefab/lib/linux_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/04/e6/2d4a50696f8e8c63d88fe099e5bf",
"build/prefab/lib/linux_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/62/d9/475a8f5f0300a33ce98b2bff5ca1",
"build/prefab/lib/mac_arm64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/f0/93/4846540264e3d4bd459f1c2cc5d1",
"build/prefab/lib/mac_arm64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/53/e9/7590d921630f3bf7e2b4033399d7",
"build/prefab/lib/mac_arm64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/d7/96/8bd45dfb6ca6ddcd6ea72452df1d",
"build/prefab/lib/mac_arm64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/8b/50/112bedfb0231ddd07f99af277600",
"build/prefab/lib/mac_x86_64_gui/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/51/d8/2fe4ba3f4cbc99e3ae34589bd80d",
"build/prefab/lib/mac_x86_64_gui/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/fb/af/0523542964b3308114ff6fc40359",
"build/prefab/lib/mac_x86_64_server/debug/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/50/a7/6993a6a8e83d3d21f95209c93c9b",
"build/prefab/lib/mac_x86_64_server/release/libballisticacore_internal.a": "https://files.ballistica.net/cache/ba1/cf/e7/54d0c8f1460901fa217ffaf51663",
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreGenericInternal.lib": "https://files.ballistica.net/cache/ba1/6a/8e/37e1468bbdc68e6fae9336f18eb7",
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreGenericInternal.pdb": "https://files.ballistica.net/cache/ba1/98/b7/4a2cda209924e0a7015d64812a34",
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreHeadlessInternal.lib": "https://files.ballistica.net/cache/ba1/d0/2d/1c59860ba3367e21b05340cd29c2",
"build/prefab/lib/windows/Debug_Win32/BallisticaCoreHeadlessInternal.pdb": "https://files.ballistica.net/cache/ba1/46/79/7113c0a9f1507bf83cf3c09d9e22",
"build/prefab/lib/windows/Release_Win32/BallisticaCoreGenericInternal.lib": "https://files.ballistica.net/cache/ba1/ba/60/50b6aa57af123014ed571e554164",
"build/prefab/lib/windows/Release_Win32/BallisticaCoreGenericInternal.pdb": "https://files.ballistica.net/cache/ba1/9a/e9/5c01cec33f278b0f12c34cab37da",
"build/prefab/lib/windows/Release_Win32/BallisticaCoreHeadlessInternal.lib": "https://files.ballistica.net/cache/ba1/70/7b/3a749f597894ce7a78c062aa9179",
"build/prefab/lib/windows/Release_Win32/BallisticaCoreHeadlessInternal.pdb": "https://files.ballistica.net/cache/ba1/63/5c/770f212a9653106111cfa50416ba",
"src/ballistica/generated/python_embedded/binding.inc": "https://files.ballistica.net/cache/ba1/6c/63/2b8cbb65a0e1daa45d59e9f87e9a",
"src/ballistica/generated/python_embedded/bootstrap.inc": "https://files.ballistica.net/cache/ba1/c0/2f/a8b50faad086fba4fe936e863c20"
}

View File

@@ -700,11 +700,6 @@ test-assetmanager:
@tools/pcommand pytest -o log_cli=true -o log_cli_level=debug -s -vv \
tests/test_ba/test_assetmanager.py::test_assetmanager
# Individual test with extra output enabled.
test-dataclassio:
@tools/pcommand pytest -o log_cli=true -o log_cli_level=debug -s -vv \
tests/test_efro/test_dataclassio.py
# Individual test with extra output enabled.
test-message:
@tools/pcommand pytest -o log_cli=true -o log_cli_level=debug -s -vv \

View File

@@ -517,20 +517,6 @@
"ba_data/python/efro/dataclassio/_pathcapture.py",
"ba_data/python/efro/dataclassio/_prep.py",
"ba_data/python/efro/dataclassio/extras.py",
"ba_data/python/efro/entity/__init__.py",
"ba_data/python/efro/entity/__pycache__/__init__.cpython-38.opt-1.pyc",
"ba_data/python/efro/entity/__pycache__/_base.cpython-38.opt-1.pyc",
"ba_data/python/efro/entity/__pycache__/_entity.cpython-38.opt-1.pyc",
"ba_data/python/efro/entity/__pycache__/_field.cpython-38.opt-1.pyc",
"ba_data/python/efro/entity/__pycache__/_support.cpython-38.opt-1.pyc",
"ba_data/python/efro/entity/__pycache__/_value.cpython-38.opt-1.pyc",
"ba_data/python/efro/entity/__pycache__/util.cpython-38.opt-1.pyc",
"ba_data/python/efro/entity/_base.py",
"ba_data/python/efro/entity/_entity.py",
"ba_data/python/efro/entity/_field.py",
"ba_data/python/efro/entity/_support.py",
"ba_data/python/efro/entity/_value.py",
"ba_data/python/efro/entity/util.py",
"ba_data/python/efro/error.py",
"ba_data/python/efro/json.py",
"ba_data/python/efro/message.py",

View File

@@ -650,13 +650,6 @@ SCRIPT_TARGETS_PY_PUBLIC_TOOLS = \
build/ba_data/python/efro/dataclassio/_pathcapture.py \
build/ba_data/python/efro/dataclassio/_prep.py \
build/ba_data/python/efro/dataclassio/extras.py \
build/ba_data/python/efro/entity/__init__.py \
build/ba_data/python/efro/entity/_base.py \
build/ba_data/python/efro/entity/_entity.py \
build/ba_data/python/efro/entity/_field.py \
build/ba_data/python/efro/entity/_support.py \
build/ba_data/python/efro/entity/_value.py \
build/ba_data/python/efro/entity/util.py \
build/ba_data/python/efro/error.py \
build/ba_data/python/efro/json.py \
build/ba_data/python/efro/message.py \
@@ -678,13 +671,6 @@ SCRIPT_TARGETS_PYC_PUBLIC_TOOLS = \
build/ba_data/python/efro/dataclassio/__pycache__/_pathcapture.cpython-38.opt-1.pyc \
build/ba_data/python/efro/dataclassio/__pycache__/_prep.cpython-38.opt-1.pyc \
build/ba_data/python/efro/dataclassio/__pycache__/extras.cpython-38.opt-1.pyc \
build/ba_data/python/efro/entity/__pycache__/__init__.cpython-38.opt-1.pyc \
build/ba_data/python/efro/entity/__pycache__/_base.cpython-38.opt-1.pyc \
build/ba_data/python/efro/entity/__pycache__/_entity.cpython-38.opt-1.pyc \
build/ba_data/python/efro/entity/__pycache__/_field.cpython-38.opt-1.pyc \
build/ba_data/python/efro/entity/__pycache__/_support.cpython-38.opt-1.pyc \
build/ba_data/python/efro/entity/__pycache__/_value.cpython-38.opt-1.pyc \
build/ba_data/python/efro/entity/__pycache__/util.cpython-38.opt-1.pyc \
build/ba_data/python/efro/__pycache__/error.cpython-38.opt-1.pyc \
build/ba_data/python/efro/__pycache__/json.cpython-38.opt-1.pyc \
build/ba_data/python/efro/__pycache__/message.cpython-38.opt-1.pyc \

View File

@@ -4,7 +4,8 @@
from __future__ import annotations
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Dict
from dataclasses import dataclass, field
from pathlib import Path
import threading
import urllib.request
@@ -14,21 +15,30 @@ import time
import os
import sys
from efro import entity
from typing_extensions import Annotated
from efro.dataclassio import (ioprepped, IOAttrs, dataclass_from_json,
dataclass_to_json)
if TYPE_CHECKING:
from bacommon.assets import AssetPackageFlavor
from typing import List
class FileValue(entity.CompoundValue):
@ioprepped
@dataclass
class FileValue:
"""State for an individual file."""
class State(entity.Entity):
@ioprepped
@dataclass
class State:
"""Holds all persistent state for the asset-manager."""
files = entity.CompoundDictField('files', str, FileValue())
files: Annotated[Dict[str, FileValue],
IOAttrs('files')] = field(default_factory=dict)
# files = entity.CompoundDictField('files', str, FileValue())
class AssetManager:
@@ -102,7 +112,7 @@ class AssetManager:
state_path = self.state_path
if state_path.exists():
with open(self.state_path, encoding='utf-8') as infile:
self._state = State.from_json_str(infile.read())
self._state = dataclass_from_json(State, infile.read())
return
except Exception:
logging.exception('Error loading existing AssetManager state')
@@ -114,7 +124,7 @@
print('ASSET-MANAGER SAVING STATE')
try:
with open(self.state_path, 'w', encoding='utf-8') as outfile:
outfile.write(self._state.to_json_str())
outfile.write(dataclass_to_json(self._state))
except Exception:
logging.exception('Error writing AssetManager state')
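As a quick sanity check on the new save/load path above, a minimal round-trip sketch using the State class from this diff (the exact JSON shape is approximate, and an empty State is used so no assumptions about FileValue's fields are needed):

from efro.dataclassio import dataclass_from_json, dataclass_to_json

# Serialize the way the save path above now does...
state = State()
json_str = dataclass_to_json(state)  # -> something like {"files": {}}

# ...and read it back the way the load path now does.
restored = dataclass_from_json(State, json_str)
assert isinstance(restored, State)
assert restored.files == {}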

View File

@@ -11,8 +11,6 @@ __EFRO_MYPY_STANDARD_SETTINGS__
# repeating ourself)
[mypy-ba]
no_implicit_reexport = False
[mypy-efro.entity]
no_implicit_reexport = False
[mypy-ba.internal]
no_implicit_reexport = False
[mypy-ba.deprecated]
@@ -33,9 +31,6 @@ ignore_errors = True
[mypy-astroid.*]
ignore_missing_imports = True
[mypy-pytest.*]
ignore_missing_imports = True
[mypy-efrotools.pylintplugins]
disallow_any_unimported = False

View File

@@ -1,448 +0,0 @@
# Released under the MIT License. See LICENSE for details.
#
"""Testing entity functionality."""
from __future__ import annotations
from typing import TYPE_CHECKING
from enum import Enum, unique
import pytest
# Seeming to get some non-deterministic behavior here as of pylint 2.6.0
# Where sometimes pylint wants these in one order and sometimes another.
# pylint: disable=useless-suppression
# pylint: disable=wrong-import-order
from efro import entity
from efrotools.statictest import static_type_equals
# pylint: enable=useless-suppression
if TYPE_CHECKING:
pass
@unique
class EnumTest(Enum):
"""Testing..."""
FIRST = 0
SECOND = 1
@unique
class EnumTest2(Enum):
"""Testing..."""
FIRST = 0
SECOND = 1
class SubCompoundTest(entity.CompoundValue):
"""Testing..."""
subval = entity.Field('b', entity.BoolValue())
class CompoundTest(entity.CompoundValue):
"""Testing..."""
isubval = entity.Field('i', entity.IntValue(default=34532))
compoundlist = entity.CompoundListField('l', SubCompoundTest())
class CompoundTest2(CompoundTest):
"""Testing..."""
isubval2 = entity.Field('i2', entity.IntValue(default=3453))
class EntityTest(entity.Entity):
"""Testing..."""
ival = entity.Field('i', entity.IntValue(default=345))
sval = entity.Field('s', entity.StringValue(default='svvv'))
bval = entity.Field('b', entity.BoolValue(default=True))
fval = entity.Field('f', entity.FloatValue(default=1.0))
grp = entity.CompoundField('g', CompoundTest())
grp2 = entity.CompoundField('g2', CompoundTest2())
enumval = entity.Field('e', entity.EnumValue(EnumTest, default=None))
enumval2 = entity.Field(
'e2', entity.OptionalEnumValue(EnumTest, default=EnumTest.SECOND))
slval = entity.ListField('sl', entity.StringValue())
tval2 = entity.Field('t2', entity.DateTimeValue())
str_int_dict = entity.DictField('sd', str, entity.IntValue())
enum_int_dict = entity.DictField('ed', EnumTest, entity.IntValue())
compoundlist = entity.CompoundListField('l', CompoundTest())
compoundlist2 = entity.CompoundListField('l2', CompoundTest())
compoundlist3 = entity.CompoundListField('l3', CompoundTest2())
compounddict = entity.CompoundDictField('td', str, CompoundTest())
compounddict2 = entity.CompoundDictField('td2', str, CompoundTest())
compounddict3 = entity.CompoundDictField('td3', str, CompoundTest2())
compounddict4 = entity.CompoundDictField('td4', EnumTest, CompoundTest())
fval2 = entity.Field('f2', entity.Float3Value())
def test_entity_values() -> None:
"""Test various entity assigns for value and type correctness."""
# pylint: disable=too-many-statements
ent = EntityTest()
# Simple int field.
with pytest.raises(TypeError):
# noinspection PyTypeHints
ent.ival = 'strval' # type: ignore
assert static_type_equals(ent.ival, int)
assert isinstance(ent.ival, int)
assert ent.ival == 345
ent.ival = 346
assert ent.ival == 346
# Simple float field.
with pytest.raises(TypeError):
# noinspection PyTypeHints
ent.fval = 'foo' # type: ignore
assert static_type_equals(ent.fval, float)
ent.fval = 2
ent.fval = True
ent.fval = 1.0
# Simple value list field.
assert not ent.slval # bool operator
assert len(ent.slval) == 0
with pytest.raises(TypeError):
ent.slval.append(1) # type: ignore
ent.slval.append('blah')
assert ent.slval # bool operator
assert len(ent.slval) == 1
assert list(ent.slval) == ['blah']
with pytest.raises(TypeError):
# noinspection PyTypeHints
ent.slval = ['foo', 'bar', 1] # type: ignore
# Simple value dict field.
assert not ent.str_int_dict # bool operator
assert 'foo' not in ent.str_int_dict
# Set with incorrect key type should give TypeError.
with pytest.raises(TypeError):
ent.str_int_dict[0] = 123 # type: ignore
# And set with incorrect value type should do same.
with pytest.raises(TypeError):
ent.str_int_dict['foo'] = 'bar' # type: ignore
ent.str_int_dict['foo'] = 123
assert ent.str_int_dict # bool operator
assert static_type_equals(ent.str_int_dict['foo'], int)
assert ent.str_int_dict['foo'] == 123
# Simple dict with enum key.
assert EnumTest.FIRST not in ent.enum_int_dict
ent.enum_int_dict[EnumTest.FIRST] = 234
assert EnumTest.FIRST in ent.enum_int_dict
assert ent.enum_int_dict[EnumTest.FIRST] == 234
# Set with incorrect key type should give TypeError.
with pytest.raises(TypeError):
ent.enum_int_dict[0] = 123 # type: ignore
with pytest.raises(TypeError):
ent.enum_int_dict[EnumTest2.FIRST] = 123 # type: ignore
# And set with incorrect value type should do same.
with pytest.raises(TypeError):
ent.enum_int_dict[EnumTest.FIRST] = 'bar' # type: ignore
# Make sure it is stored as the underlying type (though we convert ints to strs).
assert ent.d_data['ed'] == {'0': 234}
# Make sure assignment as dict works correctly with enum keys.
ent.enum_int_dict = {EnumTest.FIRST: 235}
assert ent.enum_int_dict[EnumTest.FIRST] == 235
# Make sure invalid raw enum values are caught.
ent2 = EntityTest()
ent2.set_data({})
ent2.set_data({'ed': {0: 111}})
with pytest.raises(ValueError):
ent2.set_data({'ed': {5: 111}})
# Waaah; this works at runtime, but it seems that we'd need
# to have BoundDictField inherit from Mapping for mypy to accept this.
# (which seems to get a bit ugly, but may be worth revisiting)
# assert dict(ent.str_int_dict) == {'foo': 123}
# Passing key/value pairs as a list works though..
assert dict(ent.str_int_dict.items()) == {'foo': 123}
def test_entity_values_2() -> None:
"""Test various entity assigns for value and type correctness."""
# pylint: disable=too-many-statements
ent = EntityTest()
# Compound value
assert static_type_equals(ent.grp, CompoundTest)
assert static_type_equals(ent.grp.isubval, int)
assert isinstance(ent.grp.isubval, int)
with pytest.raises(TypeError):
# noinspection PyTypeHints
ent.grp.isubval = 'blah' # type: ignore
# Compound value inheritance.
assert ent.grp2.isubval2 == 3453
assert ent.grp2.isubval == 34532
# Compound list field.
with pytest.raises(IndexError):
print(ent.compoundlist[0])
with pytest.raises(TypeError):
ent.compoundlist[0] = 123 # type: ignore
assert len(ent.compoundlist) == 0
assert not ent.compoundlist # bool operator
ent.compoundlist.append()
assert ent.compoundlist # bool operator
assert len(ent.compoundlist) == 1
assert static_type_equals(ent.compoundlist[0], CompoundTest)
# Compound dict field.
assert not ent.compounddict # bool operator
cdval = ent.compounddict.add('foo')
assert ent.compounddict # bool operator
assert static_type_equals(cdval, CompoundTest)
# Set with incorrect key type should give TypeError.
with pytest.raises(TypeError):
_cdval2 = ent.compounddict.add(1) # type: ignore
# Hmm; should this throw a TypeError and not a KeyError?..
with pytest.raises(TypeError):
_cdval3 = ent.compounddict[1] # type: ignore
assert static_type_equals(ent.compounddict['foo'], CompoundTest)
# Enum value
with pytest.raises(ValueError):
# noinspection PyTypeHints
ent.enumval = None # type: ignore
assert ent.enumval is EnumTest.FIRST
# Compound dict with enum key.
assert not ent.compounddict4 # bool operator
assert EnumTest.FIRST not in ent.compounddict4
_cd4val = ent.compounddict4.add(EnumTest.FIRST)
assert ent.compounddict4 # bool operator
assert EnumTest.FIRST in ent.compounddict4
ent.compounddict4[EnumTest.FIRST].isubval = 222
assert ent.compounddict4[EnumTest.FIRST].isubval == 222
with pytest.raises(TypeError):
ent.compounddict4[0].isubval = 222 # type: ignore
assert static_type_equals(ent.compounddict4[EnumTest.FIRST], CompoundTest)
# Make sure enum keys are stored as underlying type.
# (though with ints converted to strs)
assert ent.d_data['td4'] == {'0': {'i': 222, 'l': []}}
# Make sure assignment as dict works correctly with enum keys.
ent.compounddict4 = {EnumTest.SECOND: ent.compounddict4[EnumTest.FIRST]}
assert ent.compounddict4[EnumTest.SECOND].isubval == 222
# Optional Enum value
ent.enumval2 = None
assert ent.enumval2 is None
# Nested compound values
assert not ent.grp.compoundlist # bool operator
val = ent.grp.compoundlist.append()
assert ent.grp.compoundlist # bool operator
assert static_type_equals(val, SubCompoundTest)
assert static_type_equals(ent.grp.compoundlist[0], SubCompoundTest)
assert static_type_equals(ent.grp.compoundlist[0].subval, bool)
# Make sure we can digest the same data we spit out.
ent.set_data(ent.d_data)
def test_field_copies() -> None:
"""Test copying various values between fields."""
ent1 = EntityTest()
ent2 = EntityTest()
# Copying a simple value.
ent1.ival = 334
ent2.ival = ent1.ival
assert ent2.ival == 334
# Copying a nested compound.
ent1.grp.isubval = 543
ent2.grp = ent1.grp
assert ent2.grp.isubval == 543
# Type-checker currently allows this because both are Compounds
# but should fail at runtime since their subfield arrangement differs.
with pytest.raises(ValueError):
ent2.grp = ent1.grp2
# Copying a value list.
ent1.slval = ['foo', 'bar']
assert ent1.slval == ['foo', 'bar']
ent2.slval = ent1.slval
assert ent2.slval == ['foo', 'bar']
# Copying a value dict.
ent1.str_int_dict['tval'] = 987
ent2.str_int_dict = ent1.str_int_dict
assert ent2.str_int_dict['tval'] == 987
# Copying a CompoundList
val = ent1.compoundlist.append()
val.isubval = 356
assert ent1.compoundlist[0].isubval == 356
assert len(ent1.compoundlist) == 1
ent1.compoundlist.append()
assert len(ent1.compoundlist) == 2
assert len(ent2.compoundlist) == 0
# Copying to the same field on different obj should work.
ent2.compoundlist = ent1.compoundlist
assert ent2.compoundlist[0].isubval == 356
assert len(ent2.compoundlist) == 2
# Cross-field assigns should work too if the field layouts match..
ent1.compoundlist2 = ent1.compoundlist
# And not if they don't...
# (in this case mypy errors too but that may not always be the case)
with pytest.raises(ValueError):
# noinspection PyTypeHints
ent1.compoundlist3 = ent1.compoundlist # type: ignore
# Copying a CompoundDict
ent1.compounddict.add('foo')
ent1.compounddict.add('bar')
assert static_type_equals(ent1.compounddict['foo'].isubval, int)
ent1.compounddict['foo'].isubval = 23
# Copying to the same field on different obj should work.
ent2.compounddict = ent1.compounddict
assert ent2.compounddict.keys() == ['foo', 'bar']
assert ent2.compounddict['foo'].isubval == 23
# Cross field assigns should work too if the field layouts match..
ent1.compounddict2 = ent1.compounddict
# ..And should fail otherwise.
# (mypy catches this too, but that may not always be the case if
# two CompoundValues have the same type but different layouts based
# on their __init__ args or whatnot)
with pytest.raises(ValueError):
# noinspection PyTypeHints
ent1.compounddict3 = ent1.compounddict # type: ignore
# Make sure invalid key types get caught when setting a full dict:
with pytest.raises(TypeError):
ent1.compounddict2 = {
'foo': ent1.compounddict['foo'],
2: ent1.compounddict['bar'], # type: ignore
}
def test_field_access_from_type() -> None:
"""Accessing fields through type objects should return the Field objs."""
ent = EntityTest()
# Accessing fields through the type should return field objects
# instead of values.
assert static_type_equals(ent.ival, int)
assert isinstance(ent.ival, int)
mypytype = 'efro.entity._field.Field[builtins.int*]'
assert static_type_equals(type(ent).ival, mypytype)
assert isinstance(type(ent).ival, entity.Field)
# Accessing subtype on a nested compound field..
assert static_type_equals(type(ent).compoundlist.d_value, CompoundTest)
assert isinstance(type(ent).compoundlist.d_value, CompoundTest)
class EntityTestMixin(entity.EntityMixin, CompoundTest2):
"""A test entity created from a compound using a mixin class."""
def test_entity_mixin() -> None:
"""Testing our mixin entity variety."""
ent = EntityTestMixin()
assert static_type_equals(ent.isubval2, int)
assert ent.isubval2 == 3453
def test_entity_embedding() -> None:
"""Making sure compound entities work as expected."""
class EmbCompoundValTest(entity.CompoundValue):
"""Testing..."""
isubval = entity.Field('i', entity.IntValue(default=12345))
class EmbCompoundTest(entity.Entity):
"""Testing..."""
isubval = entity.Field('i', entity.IntValue(default=12345))
sub = entity.CompoundField('sub', EmbCompoundValTest())
# This should be ok...
_ent = EmbCompoundTest()
class EmbCompoundValTest2(entity.Entity):
"""Testing..."""
isubval = entity.Field('i', entity.IntValue(default=12345))
with pytest.raises(AssertionError):
# This should not be ok
# (can only embed CompoundValues, not complete Entities)
class EmbCompoundTest2(entity.Entity):
"""Testing..."""
isubval = entity.Field('i', entity.IntValue(default=12345))
sub = entity.CompoundField('sub', EmbCompoundValTest2())
_ent2 = EmbCompoundTest2()
def test_key_uniqueness() -> None:
"""Make sure entities reject multiple fields with the same key."""
# Make sure a single entity with dup keys fails:
with pytest.raises(RuntimeError):
class EntityKeyTest(entity.Entity):
"""Test entity with invalid duplicate keys."""
ival = entity.Field('i', entity.IntValue())
sval = entity.Field('i', entity.StringValue())
_ent = EntityKeyTest()
# Make sure we still get an error if the duplicate keys come from
# different places in the class hierarchy.
with pytest.raises(RuntimeError):
class EntityKeyTest2(entity.Entity):
"""Test entity with invalid duplicate keys."""
ival = entity.Field('i', entity.IntValue())
class EntityKeyTest3(EntityKeyTest2):
"""Test entity with invalid duplicate keys."""
sval = entity.Field('i', entity.StringValue())
_ent2 = EntityKeyTest3()
def test_data_storage_and_fetching() -> None:
"""Test store_default option for entities."""
class EntityTestD(entity.Entity):
"""Testing store_default off."""
ival = entity.Field('i', entity.IntValue(default=3,
store_default=False))
class EntityTestD2(entity.Entity):
"""Testing store_default on (the default)."""
ival = entity.Field('i', entity.IntValue(default=3))
# This guy should get pruned when it's got a default value.
testd = EntityTestD()
assert testd.ival == 3
assert testd.pruned_data() == {}
testd.ival = 4
assert testd.pruned_data() == {'i': 4}
testd.ival = 3
assert testd.pruned_data() == {}
# Make sure our pretty/prune json options work.
assert testd.to_json_str() == '{}'
assert testd.to_json_str(prune=False) == '{"i":3}'
assert testd.to_json_str(prune=False, pretty=True) == ('{\n'
' "i": 3\n'
'}')
# This guy should never get pruned...
testd2 = EntityTestD2()
assert testd2.ival == 3
assert testd2.pruned_data() == {'i': 3}
testd2.ival = 4
assert testd2.pruned_data() == {'i': 4}
testd2.ival = 3
assert testd2.to_json_str(prune=True) == '{"i":3}'
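The removed tests above lean on entity fields type-checking values at assignment time; with dataclassio that validation happens when data is converted. A hedged sketch of the equivalent check (the _IntHolder class and values are illustrative, and the exact exception type is not asserted since it is an assumption about dataclassio's validation):

from dataclasses import dataclass

import pytest
from typing_extensions import Annotated

from efro.dataclassio import ioprepped, IOAttrs, dataclass_from_json


@ioprepped
@dataclass
class _IntHolder:
    """Tiny illustrative stand-in for the entity classes above."""
    ival: Annotated[int, IOAttrs('i')]


def test_mistyped_input_is_rejected() -> None:
    """dataclassio validates types at conversion time."""
    # Well-typed input converts cleanly...
    assert dataclass_from_json(_IntHolder, '{"i": 345}').ival == 345
    # ...while a mistyped value should be rejected during conversion
    # (the old entity fields raised TypeError at assignment time).
    with pytest.raises(Exception):
        dataclass_from_json(_IntHolder, '{"i": "strval"}')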

View File

@@ -11,7 +11,6 @@ from enum import Enum
from typing_extensions import Annotated
from efro.dataclassio import ioprepped, IOAttrs
# from efro import entity
if TYPE_CHECKING:
pass

View File

@@ -5,32 +5,45 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Optional, List, Dict, Any, Tuple
from dataclasses import dataclass
from dataclasses import dataclass, field
from efro import entity
from efro.dataclassio import ioprepped
from typing_extensions import Annotated
from efro.dataclassio import ioprepped, IOAttrs
if TYPE_CHECKING:
pass
class ServerNodeEntry(entity.CompoundValue):
@ioprepped
@dataclass
class ServerNodeEntry:
"""Information about a specific server."""
region = entity.Field('r', entity.StringValue())
address = entity.Field('a', entity.StringValue())
port = entity.Field('p', entity.IntValue())
region: Annotated[str, IOAttrs('r')]
# region = entity.Field('r', entity.StringValue())
address: Annotated[str, IOAttrs('a')]
# address = entity.Field('a', entity.StringValue())
port: Annotated[int, IOAttrs('p')]
# port = entity.Field('p', entity.IntValue())
class ServerNodeQueryResponse(entity.Entity):
@ioprepped
@dataclass
class ServerNodeQueryResponse:
"""A response to a query about server-nodes."""
# If present, something went wrong, and this describes it.
error = entity.Field('e', entity.OptionalStringValue(store_default=False))
error: Annotated[Optional[str], IOAttrs('e', store_default=False)] = None
# error =
# entity.Field('e', entity.OptionalStringValue(store_default=False))
# The set of servernodes.
servers = entity.CompoundListField('s',
ServerNodeEntry(),
store_default=False)
servers: Annotated[List[ServerNodeEntry],
IOAttrs('s', store_default=False)] = field(
default_factory=list)
# servers = entity.CompoundListField('s',
# ServerNodeEntry(),
# store_default=False)
@ioprepped
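A short usage sketch for the converted classes above (the server values are made up, and the pruning behavior is an assumption: IOAttrs(store_default=False) is taken to omit values that still equal their defaults, mirroring the entity option it replaces):

from efro.dataclassio import dataclass_from_json, dataclass_to_json

# With both fields still at their defaults, nothing should be written.
resp = ServerNodeQueryResponse()
print(dataclass_to_json(resp))  # -> expected to be '{}' or close to it

# Populated fields come back out under their short storage keys.
resp.servers.append(ServerNodeEntry(region='eu', address='5.6.7.8', port=43210))
json_str = dataclass_to_json(resp)
# -> something like {"s": [{"r": "eu", "a": "5.6.7.8", "p": 43210}]}
restored = dataclass_from_json(ServerNodeQueryResponse, json_str)
assert restored.servers[0].port == 43210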

View File

@@ -1,42 +0,0 @@
# Released under the MIT License. See LICENSE for details.
#
"""Entity functionality.
****************************************************************************
NOTE: This is largely being replaced by dataclassio, which offers similar
functionality in a cleaner way. Ideally we should remove this completely at
some point, but for now we should at least try to avoid using it in new code.
****************************************************************************
A system for defining structured data, supporting both static and runtime
type safety, serialization, efficient/sparse storage, per-field value
limits, etc. This is a heavyweight option in comparison to things such as
dataclasses, but the increased features can make the overhead worth it for
certain use cases.
Advantages compared to raw nested dataclasses:
- Field names separated from their data representation so can get more
concise json data, change variable names while preserving back-compat, etc.
- Can wrap and preserve unmapped data (so fields can be added to new versions
of something without breaking old versions' ability to read the data)
- Incorrectly typed data is caught at runtime (for dataclasses we rely on
type-checking and explicit validation calls)
Disadvantages compared to raw nested dataclasses:
- More complex to use
- Significantly more heavyweight (roughly 10 times slower in quick tests)
- Can't currently be initialized in constructors (this would probably require
a Mypy plugin to do in a type-safe way)
"""
# pylint: disable=unused-import
from efro.entity._entity import EntityMixin, Entity
from efro.entity._field import (Field, CompoundField, ListField, DictField,
CompoundListField, CompoundDictField)
from efro.entity._value import (
EnumValue, OptionalEnumValue, IntValue, OptionalIntValue, StringValue,
OptionalStringValue, BoolValue, OptionalBoolValue, FloatValue,
OptionalFloatValue, DateTimeValue, OptionalDateTimeValue, Float3Value,
CompoundValue)
from efro.entity._support import FieldInspector

View File

@@ -1,133 +0,0 @@
# Released under the MIT License. See LICENSE for details.
#
"""Base classes for the entity system."""
from __future__ import annotations
from enum import Enum
from typing import TYPE_CHECKING
from efro.util import enum_by_value
if TYPE_CHECKING:
from typing import Any, Type
def dict_key_to_raw(key: Any, keytype: Type) -> Any:
"""Given a key value from the world, filter to stored key."""
if not isinstance(key, keytype):
raise TypeError(
f'Invalid key type; expected {keytype}, got {type(key)}.')
if issubclass(keytype, Enum):
val = key.value
# We convert int enums to string since that is what firestore supports.
if isinstance(val, int):
val = str(val)
return val
return key
def dict_key_from_raw(key: Any, keytype: Type) -> Any:
"""Given internal key, filter to world visible type."""
if issubclass(keytype, Enum):
# We store all enum keys as strings; if the enum uses
# int keys, convert back.
for enumval in keytype:
if isinstance(enumval.value, int):
return enum_by_value(keytype, int(key))
break
return enum_by_value(keytype, key)
return key
class DataHandler:
"""Base class for anything that can wrangle entity data.
This contains common functionality shared by Fields and Values.
"""
def get_default_data(self) -> Any:
"""Return the default internal data value for this object.
This will be inserted when initing nonexistent entity data.
"""
raise RuntimeError(f'get_default_data() unimplemented for {self}')
def filter_input(self, data: Any, error: bool) -> Any:
"""Given arbitrary input data, return valid internal data.
If error is True, exceptions should be thrown for any non-trivial
mismatch (more than just int vs float/etc.). Otherwise the invalid
data should be replaced with valid defaults and the problem noted
via the logging module.
The passed-in data can be modified in-place or returned as-is, or
completely new data can be returned. Compound types are responsible
for setting defaults and/or calling this recursively for their
children. Data that is not used by the field (such as orphaned values
in a dict field) can be left alone.
Supported types for internal data are:
- anything that works with json (lists, dicts, bools, floats, ints,
strings, None) - no tuples!
- datetime.datetime objects
"""
del error # unused
return data
def filter_output(self, data: Any) -> Any:
"""Given valid internal data, return user-facing data.
Note that entity data is expected to be filtered to correctness on
input, so if internal and external data are the same type this can
simply return the data unchanged. Value types such as Vec3 may store
data internally as simple float tuples but return Vec3 objects to the
user/etc.; this is the mechanism by which they do so.
"""
return data
def prune_data(self, data: Any) -> bool:
"""Prune internal data to strip out default values/etc.
Should return a bool indicating whether root data itself can be pruned.
The object is responsible for pruning any sub-fields before returning.
"""
class BaseField(DataHandler):
"""Base class for all field types."""
def __init__(self, d_key: str = None) -> None:
# Key for this field's data in parent dict/list (when applicable;
# some fields such as the child field under a list field represent
# more than a single field entry so this is unused)
self.d_key = d_key
# IMPORTANT: this method should only be overridden in the eyes of the
# type-checker (to specify exact return types). Subclasses should instead
# override get_with_data() for doing the actual work, since that method
# may sometimes be called explicitly instead of through __get__
def __get__(self, obj: Any, type_in: Any = None) -> Any:
if obj is None:
# when called on the type, we return the field
return self
return self.get_with_data(obj.d_data)
# IMPORTANT: same deal as __get__() (see note above)
def __set__(self, obj: Any, value: Any) -> None:
assert obj is not None
self.set_with_data(obj.d_data, value, error=True)
def get_with_data(self, data: Any) -> Any:
"""Get the field value given an explicit data source."""
assert self.d_key is not None
return self.filter_output(data[self.d_key])
def set_with_data(self, data: Any, value: Any, error: bool) -> Any:
"""Set the field value given an explicit data target.
If error is True, exceptions should be thrown for invalid data;
otherwise the problem should be logged but corrected.
"""
assert self.d_key is not None
data[self.d_key] = self.filter_input(value, error=error)
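For context on why the removed tests elsewhere in this commit expect int-enum dict keys to come out as strings (e.g. the {'0': 234} assertion in test_entity.py), the two helpers at the top of this file round-trip enum keys through their stored form. A small illustrative sketch (the _Slot enum is hypothetical; the import references the module removed by this commit):

from enum import Enum, unique

from efro.entity._base import dict_key_to_raw, dict_key_from_raw


@unique
class _Slot(Enum):
    """Illustrative enum; any int-valued enum behaves the same way."""
    FIRST = 0
    SECOND = 1


# Int-valued enum keys are stored as strings (what firestore supports)...
raw = dict_key_to_raw(_Slot.SECOND, _Slot)
assert raw == '1'
# ...and converted back to the enum value on the way out.
assert dict_key_from_raw(raw, _Slot) is _Slot.SECOND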

View File

@@ -1,222 +0,0 @@
# Released under the MIT License. See LICENSE for details.
#
"""Functionality for the actual Entity types."""
from __future__ import annotations
import json
from typing import TYPE_CHECKING, TypeVar
from efro.entity._support import FieldInspector, BoundCompoundValue
from efro.entity._value import CompoundValue
from efro.json import ExtendedJSONEncoder, ExtendedJSONDecoder
if TYPE_CHECKING:
from typing import Dict, Any, Type, Union, Optional
T = TypeVar('T', bound='EntityMixin')
class EntityMixin:
"""Mixin class to add data-storage to CompoundValue, forming an Entity.
Distinct Entity types should inherit from this first and a CompoundValue
(sub)type second. This order ensures that constructor arguments for this
class are accessible on the new type.
"""
def __init__(self,
d_data: Dict[str, Any] = None,
error: bool = True) -> None:
super().__init__()
if not isinstance(self, CompoundValue):
raise RuntimeError('EntityMixin class must be combined'
' with a CompoundValue class.')
# Underlying data for this entity; fields simply operate on this.
self.d_data: Dict[str, Any] = {}
assert isinstance(self, EntityMixin)
self.set_data(d_data if d_data is not None else {}, error=error)
def reset(self) -> None:
"""Resets data to default."""
self.set_data({}, error=True)
def set_data(self, data: Dict, error: bool = True) -> None:
"""Set the data for this entity and apply all value filters to it.
Note that it is more efficient to pass data to an Entity's constructor
than it is to create a default Entity and then call this on it.
"""
assert isinstance(self, CompoundValue)
self.d_data = self.filter_input(data, error=error)
def copy_data(self, target: Union[CompoundValue,
BoundCompoundValue]) -> None:
"""Copy data from a target Entity or compound-value.
This first verifies that the target has a matching set of fields
and then copies its data into ourself. To copy data into a nested
compound field, the assignment operator can be used.
"""
import copy
from efro.entity.util import have_matching_fields
tvalue: CompoundValue
if isinstance(target, CompoundValue):
tvalue = target
elif isinstance(target, BoundCompoundValue):
tvalue = target.d_value
else:
raise TypeError(
'Target must be a CompoundValue or BoundCompoundValue')
target_data = getattr(target, 'd_data', None)
if target_data is None:
raise ValueError('Target is not bound to data.')
assert isinstance(self, CompoundValue)
if not have_matching_fields(self, tvalue):
raise ValueError(
f'Fields for target {type(tvalue)} do not match ours'
f" ({type(self)}); can't copy data.")
self.d_data = copy.deepcopy(target_data)
def steal_data(self, target: EntityMixin) -> None:
"""Steal data from another entity.
This is more efficient than copy_data, as data is moved instead
of copied. However this leaves the target object in an invalid
state, and it must no longer be used after this call.
This can be convenient for entities to use to update themselves
with the result of a database transaction (which generally return
fresh entities).
"""
from efro.entity.util import have_matching_fields
if not isinstance(target, EntityMixin):
raise TypeError('EntityMixin is required.')
assert isinstance(target, CompoundValue)
assert isinstance(self, CompoundValue)
if not have_matching_fields(self, target):
raise ValueError(
f'Fields for target {type(target)} do not match ours'
f" ({type(self)}); can't steal data.")
assert target.d_data is not None
self.d_data = target.d_data
# Make sure target blows up if someone tries to use it.
# noinspection PyTypeHints
target.d_data = None # type: ignore
def pruned_data(self) -> Dict[str, Any]:
"""Return a pruned version of this instance's data.
This varies from d_data in that values may be stripped out if
they are equal to defaults (for fields with that option enabled).
"""
import copy
data = copy.deepcopy(self.d_data)
assert isinstance(self, CompoundValue)
self.prune_fields_data(data)
return data
def to_json_str(self,
prune: bool = True,
pretty: bool = False,
sort_keys_override: Optional[bool] = None) -> str:
"""Convert the entity to a json string.
This uses efro.json.ExtendedJSONEncoder/Decoder
to support data types not natively storable in json.
Be sure to use the corresponding loading functions here for
this same reason.
By default, keys are sorted when pretty-printing and not otherwise,
but this can be overridden by passing a bool as sort_keys_override.
"""
if prune:
data = self.pruned_data()
else:
data = self.d_data
if pretty:
return json.dumps(
data,
indent=2,
sort_keys=(sort_keys_override
if sort_keys_override is not None else True),
cls=ExtendedJSONEncoder)
# When not doing pretty, go for quick and compact.
return json.dumps(data,
separators=(',', ':'),
sort_keys=(sort_keys_override if sort_keys_override
is not None else False),
cls=ExtendedJSONEncoder)
@staticmethod
def json_loads(s: Union[str, bytes]) -> Any:
"""Load a json string using our special extended decoder.
Note that this simply returns loaded json data; no
Entities are involved.
"""
return json.loads(s, cls=ExtendedJSONDecoder)
def load_from_json_str(self,
s: Union[str, bytes],
error: bool = True) -> None:
"""Set the entity's data in-place from a json string.
The 'error' argument determines whether Exceptions will be raised
for invalid data values. Values will be reset/conformed to valid ones
if error is False. Note that Exceptions will always be raised
in the case of invalid formatted json.
"""
data = self.json_loads(s)
self.set_data(data, error=error)
@classmethod
def from_json_str(cls: Type[T],
s: Union[str, bytes],
error: bool = True) -> T:
"""Instantiate a new instance with provided json string.
The 'error' argument determines whether exceptions will be raised
on invalid data values. Values will be reset/conformed to valid ones
if error is False. Note that exceptions will always be raised
in the case of invalid formatted json.
"""
obj = cls(d_data=cls.json_loads(s), error=error)
return obj
# Note: though d_fields actually returns a FieldInspector,
# in type-checking-land we currently just say it returns self.
# This allows the type-checker to at least validate subfield access,
# though the types will be incorrect (values instead of inspectors).
# This means that anything taking FieldInspectors needs to take 'Any'
# at the moment. Hopefully we can make this cleaner via a mypy
# plugin at some point.
if TYPE_CHECKING:
@property
def d_fields(self: T) -> T:
"""For accessing entity field objects (as opposed to values)."""
...
else:
@property
def d_fields(self):
"""For accessing entity field objects (as opposed to values)."""
return FieldInspector(self, self, [], [])
class Entity(EntityMixin, CompoundValue):
"""A data class consisting of Fields and their underlying data.
Fields and Values simply define a data layout; Entities are concrete
objects using those layouts.
Inherit from this class and add Fields to define a simple Entity type.
Alternately, combine an EntityMixin with any CompoundValue child class
to accomplish the same. The latter allows sharing CompoundValue
layouts between different concrete Entity types. For example, a
'Weapon' CompoundValue could be embedded as part of a 'Character'
Entity but also exist as a distinct 'WeaponEntity' in an armory
database.
"""

View File

@@ -1,602 +0,0 @@
# Released under the MIT License. See LICENSE for details.
#
"""Field types for the entity system."""
from __future__ import annotations
import copy
import logging
from enum import Enum
from typing import TYPE_CHECKING, Generic, TypeVar, overload
# from efro.util import enum_by_value
from efro.entity._base import BaseField, dict_key_to_raw, dict_key_from_raw
from efro.entity._support import (BoundCompoundValue, BoundListField,
BoundDictField, BoundCompoundListField,
BoundCompoundDictField)
from efro.entity.util import have_matching_fields
if TYPE_CHECKING:
from typing import Dict, Type, List, Any
from efro.entity._value import TypedValue, CompoundValue
T = TypeVar('T')
TK = TypeVar('TK')
TC = TypeVar('TC', bound='CompoundValue')
class Field(BaseField, Generic[T]):
"""Field consisting of a single value."""
def __init__(self,
d_key: str,
value: TypedValue[T],
store_default: bool = True) -> None:
super().__init__(d_key)
self.d_value = value
self._store_default = store_default
def __repr__(self) -> str:
return f'<Field "{self.d_key}" with {self.d_value}>'
def get_default_data(self) -> Any:
return self.d_value.get_default_data()
def filter_input(self, data: Any, error: bool) -> Any:
return self.d_value.filter_input(data, error)
def filter_output(self, data: Any) -> Any:
return self.d_value.filter_output(data)
def prune_data(self, data: Any) -> bool:
return self.d_value.prune_data(data)
if TYPE_CHECKING:
# Use default runtime get/set but let type-checker know our types.
# Note: we actually return a bound-field when accessed on
# a type instead of an instance, but we don't reflect that here yet
# (would need to write a mypy plugin so sub-field access works first)
@overload
def __get__(self, obj: None, cls: Any = None) -> Field[T]:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> T:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
def __set__(self, obj: Any, value: T) -> None:
...
class CompoundField(BaseField, Generic[TC]):
"""Field consisting of a single compound value."""
def __init__(self,
d_key: str,
value: TC,
store_default: bool = True) -> None:
super().__init__(d_key)
if __debug__:
from efro.entity._value import CompoundValue
assert isinstance(value, CompoundValue)
assert not hasattr(value, 'd_data')
self.d_value = value
self._store_default = store_default
def get_default_data(self) -> dict:
return self.d_value.get_default_data()
def filter_input(self, data: Any, error: bool) -> dict:
return self.d_value.filter_input(data, error)
def prune_data(self, data: Any) -> bool:
return self.d_value.prune_data(data)
# Note:
# Currently, to the type-checker we just return a simple instance
# of our CompoundValue so it can properly type-check access to its
# attrs. However at runtime we return a FieldInspector or
# BoundCompoundField which both use magic to provide the same attrs
# dynamically (but which the type-checker doesn't understand).
# Perhaps at some point we can write a mypy plugin to correct this.
if TYPE_CHECKING:
def __get__(self, obj: Any, cls: Any = None) -> TC:
...
# Theoretically this type-checking may be too tight;
# we can support assigning a parent class to a child class if
# their fields match. Not sure if that'll ever come up though;
# gonna leave this for now as I prefer to have *some* checking.
# Also once we get BoundCompoundValues working with mypy we'll
# need to accept those too.
def __set__(self: CompoundField[TC], obj: Any, value: TC) -> None:
...
def get_with_data(self, data: Any) -> Any:
assert self.d_key in data
return BoundCompoundValue(self.d_value, data[self.d_key])
def set_with_data(self, data: Any, value: Any, error: bool) -> Any:
from efro.entity._value import CompoundValue
# Ok here's the deal: our type checking above allows any subtype
# of our CompoundValue in here, but we want to be more picky than
# that. Let's check fields for equality. This way we'll allow
# assigning something like a Car entity to a Car field
# (where the data is the same), but won't allow assigning a Car
# to a Vehicle field (as Car probably adds more fields).
value1: CompoundValue
if isinstance(value, BoundCompoundValue):
value1 = value.d_value
elif isinstance(value, CompoundValue):
value1 = value
else:
raise ValueError(f"Can't assign from object type {type(value)}")
dataval = getattr(value, 'd_data', None)
if dataval is None:
raise ValueError(f"Can't assign from unbound object {value}")
if self.d_value.get_fields() != value1.get_fields():
raise ValueError(f"Can't assign to {self.d_value} from"
f' incompatible type {value.d_value}; '
f'sub-fields do not match.')
# If we're allowing this to go through, we can simply copy the
# data from the passed in value. The fields match so it should
# be in a valid state already.
data[self.d_key] = copy.deepcopy(dataval)
class ListField(BaseField, Generic[T]):
"""Field consisting of repeated values."""
def __init__(self,
d_key: str,
value: TypedValue[T],
store_default: bool = True) -> None:
super().__init__(d_key)
self.d_value = value
self._store_default = store_default
def get_default_data(self) -> list:
return []
def filter_input(self, data: Any, error: bool) -> Any:
# If we were passed a BoundListField, operate on its raw values
if isinstance(data, BoundListField):
data = data.d_data
if not isinstance(data, list):
if error:
raise TypeError(f'list value expected; got {type(data)}')
logging.error('Ignoring non-list data for %s: %s', self, data)
data = []
for i, entry in enumerate(data):
data[i] = self.d_value.filter_input(entry, error=error)
return data
def prune_data(self, data: Any) -> bool:
# We never prune individual values since that would fundamentally
# change the list, but we can prune completely if empty (and allowed).
return not data and not self._store_default
# When accessed on a FieldInspector we return a sub-field FieldInspector.
# When accessed on an instance we return a BoundListField.
if TYPE_CHECKING:
# Access via type gives our field; via an instance gives a bound field.
@overload
def __get__(self, obj: None, cls: Any = None) -> ListField[T]:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundListField[T]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
# Allow setting via a raw value list or a bound list field
@overload
def __set__(self, obj: Any, value: List[T]) -> None:
...
@overload
def __set__(self, obj: Any, value: BoundListField[T]) -> None:
...
def __set__(self, obj: Any, value: Any) -> None:
...
def get_with_data(self, data: Any) -> Any:
return BoundListField(self, data[self.d_key])
class DictField(BaseField, Generic[TK, T]):
"""A field of values in a dict with a specified index type."""
def __init__(self,
d_key: str,
keytype: Type[TK],
field: TypedValue[T],
store_default: bool = True) -> None:
super().__init__(d_key)
self.d_value = field
self._store_default = store_default
self._keytype = keytype
def get_default_data(self) -> dict:
return {}
def filter_input(self, data: Any, error: bool) -> Any:
# If we were passed a BoundDictField, operate on its raw values
if isinstance(data, BoundDictField):
data = data.d_data
if not isinstance(data, dict):
if error:
raise TypeError('dict value expected')
logging.error('Ignoring non-dict data for %s: %s', self, data)
data = {}
data_out = {}
for key, val in data.items():
# For enum keys, make sure it's a valid enum.
if issubclass(self._keytype, Enum):
# Our input data can either be an enum or the underlying type.
if isinstance(key, self._keytype):
key = dict_key_to_raw(key, self._keytype)
# key = key.value
else:
try:
_enumval = dict_key_from_raw(key, self._keytype)
# _enumval = enum_by_value(self._keytype, key)
except Exception as exc:
if error:
raise ValueError(
f'No enum of type {self._keytype}'
f' exists with value {key}') from exc
logging.error('Ignoring invalid key type for %s: %s',
self, data)
continue
# For all other keys we can check for exact types.
elif not isinstance(key, self._keytype):
if error:
raise TypeError(
f'Invalid key type; expected {self._keytype},'
f' got {type(key)}.')
logging.error('Ignoring invalid key type for %s: %s', self,
data)
continue
data_out[key] = self.d_value.filter_input(val, error=error)
return data_out
def prune_data(self, data: Any) -> bool:
# We never prune individual values since that would fundamentally
# change the dict, but we can prune completely if empty (and allowed).
return not data and not self._store_default
if TYPE_CHECKING:
# Return our field if accessed via type and bound-dict-field
# if via instance.
@overload
def __get__(self, obj: None, cls: Any = None) -> DictField[TK, T]:
...
@overload
def __get__(self, obj: Any, cls: Any = None) -> BoundDictField[TK, T]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
# Allow setting via matching dict values or BoundDictFields
@overload
def __set__(self, obj: Any, value: Dict[TK, T]) -> None:
...
@overload
def __set__(self, obj: Any, value: BoundDictField[TK, T]) -> None:
...
def __set__(self, obj: Any, value: Any) -> None:
...
def get_with_data(self, data: Any) -> Any:
return BoundDictField(self._keytype, self, data[self.d_key])
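# A sketch of DictField with an enum key type. It assumes dict_key_to_raw /
# dict_key_from_raw (defined in efro.entity._base, not shown in this hunk)
# map enum keys to and from their underlying values; MyKey is hypothetical.
from enum import Enum

from efro.entity._field import DictField
from efro.entity._value import StringValue

class MyKey(Enum):
    ALPHA = 'a'
    BETA = 'b'

names = DictField('names', MyKey, StringValue(''))
data = {names.d_key: names.filter_input({MyKey.ALPHA: 'hello'}, error=True)}
bound = names.get_with_data(data)  # -> BoundDictField
bound[MyKey.BETA] = 'world'
assert bound[MyKey.ALPHA] == 'hello'
assert data['names'] == {'a': 'hello', 'b': 'world'}  # keys stored raw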
class CompoundListField(BaseField, Generic[TC]):
"""A field consisting of repeated instances of a compound-value.
Element access returns the sub-field, allowing nested field access.
i.e. mylist[10].fieldattr = 'foo'
"""
def __init__(self,
d_key: str,
valuetype: TC,
store_default: bool = True) -> None:
super().__init__(d_key)
self.d_value = valuetype
# This doesn't actually exist for us, but we want the type-checker
# to think it does (see TYPE_CHECKING note below).
self.d_data: Any
self._store_default = store_default
def filter_input(self, data: Any, error: bool) -> list:
if not isinstance(data, list):
if error:
raise TypeError('list value expected')
logging.error('Ignoring non-list data for %s: %s', self, data)
data = []
assert isinstance(data, list)
# Ok we've got a list; now run everything in it through validation.
for i, subdata in enumerate(data):
data[i] = self.d_value.filter_input(subdata, error=error)
return data
def get_default_data(self) -> list:
return []
def prune_data(self, data: Any) -> bool:
# Run pruning on all individual entries' data through our child field.
# However we don't *completely* prune values from the list since that
# would change it.
for subdata in data:
self.d_value.prune_fields_data(subdata)
# We can also optionally prune the whole list if empty and allowed.
return not data and not self._store_default
if TYPE_CHECKING:
@overload
def __get__(self, obj: None, cls: Any = None) -> CompoundListField[TC]:
...
@overload
def __get__(self,
obj: Any,
cls: Any = None) -> BoundCompoundListField[TC]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
# Note:
# When setting the list, we tell the type-checker that we also accept
# a raw list of CompoundValue objects, but at runtime we actually
# always deal with BoundCompoundValue objects (see note in
# BoundCompoundListField for why we accept CompoundValue objs)
@overload
def __set__(self, obj: Any, value: List[TC]) -> None:
...
@overload
def __set__(self, obj: Any, value: BoundCompoundListField[TC]) -> None:
...
def __set__(self, obj: Any, value: Any) -> None:
...
def get_with_data(self, data: Any) -> Any:
assert self.d_key in data
return BoundCompoundListField(self, data[self.d_key])
def set_with_data(self, data: Any, value: Any, error: bool) -> Any:
# If we were passed a BoundCompoundListField,
# simply convert it to a flat list of BoundCompoundValue objects which
# is what we work with natively here.
if isinstance(value, BoundCompoundListField):
value = list(value)
if not isinstance(value, list):
raise TypeError(f'CompoundListField expected list value on set;'
f' got {type(value)}.')
# Allow assigning only from a sequence of our existing children.
# (could look into expanding this to other children if we can
# be sure the underlying data will line up; for example two
# CompoundListFields with different child_field values should not
# be inter-assignable.)
if not all(isinstance(i, BoundCompoundValue) for i in value):
raise ValueError('CompoundListField assignment must be a '
'list containing only BoundCompoundValue objs.')
# Make sure the data all has the same CompoundValue type and
# compare that type against ours once to make sure its fields match.
# (this will not allow passing CompoundValues from multiple sources
# but I don't know if that would ever come up..)
for i, val in enumerate(value):
if i == 0:
# Do the full field comparison on the first value only..
if not have_matching_fields(val.d_value, self.d_value):
raise ValueError(
'CompoundListField assignment must be a '
'list containing matching CompoundValues.')
else:
# For all remaining values, just ensure they match the first.
if val.d_value is not value[0].d_value:
raise ValueError(
'CompoundListField assignment cannot contain '
'multiple CompoundValue types as sources.')
data[self.d_key] = self.filter_input([i.d_data for i in value],
error=error)
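# A sketch of the nested access CompoundListField provides. Point is a
# hypothetical CompoundValue subclass; it uses a ListField child since the
# plain scalar Field class is not part of this hunk.
from efro.entity._field import CompoundListField, ListField
from efro.entity._value import CompoundValue, FloatValue

class Point(CompoundValue):
    coords = ListField('coords', FloatValue(0.0))

points = CompoundListField('points', Point())
data = {points.d_key: points.filter_input([], error=True)}
bound = points.get_with_data(data)
entry = bound.append()       # appends default data; returns a bound value
entry.coords.append(1.0)
assert data['points'] == [{'coords': [1.0]}]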
class CompoundDictField(BaseField, Generic[TK, TC]):
"""A field consisting of key-indexed instances of a compound-value.
Element access returns the sub-field, allowing nested field access.
i.e. mydict[key].fieldattr = 'foo'
"""
def __init__(self,
d_key: str,
keytype: Type[TK],
valuetype: TC,
store_default: bool = True) -> None:
super().__init__(d_key)
self.d_value = valuetype
# This doesn't actually exist for us, but we want the type-checker
# to think it does (see TYPE_CHECKING note below).
self.d_data: Any
self.d_keytype = keytype
self._store_default = store_default
def filter_input(self, data: Any, error: bool) -> dict:
if not isinstance(data, dict):
if error:
raise TypeError('dict value expected')
logging.error('Ignoring non-dict data for %s: %s', self, data)
data = {}
data_out = {}
for key, val in data.items():
# For enum keys, make sure it's a valid enum.
if issubclass(self.d_keytype, Enum):
# Our input data can either be an enum or the underlying type.
if isinstance(key, self.d_keytype):
key = dict_key_to_raw(key, self.d_keytype)
else:
try:
_enumval = dict_key_from_raw(key, self.d_keytype)
except Exception as exc:
if error:
raise ValueError(
f'No enum of type {self.d_keytype}'
f' exists with value {key}') from exc
logging.error('Ignoring invalid key type for %s: %s',
self, data)
continue
# For all other keys we can check for exact types.
elif not isinstance(key, self.d_keytype):
if error:
raise TypeError(
f'Invalid key type; expected {self.d_keytype},'
f' got {type(key)}.')
logging.error('Ignoring invalid key type for %s: %s', self,
data)
continue
data_out[key] = self.d_value.filter_input(val, error=error)
return data_out
def get_default_data(self) -> dict:
return {}
def prune_data(self, data: Any) -> bool:
# Run pruning on all individual entries' data through our child field.
# However we don't *completely* prune values from the dict since that
# would change it.
for subdata in data.values():
self.d_value.prune_fields_data(subdata)
# We can also optionally prune the whole dict if empty and allowed.
return not data and not self._store_default
# ONLY overriding these in type-checker land to clarify types.
# (see note in BaseField)
if TYPE_CHECKING:
@overload
def __get__(self,
obj: None,
cls: Any = None) -> CompoundDictField[TK, TC]:
...
@overload
def __get__(self,
obj: Any,
cls: Any = None) -> BoundCompoundDictField[TK, TC]:
...
def __get__(self, obj: Any, cls: Any = None) -> Any:
...
# Note:
# When setting the dict, we tell the type-checker that we also accept
# a raw dict of CompoundValue objects, but at runtime we actually
# always deal with BoundCompoundValue objects (see note in
# BoundCompoundDictField for why we accept CompoundValue objs)
@overload
def __set__(self, obj: Any, value: Dict[TK, TC]) -> None:
...
@overload
def __set__(self, obj: Any, value: BoundCompoundDictField[TK,
TC]) -> None:
...
def __set__(self, obj: Any, value: Any) -> None:
...
def get_with_data(self, data: Any) -> Any:
assert self.d_key in data
return BoundCompoundDictField(self, data[self.d_key])
def set_with_data(self, data: Any, value: Any, error: bool) -> Any:
# If we were passed a BoundCompoundDictField,
# simply convert it to a flat dict of BoundCompoundValue objects which
# is what we work with natively here.
if isinstance(value, BoundCompoundDictField):
value = dict(value.items())
if not isinstance(value, dict):
raise TypeError('CompoundDictField expected dict value on set.')
# Allow assigning only from a sequence of our existing children.
# (could look into expanding this to other children if we can
# be sure the underlying data will line up; for example two
# CompoundDictFields with different child_field values should not
# be inter-assignable.)
if (not all(isinstance(i, BoundCompoundValue)
for i in value.values())):
raise ValueError('CompoundDictField assignment must be a '
'dict containing only BoundCompoundValues.')
# Make sure the data all has the same CompoundValue type and
# compare that type against ours once to make sure its fields match.
# (this will not allow passing CompoundValues from multiple sources
# but I don't know if that would ever come up..)
first_value: Any = None
for i, val in enumerate(value.values()):
if i == 0:
first_value = val.d_value
# Do the full field comparison on the first value only..
if not have_matching_fields(val.d_value, self.d_value):
raise ValueError(
'CompoundDictField assignment must be a '
'dict containing matching CompoundValues.')
else:
# For all remaining values, just ensure they match the first.
if val.d_value is not first_value:
raise ValueError(
'CompoundDictField assignment cannot contain '
'multiple CompoundValue types as sources.')
data[self.d_key] = self.filter_input(
{key: val.d_data
for key, val in value.items()}, error=error)
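# A sketch of CompoundDictField plus BoundCompoundDictField.add(). Profile
# is hypothetical, and plain string keys are assumed to pass through
# dict_key_to_raw unchanged.
from efro.entity._field import CompoundDictField, ListField
from efro.entity._value import CompoundValue, StringValue

class Profile(CompoundValue):
    tags = ListField('tags', StringValue(''))

profiles = CompoundDictField('profiles', str, Profile())
data = {profiles.d_key: profiles.filter_input({}, error=True)}
bound = profiles.get_with_data(data)
bound.add('bob').tags.append('admin')
assert data['profiles'] == {'bob': {'tags': ['admin']}}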

View File

@ -1,468 +0,0 @@
# Released under the MIT License. See LICENSE for details.
#
"""Various support classes for accessing data and info on fields and values."""
from __future__ import annotations
from typing import TYPE_CHECKING, TypeVar, Generic, overload
from efro.entity._base import (BaseField, dict_key_to_raw, dict_key_from_raw)
if TYPE_CHECKING:
from typing import (Optional, Tuple, Type, Any, Dict, List, Union)
from efro.entity._value import CompoundValue
from efro.entity._field import (ListField, DictField, CompoundListField,
CompoundDictField)
T = TypeVar('T')
TKey = TypeVar('TKey')
TCompound = TypeVar('TCompound', bound='CompoundValue')
TBoundList = TypeVar('TBoundList', bound='BoundCompoundListField')
class BoundCompoundValue:
"""Wraps a CompoundValue object and its entity data.
Allows access to its values through our own equivalent attributes.
"""
def __init__(self, value: CompoundValue, d_data: Union[List[Any],
Dict[str, Any]]):
self.d_value: CompoundValue
self.d_data: Union[List[Any], Dict[str, Any]]
# Need to use base setters to avoid triggering our own overrides.
object.__setattr__(self, 'd_value', value)
object.__setattr__(self, 'd_data', d_data)
def __eq__(self, other: Any) -> Any:
# Allow comparing to compound and bound-compound objects.
from efro.entity.util import compound_eq
return compound_eq(self, other)
def __getattr__(self, name: str) -> Any:
# If this attribute corresponds to a field on our compound value's
# unbound type, ask it to give us a value using our data
d_value = type(object.__getattribute__(self, 'd_value'))
field = getattr(d_value, name, None)
if isinstance(field, BaseField):
return field.get_with_data(self.d_data)
raise AttributeError
def __setattr__(self, name: str, value: Any) -> None:
# Same deal as __getattr__ basically.
field = getattr(type(object.__getattribute__(self, 'd_value')), name,
None)
if isinstance(field, BaseField):
field.set_with_data(self.d_data, value, error=True)
return
super().__setattr__(name, value)
def reset(self) -> None:
"""Reset this field's data to defaults."""
value = object.__getattribute__(self, 'd_value')
data = object.__getattribute__(self, 'd_data')
assert isinstance(data, dict)
# Need to clear our dict in-place since we have no access to our
# parent, which we'd need in order to assign an empty one.
data.clear()
# Now fill in default data.
value.apply_fields_to_data(data, error=True)
def __repr__(self) -> str:
fstrs: List[str] = []
for field in self.d_value.get_fields():
try:
fstrs.append(str(field) + '=' + repr(getattr(self, field)))
except Exception:
fstrs.append('FAIL' + str(field) + ' ' + str(type(self)))
return type(self.d_value).__name__ + '(' + ', '.join(fstrs) + ')'
class FieldInspector:
"""Used for inspecting fields."""
def __init__(self, root: Any, obj: Any, path: List[str],
dbpath: List[str]) -> None:
self._root = root
self._obj = obj
self._path = path
self._dbpath = dbpath
def __repr__(self) -> str:
path = '.'.join(self._path)
typename = type(self._root).__name__
if path == '':
return f'<FieldInspector: {typename}>'
return f'<FieldInspector: {typename}: {path}>'
def __getattr__(self, name: str) -> Any:
# pylint: disable=cyclic-import
from efro.entity._field import CompoundField
# If this attribute corresponds to a field on our obj's
# unbound type, return a new inspector for it.
if isinstance(self._obj, CompoundField):
target = self._obj.d_value
else:
target = self._obj
field = getattr(type(target), name, None)
if isinstance(field, BaseField):
newpath = list(self._path)
newpath.append(name)
newdbpath = list(self._dbpath)
assert field.d_key is not None
newdbpath.append(field.d_key)
return FieldInspector(self._root, field, newpath, newdbpath)
raise AttributeError
def get_root(self) -> Any:
"""Return the root object this inspector is targeting."""
return self._root
def get_path(self) -> List[str]:
"""Return the python path components of this inspector."""
return self._path
def get_db_path(self) -> List[str]:
"""Return the database path components of this inspector."""
return self._dbpath
class BoundListField(Generic[T]):
"""ListField bound to data; used for accessing field values."""
def __init__(self, field: ListField[T], d_data: List[Any]):
self.d_field = field
assert isinstance(d_data, list)
self.d_data = d_data
self._i = 0
def __eq__(self, other: Any) -> Any:
# Just convert us into a regular list and run a compare with that.
flattened = [
self.d_field.d_value.filter_output(value) for value in self.d_data
]
return flattened == other
def __repr__(self) -> str:
return '[' + ', '.join(
repr(self.d_field.d_value.filter_output(i))
for i in self.d_data) + ']'
def __len__(self) -> int:
return len(self.d_data)
def __iter__(self) -> Any:
self._i = 0
return self
def append(self, val: T) -> None:
"""Append the provided value to the list."""
self.d_data.append(self.d_field.d_value.filter_input(val, error=True))
def __next__(self) -> T:
if self._i < len(self.d_data):
self._i += 1
val: T = self.d_field.d_value.filter_output(self.d_data[self._i -
1])
return val
raise StopIteration
@overload
def __getitem__(self, key: int) -> T:
...
@overload
def __getitem__(self, key: slice) -> List[T]:
...
def __getitem__(self, key: Any) -> Any:
if isinstance(key, slice):
dofilter = self.d_field.d_value.filter_output
return [
dofilter(self.d_data[i])
for i in range(*key.indices(len(self)))
]
assert isinstance(key, int)
return self.d_field.d_value.filter_output(self.d_data[key])
def __setitem__(self, key: int, value: T) -> None:
if not isinstance(key, int):
raise TypeError('Expected int index.')
self.d_data[key] = self.d_field.d_value.filter_input(value, error=True)
class BoundDictField(Generic[TKey, T]):
"""DictField bound to its data; used for accessing its values."""
def __init__(self, keytype: Type[TKey], field: DictField[TKey, T],
d_data: Dict[TKey, T]):
self._keytype = keytype
self.d_field = field
assert isinstance(d_data, dict)
self.d_data = d_data
def __eq__(self, other: Any) -> Any:
# Just convert us into a regular dict and run a compare with that.
flattened = {
key: self.d_field.d_value.filter_output(value)
for key, value in self.d_data.items()
}
return flattened == other
def __repr__(self) -> str:
return '{' + ', '.join(
repr(dict_key_from_raw(key, self._keytype)) + ': ' +
repr(self.d_field.d_value.filter_output(val))
for key, val in self.d_data.items()) + '}'
def __len__(self) -> int:
return len(self.d_data)
def __getitem__(self, key: TKey) -> T:
keyfilt = dict_key_to_raw(key, self._keytype)
typedval: T = self.d_field.d_value.filter_output(self.d_data[keyfilt])
return typedval
def get(self, key: TKey, default: Optional[T] = None) -> Optional[T]:
"""Get a value if present, or a default otherwise."""
keyfilt = dict_key_to_raw(key, self._keytype)
if keyfilt not in self.d_data:
return default
typedval: T = self.d_field.d_value.filter_output(self.d_data[keyfilt])
return typedval
def __setitem__(self, key: TKey, value: T) -> None:
keyfilt = dict_key_to_raw(key, self._keytype)
self.d_data[keyfilt] = self.d_field.d_value.filter_input(value,
error=True)
def __contains__(self, key: TKey) -> bool:
keyfilt = dict_key_to_raw(key, self._keytype)
return keyfilt in self.d_data
def __delitem__(self, key: TKey) -> None:
keyfilt = dict_key_to_raw(key, self._keytype)
del self.d_data[keyfilt]
def keys(self) -> List[TKey]:
"""Return a list of our keys."""
return [
dict_key_from_raw(k, self._keytype) for k in self.d_data.keys()
]
def values(self) -> List[T]:
"""Return a list of our values."""
return [
self.d_field.d_value.filter_output(value)
for value in self.d_data.values()
]
def items(self) -> List[Tuple[TKey, T]]:
"""Return a list of item/value pairs."""
return [(dict_key_from_raw(key, self._keytype),
self.d_field.d_value.filter_output(value))
for key, value in self.d_data.items()]
class BoundCompoundListField(Generic[TCompound]):
"""A CompoundListField bound to its entity sub-data."""
def __init__(self, field: CompoundListField[TCompound], d_data: List[Any]):
self.d_field = field
self.d_data = d_data
self._i = 0
def __eq__(self, other: Any) -> Any:
from efro.entity.util import have_matching_fields
# We can only be compared to other bound-compound-fields
if not isinstance(other, BoundCompoundListField):
return NotImplemented
# If our compound values have differing fields, we're unequal.
if not have_matching_fields(self.d_field.d_value,
other.d_field.d_value):
return False
# Ok our data schemas match; now just compare our data..
return self.d_data == other.d_data
def __len__(self) -> int:
return len(self.d_data)
def __repr__(self) -> str:
return '[' + ', '.join(
repr(BoundCompoundValue(self.d_field.d_value, i))
for i in self.d_data) + ']'
# Note: to the type checker our gets/sets simply deal with CompoundValue
# objects so the type-checker can cleanly handle their sub-fields.
# However at runtime we deal in BoundCompoundValue objects which use magic
# to tie the CompoundValue object to its data but which the type checker
# can't understand.
if TYPE_CHECKING:
@overload
def __getitem__(self, key: int) -> TCompound:
...
@overload
def __getitem__(self, key: slice) -> List[TCompound]:
...
def __getitem__(self, key: Any) -> Any:
...
def __next__(self) -> TCompound:
...
def append(self) -> TCompound:
"""Append and return a new field entry to the array."""
...
else:
def __getitem__(self, key: Any) -> Any:
if isinstance(key, slice):
return [
BoundCompoundValue(self.d_field.d_value, self.d_data[i])
for i in range(*key.indices(len(self)))
]
assert isinstance(key, int)
return BoundCompoundValue(self.d_field.d_value, self.d_data[key])
def __next__(self):
if self._i < len(self.d_data):
self._i += 1
return BoundCompoundValue(self.d_field.d_value,
self.d_data[self._i - 1])
raise StopIteration
def append(self) -> Any:
"""Append and return a new field entry to the array."""
# push the entity default into data and then let it fill in
# any children/etc.
self.d_data.append(
self.d_field.d_value.filter_input(
self.d_field.d_value.get_default_data(), error=True))
return BoundCompoundValue(self.d_field.d_value, self.d_data[-1])
def __iter__(self: TBoundList) -> TBoundList:
self._i = 0
return self
class BoundCompoundDictField(Generic[TKey, TCompound]):
"""A CompoundDictField bound to its entity sub-data."""
def __init__(self, field: CompoundDictField[TKey, TCompound],
d_data: Dict[Any, Any]):
self.d_field = field
self.d_data = d_data
def __eq__(self, other: Any) -> Any:
from efro.entity.util import have_matching_fields
# We can only be compared to other bound-compound-fields
if not isinstance(other, BoundCompoundDictField):
return NotImplemented
# If our compound values have differing fields, we're unequal.
if not have_matching_fields(self.d_field.d_value,
other.d_field.d_value):
return False
# Ok our data schemas match; now just compare our data..
return self.d_data == other.d_data
def __repr__(self) -> str:
return '{' + ', '.join(
repr(key) + ': ' +
repr(BoundCompoundValue(self.d_field.d_value, value))
for key, value in self.d_data.items()) + '}'
# In the typechecker's eyes, gets/sets on us simply deal in
# CompoundValue object. This allows type-checking to work nicely
# for its sub-fields.
# However in real-life we return BoundCompoundValues which use magic
# to tie the CompoundValue to its data (but which the typechecker
# would not be able to make sense of)
if TYPE_CHECKING:
def get(self, key: TKey) -> Optional[TCompound]:
"""Return a value if present; otherwise None."""
def __getitem__(self, key: TKey) -> TCompound:
...
def values(self) -> List[TCompound]:
"""Return a list of our values."""
def items(self) -> List[Tuple[TKey, TCompound]]:
"""Return key/value pairs for all dict entries."""
def add(self, key: TKey) -> TCompound:
"""Add an entry into the dict, returning it.
Any existing value is replaced."""
else:
def get(self, key):
"""return a value if present; otherwise None."""
keyfilt = dict_key_to_raw(key, self.d_field.d_keytype)
data = self.d_data.get(keyfilt)
if data is not None:
return BoundCompoundValue(self.d_field.d_value, data)
return None
def __getitem__(self, key):
keyfilt = dict_key_to_raw(key, self.d_field.d_keytype)
return BoundCompoundValue(self.d_field.d_value,
self.d_data[keyfilt])
def values(self):
"""Return a list of our values."""
return list(
BoundCompoundValue(self.d_field.d_value, i)
for i in self.d_data.values())
def items(self):
"""Return key/value pairs for all dict entries."""
return [(dict_key_from_raw(key, self.d_field.d_keytype),
BoundCompoundValue(self.d_field.d_value, value))
for key, value in self.d_data.items()]
def add(self, key: TKey) -> TCompound:
"""Add an entry into the dict, returning it.
Any existing value is replaced."""
keyfilt = dict_key_to_raw(key, self.d_field.d_keytype)
# Push the entity default into data and then let it fill in
# any children/etc.
self.d_data[keyfilt] = (self.d_field.d_value.filter_input(
self.d_field.d_value.get_default_data(), error=True))
return BoundCompoundValue(self.d_field.d_value,
self.d_data[keyfilt])
def __len__(self) -> int:
return len(self.d_data)
def __contains__(self, key: TKey) -> bool:
keyfilt = dict_key_to_raw(key, self.d_field.d_keytype)
return keyfilt in self.d_data
def __delitem__(self, key: TKey) -> None:
keyfilt = dict_key_to_raw(key, self.d_field.d_keytype)
del self.d_data[keyfilt]
def keys(self) -> List[TKey]:
"""Return a list of our keys."""
return [
dict_key_from_raw(k, self.d_field.d_keytype)
for k in self.d_data.keys()
]

View File

@ -1,537 +0,0 @@
# Released under the MIT License. See LICENSE for details.
#
"""Value types for the entity system."""
from __future__ import annotations
import datetime
import inspect
import logging
from collections import abc
from enum import Enum
from typing import TYPE_CHECKING, TypeVar, Generic
# Our Pylint class_generics_filter gives us a false-positive unused-import.
from typing import Tuple, Optional # pylint: disable=W0611
from efro.entity._base import DataHandler, BaseField
from efro.entity.util import compound_eq
if TYPE_CHECKING:
from typing import Optional, Set, List, Dict, Any, Type
T = TypeVar('T')
TE = TypeVar('TE', bound=Enum)
_sanity_tested_types: Set[Type] = set()
_type_field_cache: Dict[Type, Dict[str, BaseField]] = {}
class TypedValue(DataHandler, Generic[T]):
"""Base class for all value types dealing with a single data type."""
class SimpleValue(TypedValue[T]):
"""Standard base class for simple single-value types.
This class provides enough functionality to handle most simple
types such as int/float/etc without too many subclass overrides.
"""
def __init__(self,
default: T,
store_default: bool,
target_type: Type = None,
convert_source_types: Tuple[Type, ...] = (),
allow_none: bool = False) -> None:
"""Init the value field.
If store_default is False, the field value will not be included
in final entity data if it is a default value. Be sure to set
this to True for any fields that will be used for server-side
queries so they are included in indexing.
target_type and convert_source_types are used in the default
filter_input implementation; if the passed-in data's type is present
in convert_source_types, a target_type instance will be constructed
from it (allowing simple conversions to bool, int, etc.).
Data will also be allowed through untouched if it matches target_type.
(types needing further introspection should override filter_input).
Lastly, the value of allow_none is also used in filter_input for
whether values of None should be allowed.
"""
super().__init__()
self._store_default = store_default
self._target_type = target_type
self._convert_source_types = convert_source_types
self._allow_none = allow_none
# We store _default_data in our internal data format so need
# to run user-facing values through our input filter.
# Make sure we do this last since filter_input depends on above vals.
self._default_data: T = self.filter_input(default, error=True)
def __repr__(self) -> str:
if self._target_type is not None:
return f'<Value of type {self._target_type.__name__}>'
return '<Value of unknown type>'
def get_default_data(self) -> Any:
return self._default_data
def prune_data(self, data: Any) -> bool:
return not self._store_default and data == self._default_data
def filter_input(self, data: Any, error: bool) -> Any:
# Let data pass through untouched if it's already our target type
if self._target_type is not None:
if isinstance(data, self._target_type):
return data
# ...and also if it's None and we're into that sort of thing.
if self._allow_none and data is None:
return data
# If it's one of our convertible types, convert.
if (self._convert_source_types
and isinstance(data, self._convert_source_types)):
assert self._target_type is not None
return self._target_type(data)
if error:
errmsg = (f'value of type {self._target_type} or None expected'
if self._allow_none else
f'value of type {self._target_type} expected')
errmsg += f'; got {type(data)}'
raise TypeError(errmsg)
errmsg = f'Ignoring incompatible data for {self};'
errmsg += (f' expected {self._target_type} or None;'
if self._allow_none else f' expected {self._target_type};')
errmsg += f' got {type(data)}'
logging.error(errmsg)
return self.get_default_data()
class StringValue(SimpleValue[str]):
"""Value consisting of a single string."""
def __init__(self, default: str = '', store_default: bool = True) -> None:
super().__init__(default, store_default, str)
class OptionalStringValue(SimpleValue[Optional[str]]):
"""Value consisting of a single string or None."""
def __init__(self,
default: Optional[str] = None,
store_default: bool = True) -> None:
super().__init__(default, store_default, str, allow_none=True)
class BoolValue(SimpleValue[bool]):
"""Value consisting of a single bool."""
def __init__(self,
default: bool = False,
store_default: bool = True) -> None:
super().__init__(default, store_default, bool, (int, float))
class OptionalBoolValue(SimpleValue[Optional[bool]]):
"""Value consisting of a single bool or None."""
def __init__(self,
default: Optional[bool] = None,
store_default: bool = True) -> None:
super().__init__(default,
store_default,
bool, (int, float),
allow_none=True)
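# A minimal sketch of the conversion behavior described in the SimpleValue
# docstring above: data already of target_type passes through, listed
# source types are converted, anything else raises (or is logged and
# replaced when error is False). BoolValue above is just SimpleValue
# configured with target_type=bool and convert_source_types=(int, float).
from efro.entity._value import BoolValue

flag = BoolValue()
assert flag.filter_input(True, error=True) is True    # already a bool
assert flag.filter_input(1, error=True) is True       # int -> bool(1)
try:
    flag.filter_input('yes', error=True)
except TypeError:
    pass  # str is neither bool nor a listed convertible type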
def verify_time_input(data: Any, error: bool, allow_none: bool) -> Any:
"""Checks input data for time values."""
pytz_utc: Any
# We don't *require* pytz since it must be installed through pip
# but it is used by the firestore client for its date values
# (in which case it should be installed as a dependency anyway).
try:
import pytz
pytz_utc = pytz.utc
except ModuleNotFoundError:
pytz_utc = None
# Filter unallowed None values.
if not allow_none and data is None:
if error:
raise ValueError('datetime value cannot be None')
logging.error('ignoring datetime value of None')
data = (None if allow_none else datetime.datetime.now(
datetime.timezone.utc))
# Parent filter_input does what we need, but let's just make
# sure we *only* accept datetime values that know they're UTC.
elif (isinstance(data, datetime.datetime)
and data.tzinfo is not datetime.timezone.utc
and (pytz_utc is None or data.tzinfo is not pytz_utc)):
if error:
raise ValueError(
'datetime values must have timezone set as timezone.utc')
logging.error(
'ignoring datetime value without timezone.utc set: %s %s',
type(datetime.timezone.utc), type(data.tzinfo))
data = (None if allow_none else datetime.datetime.now(
datetime.timezone.utc))
return data
class DateTimeValue(SimpleValue[datetime.datetime]):
"""Value consisting of a datetime.datetime object.
The default value for this is always the current time in UTC.
"""
def __init__(self, store_default: bool = True) -> None:
# Pass dummy datetime value as default just to satisfy constructor;
# we override get_default_data though so this doesn't get used.
dummy_default = datetime.datetime.now(datetime.timezone.utc)
super().__init__(dummy_default, store_default, datetime.datetime)
def get_default_data(self) -> Any:
# For this class we don't use a static default value;
# default is always now.
return datetime.datetime.now(datetime.timezone.utc)
def filter_input(self, data: Any, error: bool) -> Any:
data = verify_time_input(data, error, allow_none=False)
return super().filter_input(data, error)
class OptionalDateTimeValue(SimpleValue[Optional[datetime.datetime]]):
"""Value consisting of a datetime.datetime object or None."""
def __init__(self, store_default: bool = True) -> None:
super().__init__(None,
store_default,
datetime.datetime,
allow_none=True)
def filter_input(self, data: Any, error: bool) -> Any:
data = verify_time_input(data, error, allow_none=True)
return super().filter_input(data, error)
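# A sketch of the UTC requirement enforced by verify_time_input() above:
# only timezone-aware UTC datetimes are accepted; naive ones are rejected.
import datetime

from efro.entity._value import DateTimeValue

when = DateTimeValue()
now = datetime.datetime.now(datetime.timezone.utc)
assert when.filter_input(now, error=True) is now
try:
    when.filter_input(datetime.datetime.now(), error=True)  # naive datetime
except ValueError:
    pass  # tzinfo must be datetime.timezone.utc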
class IntValue(SimpleValue[int]):
"""Value consisting of a single int."""
def __init__(self, default: int = 0, store_default: bool = True) -> None:
super().__init__(default, store_default, int, (bool, float))
class OptionalIntValue(SimpleValue[Optional[int]]):
"""Value consisting of a single int or None"""
def __init__(self,
default: Optional[int] = None,
store_default: bool = True) -> None:
super().__init__(default,
store_default,
int, (bool, float),
allow_none=True)
class FloatValue(SimpleValue[float]):
"""Value consisting of a single float."""
def __init__(self,
default: float = 0.0,
store_default: bool = True) -> None:
super().__init__(default, store_default, float, (bool, int))
class OptionalFloatValue(SimpleValue[Optional[float]]):
"""Value consisting of a single float or None."""
def __init__(self,
default: Optional[float] = None,
store_default: bool = True) -> None:
super().__init__(default,
store_default,
float, (bool, int),
allow_none=True)
class Float3Value(SimpleValue[Tuple[float, float, float]]):
"""Value consisting of 3 floats."""
def __init__(self,
default: Tuple[float, float, float] = (0.0, 0.0, 0.0),
store_default: bool = True) -> None:
super().__init__(default, store_default)
def __repr__(self) -> str:
return '<Value of type float3>'
def filter_input(self, data: Any, error: bool) -> Any:
if (not isinstance(data, abc.Sequence) or len(data) != 3
or any(not isinstance(i, (int, float)) for i in data)):
if error:
raise TypeError('Sequence of 3 float values expected.')
logging.error('Ignoring non-3-float-sequence data for %s: %s',
self, data)
data = self.get_default_data()
# Actually store as list.
return [float(data[0]), float(data[1]), float(data[2])]
def filter_output(self, data: Any) -> Any:
"""Override."""
assert len(data) == 3
return tuple(data)
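# A sketch of Float3Value storage: any 3-number sequence is accepted,
# stored internally as a list of floats, and handed back as a tuple.
from efro.entity._value import Float3Value

pos = Float3Value()
stored = pos.filter_input((1, 2.5, 3), error=True)
assert stored == [1.0, 2.5, 3.0]
assert pos.filter_output(stored) == (1.0, 2.5, 3.0)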
class BaseEnumValue(TypedValue[T]):
"""Value class for storing Python Enums.
Internally enums are stored as their corresponding int/str/etc. values.
"""
def __init__(self,
enumtype: Type[T],
default: Optional[T] = None,
store_default: bool = True,
allow_none: bool = False) -> None:
super().__init__()
assert issubclass(enumtype, Enum)
vals: List[T] = list(enumtype)
# Bit of sanity checking: make sure this enum has at least
# one value and that its underlying values are all of simple
# json-friendly types.
if not vals:
raise TypeError(f'enum {enumtype} has no values')
for val in vals:
assert isinstance(val, Enum)
if not isinstance(val.value, (int, bool, float, str)):
raise TypeError(f'enum value {val} has an invalid'
f' value type {type(val.value)}')
self._enumtype: Type[Enum] = enumtype
self._store_default: bool = store_default
self._allow_none: bool = allow_none
# We store default data in our internal format, so we need to run
# the user-provided value through our input filter.
# Make sure to set this last since it could depend on other
# stuff we set here.
if default is None and not self._allow_none:
# Special case: we allow passing None as default even if
# we don't support None as a value; in that case we sub
# in the first enum value.
default = vals[0]
self._default_data: Enum = self.filter_input(default, error=True)
def get_default_data(self) -> Any:
return self._default_data
def prune_data(self, data: Any) -> bool:
return not self._store_default and data == self._default_data
def filter_input(self, data: Any, error: bool) -> Any:
# Allow passing in enum objects directly of course.
if isinstance(data, self._enumtype):
data = data.value
elif self._allow_none and data is None:
pass
else:
# At this point we assume it's an enum value
try:
self._enumtype(data)
except ValueError:
if error:
raise ValueError(
f'Invalid value for {self._enumtype}: {data}'
) from None
logging.error('Ignoring invalid value for %s: %s',
self._enumtype, data)
data = self._default_data
return data
def filter_output(self, data: Any) -> Any:
if self._allow_none and data is None:
return None
return self._enumtype(data)
class EnumValue(BaseEnumValue[TE]):
"""Value class for storing Python Enums.
Internally enums are stored as their corresponding int/str/etc. values.
"""
def __init__(self,
enumtype: Type[TE],
default: Optional[TE] = None,
store_default: bool = True) -> None:
super().__init__(enumtype, default, store_default, allow_none=False)
class OptionalEnumValue(BaseEnumValue[Optional[TE]]):
"""Value class for storing Python Enums (or None).
Internally enums are stored as their corresponding int/str/etc. values.
"""
def __init__(self,
enumtype: Type[TE],
default: Optional[TE] = None,
store_default: bool = True) -> None:
super().__init__(enumtype, default, store_default, allow_none=True)
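# A sketch of enum storage in BaseEnumValue: values are stored as the
# enum's underlying raw value and converted back on output. Hue is
# hypothetical.
from enum import Enum

from efro.entity._value import EnumValue

class Hue(Enum):
    RED = 0
    BLUE = 1

hue = EnumValue(Hue, default=Hue.BLUE)
assert hue.get_default_data() == 1                 # stored as raw value
assert hue.filter_input(Hue.RED, error=True) == 0
assert hue.filter_output(0) is Hue.RED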
class CompoundValue(DataHandler):
"""A value containing one or more named child fields of its own.
Custom classes can be defined that inherit from this and include
any number of Field instances within themself.
"""
def __init__(self, store_default: bool = True) -> None:
super().__init__()
self._store_default = store_default
# Run sanity checks on this type if we haven't.
self.run_type_sanity_checks()
def __eq__(self, other: Any) -> Any:
# Allow comparing to compound and bound-compound objects.
return compound_eq(self, other)
def get_default_data(self) -> dict:
return {}
# NOTE: once we've got bound-compound-fields working in mypy
# we should get rid of this here.
# For now it needs to be here though since bound-compound fields
# come across as these in type-land.
def reset(self) -> None:
"""Resets data to default."""
raise ValueError('Unbound CompoundValue cannot be reset.')
def filter_input(self, data: Any, error: bool) -> dict:
if not isinstance(data, dict):
if error:
raise TypeError('dict value expected')
logging.error('Ignoring non-dict data for %s: %s', self, data)
data = {}
assert isinstance(data, dict)
self.apply_fields_to_data(data, error=error)
return data
def prune_data(self, data: Any) -> bool:
# Let all of our sub-fields prune themselves..
self.prune_fields_data(data)
# Now we can optionally prune ourself completely if there's
# nothing left in our data dict...
return not data and not self._store_default
def prune_fields_data(self, d_data: Dict[str, Any]) -> None:
"""Given a CompoundValue and data, prune any unnecessary data.
This includes data for fields at their default values with store_default False.
"""
# Allow all fields to take a pruning pass.
assert isinstance(d_data, dict)
for field in self.get_fields().values():
assert isinstance(field.d_key, str)
# This is supposed to be valid data so there should be *something*
# there for all fields.
if field.d_key not in d_data:
raise RuntimeError(f'expected to find {field.d_key} in data'
f' for {self}; got data {d_data}')
# Now ask the field if this data is necessary. If not, prune it.
if field.prune_data(d_data[field.d_key]):
del d_data[field.d_key]
def apply_fields_to_data(self, d_data: Dict[str, Any],
error: bool) -> None:
"""Apply all of our fields to target data.
If error is True, exceptions will be raised for invalid data;
otherwise it will be overwritten (with logging notices emitted).
"""
assert isinstance(d_data, dict)
for field in self.get_fields().values():
assert isinstance(field.d_key, str)
# First off, make sure *something* is there for this field.
if field.d_key not in d_data:
d_data[field.d_key] = field.get_default_data()
# Now let the field tweak the data as needed so its valid.
d_data[field.d_key] = field.filter_input(d_data[field.d_key],
error=error)
def __repr__(self) -> str:
if not hasattr(self, 'd_data'):
return f'<unbound {type(self).__name__} at {hex(id(self))}>'
fstrs: List[str] = []
assert isinstance(self, CompoundValue)
for field in self.get_fields():
fstrs.append(str(field) + '=' + repr(getattr(self, field)))
return type(self).__name__ + '(' + ', '.join(fstrs) + ')'
@classmethod
def get_fields(cls) -> Dict[str, BaseField]:
"""Return all field instances for this type."""
assert issubclass(cls, CompoundValue)
# If we haven't yet, calculate and cache a complete list of fields
# for this exact type.
if cls not in _type_field_cache:
fields: Dict[str, BaseField] = {}
for icls in inspect.getmro(cls):
for name, field in icls.__dict__.items():
if isinstance(field, BaseField):
fields[name] = field
_type_field_cache[cls] = fields
retval: Dict[str, BaseField] = _type_field_cache[cls]
assert isinstance(retval, dict)
return retval
@classmethod
def run_type_sanity_checks(cls) -> None:
"""Given a type, run one-time sanity checks on it.
These tests ensure child fields are using valid
non-repeating names/etc.
"""
if cls not in _sanity_tested_types:
_sanity_tested_types.add(cls)
# Make sure all embedded fields have a key set and there are no
# duplicates.
field_keys: Set[str] = set()
for field in cls.get_fields().values():
if field.d_key is None:
raise RuntimeError(f'Child field {field} under {cls}'
' has d_key None')
assert isinstance(field.d_key, str)
if field.d_key == '':
raise RuntimeError(f'Child field {field} under {cls}'
' has empty d_key')
# Allow alphanumeric and underscore only.
if not field.d_key.replace('_', '').isalnum():
raise RuntimeError(
f'Child field "{field.d_key}" under {cls}'
f' contains invalid characters; only alphanumeric'
f' and underscore allowed.')
if field.d_key in field_keys:
raise RuntimeError('Multiple child fields with key'
f' "{field.d_key}" found in {cls}')
field_keys.add(field.d_key)
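# A sketch of CompoundValue in use: fields are declared as class
# attributes, get_fields() collects them, and filter_input() fills in
# defaults for anything missing. Settings is hypothetical and uses
# container fields since the plain scalar Field class is not in this hunk.
from efro.entity._field import DictField, ListField
from efro.entity._value import CompoundValue, IntValue, StringValue

class Settings(CompoundValue):
    tags = ListField('tags', StringValue(''))
    scores = DictField('scores', str, IntValue(0))

assert sorted(Settings.get_fields()) == ['scores', 'tags']
data = Settings().filter_input({'tags': ['a']}, error=True)
assert data == {'tags': ['a'], 'scores': {}}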

View File

@ -1,131 +0,0 @@
# Released under the MIT License. See LICENSE for details.
#
"""Misc utility functionality related to the entity system."""
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Any, Union, Tuple, List
from efro.entity._value import CompoundValue
from efro.entity._support import BoundCompoundValue
def diff_compound_values(
obj1: Union[BoundCompoundValue, CompoundValue],
obj2: Union[BoundCompoundValue, CompoundValue]) -> str:
"""Generate a string showing differences between two compound values.
Both must be associated with data and have the same set of fields.
"""
# Ensure fields match and both are attached to data...
value1, data1 = get_compound_value_and_data(obj1)
if data1 is None:
raise ValueError(f'Invalid unbound compound value: {obj1}')
value2, data2 = get_compound_value_and_data(obj2)
if data2 is None:
raise ValueError(f'Invalid unbound compound value: {obj2}')
if not have_matching_fields(value1, value2):
raise ValueError(
f"Can't diff objs with non-matching fields: {value1} and {value2}")
# Ok; let 'er rip...
diff = _diff(obj1, obj2, 2)
return ' <no differences>' if diff == '' else diff
class CompoundValueDiff:
"""Wraps diff_compound_values() in an object for efficiency.
It is preferable to pass this to logging calls instead of the
final diff string since the diff will never be generated if
the associated logging level is not being emitted.
"""
def __init__(self, obj1: Union[BoundCompoundValue, CompoundValue],
obj2: Union[BoundCompoundValue, CompoundValue]):
self._obj1 = obj1
self._obj2 = obj2
def __repr__(self) -> str:
return diff_compound_values(self._obj1, self._obj2)
def _diff(obj1: Union[BoundCompoundValue, CompoundValue],
obj2: Union[BoundCompoundValue, CompoundValue], indent: int) -> str:
from efro.entity._support import BoundCompoundValue
bits: List[str] = []
indentstr = ' ' * indent
vobj1, _data1 = get_compound_value_and_data(obj1)
fields = sorted(vobj1.get_fields().keys())
for field in fields:
val1 = getattr(obj1, field)
val2 = getattr(obj2, field)
# for nested compounds, dive in and do nice piecewise compares
if isinstance(val1, BoundCompoundValue):
assert isinstance(val2, BoundCompoundValue)
diff = _diff(val1, val2, indent + 2)
if diff != '':
bits.append(f'{indentstr}{field}:')
bits.append(diff)
# for all else just do a single line
# (perhaps we could improve on this for other complex types)
else:
if val1 != val2:
bits.append(f'{indentstr}{field}: {val1} -> {val2}')
return '\n'.join(bits)
def have_matching_fields(val1: CompoundValue, val2: CompoundValue) -> bool:
"""Return whether two compound-values have matching sets of fields.
Note this just refers to the field configuration; not data.
"""
# Quick-out: matching types will always have identical fields.
if type(val1) is type(val2):
return True
# Otherwise do a full comparison.
return val1.get_fields() == val2.get_fields()
def get_compound_value_and_data(
obj: Union[BoundCompoundValue,
CompoundValue]) -> Tuple[CompoundValue, Any]:
"""Return value and data for bound or unbound compound values."""
# pylint: disable=cyclic-import
from efro.entity._support import BoundCompoundValue
from efro.entity._value import CompoundValue
if isinstance(obj, BoundCompoundValue):
value = obj.d_value
data = obj.d_data
elif isinstance(obj, CompoundValue):
value = obj
data = getattr(obj, 'd_data', None) # may not exist
else:
raise TypeError(
f'Expected a BoundCompoundValue or CompoundValue; got {type(obj)}')
return value, data
def compound_eq(obj1: Union[BoundCompoundValue, CompoundValue],
obj2: Union[BoundCompoundValue, CompoundValue]) -> Any:
"""Compare two compound value/bound-value objects for equality."""
# Criteria for comparison: both need to be a compound value
# and both must have data (which implies they are either an entity
# or bound to a subfield in an entity).
value1, data1 = get_compound_value_and_data(obj1)
if data1 is None:
return NotImplemented
value2, data2 = get_compound_value_and_data(obj2)
if data2 is None:
return NotImplemented
# Ok we can compare them. To consider them equal we look for
# matching sets of fields and matching data. Note that there
# could be unbound data causing inequality despite their field
# values all matching; not sure if that's what we want.
return have_matching_fields(value1, value2) and data1 == data2
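# A sketch of equality (which routes through compound_eq()) and
# diff_compound_values() on bound values. Prefs is hypothetical, and the
# BoundCompoundValue objects are constructed by hand purely for
# illustration.
from efro.entity._field import ListField
from efro.entity._support import BoundCompoundValue
from efro.entity._value import CompoundValue, StringValue
from efro.entity.util import diff_compound_values

class Prefs(CompoundValue):
    tags = ListField('tags', StringValue(''))

def bound(raw: dict) -> BoundCompoundValue:
    prefs = Prefs()
    return BoundCompoundValue(prefs, prefs.filter_input(raw, error=True))

a = bound({'tags': ['admin']})
b = bound({'tags': ['admin']})
c = bound({})
assert a == b                        # matching fields and matching data
assert a != c
print(diff_compound_values(a, c))    # e.g. "  tags: ['admin'] -> []"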