Skip to content

Commit

Permalink
Unfinished business 2
Browse files Browse the repository at this point in the history
  • Loading branch information
sveinugu committed Dec 6, 2023
1 parent 256ef45 commit 10f0740
Show file tree
Hide file tree
Showing 8 changed files with 219 additions and 32 deletions.
5 changes: 5 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,11 @@ warn_no_return = false
#show_column_numbers = true
error_summary = true
verbosity = 0
#no_implicit_reexport = true

#plugins = [
# "pydantic.mypy"
#]

[[tool.mypy.overrides]]
module = "tests.*"
Expand Down
6 changes: 6 additions & 0 deletions src/omnipy/data/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,9 @@
# # orjson.dumps returns bytes, to match standard json.dumps we need to decode
# return orjson.dumps(v, default=default).decode()

# TODO: implement copy(), __copy__() and __deepcopy__() for Dataset and Model, making use of
# BaseModel.copy()


class Dataset(GenericModel, Generic[ModelT], UserDict):
"""
Expand Down Expand Up @@ -127,6 +130,9 @@ def __init__(self,
# Traceback (most recent call last):
# ...
# TypeError: 'ModelMetaclass' object does not support item assignment
#
# TODO: Allow e.g. Dataset[Model[int]](a=1, b=2) init
# TODO: Disallow e.g. Dataset[Model[str]](Model[int](5)) == Dataset[Model[str]](data=Model[int](5)) == Dataset[Model[str]](data={'__root__': Model[str]('5')})

if value != Undefined:
input_data[DATA_KEY] = value
Expand Down
105 changes: 97 additions & 8 deletions src/omnipy/data/model.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from collections.abc import Mapping, Sequence
import json
from types import UnionType
from typing import (Annotated,
Expand All @@ -9,6 +10,8 @@
get_origin,
List,
Optional,
overload,
SupportsIndex,
Type,
TypeVar,
Union)
Expand All @@ -21,12 +24,17 @@
from pydantic import root_validator
from pydantic.fields import ModelField, Undefined, UndefinedType
from pydantic.generics import GenericModel
from pydantic.main import ModelMetaclass
from pydantic.typing import display_as_type, is_none_type
from pydantic.utils import lenient_issubclass
from pydantic.utils import lenient_isinstance, lenient_issubclass

from omnipy.util.helpers import is_optional

_KeyT = TypeVar('_KeyT')
_ValT = TypeVar('_ValT')
_IdxT = TypeVar('_IdxT', bound=SupportsIndex)
RootT = TypeVar('RootT', covariant=True, bound=object)

ROOT_KEY = '__root__'

# def orjson_dumps(v, *, default):
Expand All @@ -42,7 +50,29 @@ def generate_qualname(cls_name: str, model: Any) -> str:
return f'{cls_name}[{fully_qual_model_name}]'


class Model(GenericModel, Generic[RootT]):
class MyModelMetaclass(ModelMetaclass):
    """Metaclass for Model that makes ``None`` pass every ``isinstance`` check.

    Hack to overcome a bug in pydantic/fields.py (v1.10.13), lines 636-641::

        if origin is None or origin is CollectionsHashable:
            # field is not "typing" object eg. Union, Dict, List etc.
            # allow None for virtual superclasses of NoneType, e.g. Hashable
            if isinstance(self.type_, type) and isinstance(None, self.type_):
                self.allow_none = True
                return

    That code prevents models (including pure pydantic BaseModels) from being
    properly considered as subfields, e.g. in ``list[MyModel]``, since
    ``get_origin(MyModel) is None``. Here we want ``allow_none`` to be set to
    True so that Model is allowed to validate a None value — which pydantic
    only does when ``isinstance(None, self.type_)`` is true.

    TODO: Revisit the need for MyModelMetaclass hack in pydantic v2
    """
    def __instancecheck__(self, instance: Any) -> bool:
        # Claim None as an instance of every Model class so that pydantic's
        # check above sets allow_none = True for Model subfields.
        return instance is None or super().__instancecheck__(instance)


class Model(GenericModel, Generic[RootT], metaclass=MyModelMetaclass):
"""
A data model containing a value parsed according to the model.
Expand Down Expand Up @@ -226,6 +256,7 @@ def __new__(cls, value: Union[Any, UndefinedType] = Undefined, **kwargs):
cls._raise_no_model_exception()
return super().__new__(cls)

# TODO: Allow e.g. Model[str](Model[int](5)) == Model[str](Model[int](5).contents). Should then work the same as dataset
def __init__(
self,
value: Union[Any, UndefinedType] = Undefined,
Expand Down Expand Up @@ -290,8 +321,8 @@ def _parse_root_object(cls, root_obj: Dict[str, RootT]) -> Any: # noqa
def _parse_none_value_with_root_type_if_model(cls, value):
root_field = cls.__fields__.get(ROOT_KEY)
root_type = root_field.type_
if value is None:
value = cls._parse_with_root_type_if_model(value, root_field, root_type)
# if value is None:
value = cls._parse_with_root_type_if_model(value, root_field, root_type)
return value

@classmethod
Expand All @@ -313,10 +344,25 @@ def _parse_with_root_type_if_model(cls, value: Any, root_field: ModelField,
else:
raise main_error

if lenient_issubclass(root_type, Model) \
and get_origin(root_type.__fields__[ROOT_KEY].outer_type_) not in [List, Dict, list, dict]: # Very much a hack
return root_type.parse_obj(value)
else:
if lenient_issubclass(root_type, Model):
if root_field.outer_type_ != root_type:
outer_type = get_origin(root_field.outer_type_)
if lenient_issubclass(outer_type, Sequence) and lenient_isinstance(value, Sequence):
return [
root_type.parse_obj(val)
if is_none_type(val) or not lenient_isinstance(val, Model) else val
for val in value
]
elif lenient_issubclass(outer_type, Mapping) and lenient_isinstance(value, Mapping):
return {
key:
root_type.parse_obj(val)
if is_none_type(val) or not lenient_isinstance(val, Model) else val
for (key, val) in value.items()
}
else:
return root_type.parse_obj(value)
if value is None:
none_default = root_field.default_factory() is None if root_field.default_factory \
else root_field.default is None
root_type_is_none = is_none_type(root_type)
Expand Down Expand Up @@ -411,3 +457,46 @@ def __eq__(self, other: object) -> bool:
and self.__class__ == other.__class__ \
and self.contents == other.contents \
and self.to_data() == other.to_data() # last is probably unnecessary, but just in case

@overload
def __getitem__(self: 'Model[Model[Mapping[_KeyT, _ValT]]]', item: _KeyT) -> _ValT:
    ...

@overload
def __getitem__(self: 'Model[Mapping[_KeyT, _ValT]]', item: _KeyT) -> _ValT:
    ...

@overload
def __getitem__(self: 'Model[Model[Sequence[_ValT]]]', item: slice) -> 'RootT':
    ...

@overload
def __getitem__(self: 'Model[Sequence[_ValT]]', item: slice) -> 'Model[RootT]':
    ...

@overload
def __getitem__(self: 'Model[Model[Sequence[_ValT]]]', item: int) -> _ValT:
    ...

@overload
def __getitem__(self: 'Model[Sequence[_ValT]]', item: int) -> _ValT:
    ...

def __getitem__(self, item):
    """Index into the model's contents, looking through one level of Model nesting.

    Sequence contents: an ``int`` index returns the element; a ``slice``
    returns a new model (of the inner model class when nested, otherwise of
    ``type(self)``) wrapping the sliced sequence.
    Mapping contents: returns the value stored under ``item``.

    NOTE(review): unsupported contents/item combinations fall through and
    implicitly return None rather than raising — confirm this is intended
    ("unfinished business" per the commit).
    """
    outer = self.contents
    nested = isinstance(outer, Model)
    # Look through exactly one level of Model nesting; deeper nesting is not unwrapped.
    data = outer.contents if nested else outer
    if isinstance(data, Sequence):
        if isinstance(item, int):
            return data[item]
        if isinstance(item, slice):
            # Re-wrap the slice in whichever class directly held the sequence.
            wrapper_cls = type(outer) if nested else type(self)
            return wrapper_cls(data[item])
    if isinstance(data, Mapping):
        return data[cast(_KeyT, item)]
6 changes: 6 additions & 0 deletions src/omnipy/hub/runtime.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,12 @@ def __post_init__(self):
self.reset_subscriptions()

def reset_subscriptions(self):
"""
Resets all subscriptions for the current instance.
This function unsubscribes all existing subscriptions and then sets up new subscriptions
for the `config` and `objects` members.
"""
self.config.unsubscribe_all()
self.objects.unsubscribe_all()

Expand Down
20 changes: 8 additions & 12 deletions src/omnipy/modules/json/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,7 @@

_JsonBaseT = TypeVar(
'_JsonBaseT',
bound=Optional[Union['_JsonScalarM',
'_JsonBaseListM',
'_JsonBaseDictM',
'_JsonListM',
'_JsonDictM']])
bound=Union['_JsonScalarM', '_JsonBaseListM', '_JsonBaseDictM', '_JsonListM', '_JsonDictM'])


class _JsonScalarM(Model[JsonScalar]):
Expand All @@ -34,22 +30,22 @@ class _JsonBaseDictM(Model[Dict[str, _JsonBaseT]], Generic[_JsonBaseT]):


# NOTE(review): stale comment — the Optional[...] workaround for
# test_union_nested_model_classes_inner_forwardref_generic_list_of_none was removed
# on the line below in this change; confirm that test still passes.
class _JsonListM(_JsonBaseListM[Optional['_JsonAnyUnion']]):
class _JsonListM(_JsonBaseListM['_JsonAnyUnion']):
    """Generic JSON list model specialized to the `_JsonAnyUnion` forward reference."""
    ...


# NOTE(review): stale comment — the Optional[...] workaround for
# test_union_nested_model_classes_inner_forwardref_generic_list_of_none was removed
# on the line below in this change; confirm that test still passes.
class _JsonDictM(_JsonBaseDictM[Optional['_JsonAnyUnion']]):
class _JsonDictM(_JsonBaseDictM['_JsonAnyUnion']):
    """Generic JSON dict model specialized to the `_JsonAnyUnion` forward reference."""
    ...


# NOTE(review): stale comment — the Optional[...] workaround for
# test_union_nested_model_classes_inner_forwardref_generic_list_of_none was removed
# on the line below in this change; confirm that test still passes.
class _JsonNoDictsM(_JsonBaseListM[Optional['_JsonNoDictsUnion']]):
class _JsonNoDictsM(_JsonBaseListM['_JsonNoDictsUnion']):
    """Generic JSON list model specialized to the `_JsonNoDictsUnion` forward reference."""
    ...


# NOTE(review): stale comment — the Optional[...] workaround for
# test_union_nested_model_classes_inner_forwardref_generic_list_of_none was removed
# on the line below in this change; confirm that test still passes.
class _JsonNoListsM(_JsonBaseDictM[Optional['_JsonNoListsUnion']]):
class _JsonNoListsM(_JsonBaseDictM['_JsonNoListsUnion']):
    """Generic JSON dict model specialized to the `_JsonNoListsUnion` forward reference."""
    ...


Expand All @@ -63,10 +59,10 @@ class _JsonNoListsM(_JsonBaseDictM[Optional['_JsonNoListsUnion']]):
_JsonNoListsUnion: TypeAlias = Union[_JsonScalarM, _JsonNoListsM]

# NOTE(review): stale comment — the Optional[...] workaround for
# test_union_nested_model_classes_inner_forwardref_generic_list_of_none was removed
# on the type alias below in this change; confirm that test still passes.
_JsonListOfScalarsM: TypeAlias = _JsonBaseListM[Optional[_JsonScalarM]]
_JsonListOfScalarsM: TypeAlias = _JsonBaseListM[_JsonScalarM]

# NOTE(review): stale comment — the Optional[...] workaround for
# test_union_nested_model_classes_inner_forwardref_generic_list_of_none was removed
# on the type alias below in this change; confirm that test still passes.
_JsonDictOfScalarsM: TypeAlias = _JsonBaseDictM[Optional[_JsonScalarM]]
_JsonDictOfScalarsM: TypeAlias = _JsonBaseDictM[_JsonScalarM]

# Basic models needs to update their forward_refs with type aliases declared above

Expand Down Expand Up @@ -295,7 +291,7 @@ class JsonDictOfListsOfDictsModel(Model[_JsonBaseDictM[_JsonBaseListM[_JsonDictM

# Custom models

JsonCustomScalarModel: TypeAlias = Optional[_JsonScalarM]
JsonCustomScalarModel: TypeAlias = _JsonScalarM


class JsonCustomListModel(Model[_JsonBaseListM[_JsonBaseT]], Generic[_JsonBaseT]):
Expand Down
Loading

0 comments on commit 10f0740

Please sign in to comment.