import re
import sys
import copy
import types
import inspect
import keyword
import builtins
import functools
import itertools
import abc
import _thread
from types import FunctionType, GenericAlias


__all__ = ['dataclass',
           'field',
           'Field',
           'FrozenInstanceError',
           'InitVar',
           'KW_ONLY',
           'MISSING',

           # Helper functions.
           'fields',
           'asdict',
           'astuple',
           'make_dataclass',
           'replace',
           'is_dataclass',
           ]

# Conditions for adding methods.  The boxes indicate what action the
# dataclass decorator takes.  For all of these tables, when I talk
# about init=, repr=, eq=, order=, unsafe_hash=, or frozen=, I'm
# referring to the arguments to the @dataclass decorator.  When
# checking if a dunder method already exists, I mean check for an
# entry in the class's __dict__.  I never check to see if an attribute
# is defined in a base class.

# Key:
# +=========+=========================================+
# + Value   | Meaning                                 |
# +=========+=========================================+
# | <blank> | No action: no method is added.          |
# +---------+-----------------------------------------+
# | add     | Generated method is added.              |
# +---------+-----------------------------------------+
# | raise   | TypeError is raised.                    |
# +---------+-----------------------------------------+
# | None    | Attribute is set to None.               |
# +=========+=========================================+

# __init__
#
#    +--- init= parameter
#    |
#    v    |       |       |
#         |  no   |  yes  |  <--- class has __init__ in __dict__?
# +=======+=======+=======+
# | False |       |       |
# +-------+-------+-------+
# | True  | add   |       |  <- the default
# +=======+=======+=======+

# __repr__
#
#    +--- repr= parameter
#    |
#    v    |       |       |
#         |  no   |  yes  |  <--- class has __repr__ in __dict__?
# +=======+=======+=======+
# | False |       |       |
# +-------+-------+-------+
# | True  | add   |       |  <- the default
# +=======+=======+=======+


# __setattr__
# __delattr__
#
#    +--- frozen= parameter
#    |
#    v    |       |       |
#         |  no   |  yes  |  <--- class has __setattr__ or __delattr__ in __dict__?
# +=======+=======+=======+
# | False |       |       |  <- the default
# +-------+-------+-------+
# | True  | add   | raise |
# +=======+=======+=======+
# Raise because not adding these methods would break the "frozen-ness"
# of the class.

# __eq__
#
#    +--- eq= parameter
#    |
#    v    |       |       |
#         |  no   |  yes  |  <--- class has __eq__ in __dict__?
# +=======+=======+=======+
# | False |       |       |
# +-------+-------+-------+
# | True  | add   |       |  <- the default
# +=======+=======+=======+

# __lt__
# __le__
# __gt__
# __ge__
#
#    +--- order= parameter
#    |
#    v    |       |       |
#         |  no   |  yes  |  <--- class has any comparison method in __dict__?
# +=======+=======+=======+
# | False |       |       |  <- the default
# +-------+-------+-------+
# | True  | add   | raise |
# +=======+=======+=======+
# Raise because to allow this case would interfere with using
# functools.total_ordering.

# __hash__

#    +------------------- unsafe_hash= parameter
#    |       +----------- eq= parameter
#    |       |       +--- frozen= parameter
#    |       |       |
#    v       v       v    |        |        |
#                         |   no   |  yes   |  <--- class has explicitly defined __hash__
# +=======+=======+=======+========+========+
# | False | False | False |        |        | No __eq__, use the base class __hash__
# +-------+-------+-------+--------+--------+
# | False | False | True  |        |        | No __eq__, use the base class __hash__
# +-------+-------+-------+--------+--------+
# | False | True  | False | None   |        | <-- the default, not hashable
# +-------+-------+-------+--------+--------+
# | False | True  | True  | add    |        | Frozen, so hashable, allows override
# +-------+-------+-------+--------+--------+
# | True  | False | False | add    | raise  | Has no __eq__, but hashable
# +-------+-------+-------+--------+--------+
# | True  | False | True  | add    | raise  | Has no __eq__, but hashable
# +-------+-------+-------+--------+--------+
# | True  | True  | False | add    | raise  | Not frozen, but hashable
# +-------+-------+-------+--------+--------+
# | True  | True  | True  | add    | raise  | Frozen, so hashable
# +=======+=======+=======+========+========+
# For boxes that are blank, __hash__ is untouched and therefore
# inherited from the base class.  If the base is object, then
# id-based hashing is used.
#
# Note that a class may already have __hash__=None if it specified an
# __eq__ method in the class body (not one that was created by
# @dataclass).
#
# See _hash_action (below) for a coded version of this table.

# __match_args__
#
#    +--- match_args= parameter
#    |
#    v    |       |       |
#         |  no   |  yes  |  <--- class has __match_args__ in __dict__?
# +=======+=======+=======+
# | False |       |       |
# +-------+-------+-------+
# | True  | add   |       |  <- the default
# +=======+=======+=======+
# __match_args__ is always added unless the class already defines it. It is a
# tuple of __init__ parameter names; non-init fields must be matched by keyword.
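
# An illustrative example (not part of the implementation; the class name is
# made up) of how the tables above combine under the default arguments
# (init=True, repr=True, eq=True, order=False, unsafe_hash=False,
# frozen=False):
#
#   @dataclass
#   class C:
#       x: int
#
# gets a generated __init__, __repr__ and __eq__, and __hash__ is set to
# None (the "False, True, False" row of the __hash__ table), so instances
# are unhashable.  With @dataclass(frozen=True) the same class would
# instead get a generated __hash__ (the "False, True, True" row).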


# Raised when an attempt is made to modify a frozen class.
class FrozenInstanceError(AttributeError): pass

# A sentinel object for default values to signal that a default
# factory will be used.  This is given a nice repr() which will appear
# in the function signature of dataclasses' constructors.
class _HAS_DEFAULT_FACTORY_CLASS:
    def __repr__(self):
        return '<factory>'
_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS()

# A sentinel object to detect if a parameter is supplied or not.  Use
# a class to give it a better repr.
class _MISSING_TYPE:
    pass
MISSING = _MISSING_TYPE()

# A sentinel object to indicate that following fields are keyword-only by
# default.  Use a class to give it a better repr.
class _KW_ONLY_TYPE:
    pass
KW_ONLY = _KW_ONLY_TYPE()
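
# Rough usage sketch (illustrative only; the class is made up): fields whose
# annotations appear after a KW_ONLY marker become keyword-only parameters
# of the generated __init__:
#
#   @dataclass
#   class Point:
#       x: float
#       _: KW_ONLY
#       y: float = 0.0
#
#   Point(1.0, y=2.0)   # ok
#   Point(1.0, 2.0)     # raises TypeError (y is keyword-only)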

# Since most per-field metadata will be unused, create an empty
# read-only proxy that can be shared among all fields.
_EMPTY_METADATA = types.MappingProxyType({})

# Markers for the various kinds of fields and pseudo-fields.
class _FIELD_BASE:
    def __init__(self, name):
        self.name = name
    def __repr__(self):
        return self.name
_FIELD = _FIELD_BASE('_FIELD')
_FIELD_CLASSVAR = _FIELD_BASE('_FIELD_CLASSVAR')
_FIELD_INITVAR = _FIELD_BASE('_FIELD_INITVAR')

# The name of an attribute on the class where we store the Field
# objects.  Also used to check if a class is a Data Class.
_FIELDS = '__dataclass_fields__'

# The name of an attribute on the class that stores the parameters to
# @dataclass.
_PARAMS = '__dataclass_params__'

# The name of the function that, if it exists, is called at the end of
# __init__.
_POST_INIT_NAME = '__post_init__'

# String regex that string annotations for ClassVar or InitVar must match.
# Allows "identifier.identifier[" or "identifier[".
# https://bugs.python.org/issue33453 for details.
_MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)')
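
# For example (illustrative only), matching the string annotation
# "typing.ClassVar[int]" yields the groups ('typing', 'ClassVar'), and
# matching "ClassVar[int]" yields (None, 'ClassVar').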

class InitVar:
    __slots__ = ('type', )

    def __init__(self, type):
        self.type = type

    def __repr__(self):
        if isinstance(self.type, type) and not isinstance(self.type, GenericAlias):
            type_name = self.type.__name__
        else:
            # typing objects, e.g. List[int]
            type_name = repr(self.type)
        return f'dataclasses.InitVar[{type_name}]'

    def __class_getitem__(cls, type):
        return InitVar(type)
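
# Minimal usage sketch (illustrative only; the class is made up): an InitVar
# pseudo-field becomes an __init__ parameter that is forwarded to
# __post_init__, but it is not stored on the instance and is not reported
# by fields():
#
#   @dataclass
#   class C:
#       x: int
#       scale: InitVar[int] = 1
#
#       def __post_init__(self, scale):
#           self.x *= scale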


# Instances of Field are only ever created from within this module,
# and only from the field() function, although Field instances are
# exposed externally as (conceptually) read-only objects.
#
# name and type are filled in after the fact, not in __init__.
# They're not known at the time this class is instantiated, but it's
# convenient if they're available later.
#
# When cls._FIELDS is filled in with a list of Field objects, the name
# and type fields will have been populated.
class Field:
    __slots__ = ('name',
                 'type',
                 'default',
                 'default_factory',
                 'repr',
                 'hash',
                 'init',
                 'compare',
                 'metadata',
                 'kw_only',
                 '_field_type',  # Private: not to be used by user code.
                 )

    def __init__(self, default, default_factory, init, repr, hash, compare,
                 metadata, kw_only):
        self.name = None
        self.type = None
        self.default = default
        self.default_factory = default_factory
        self.init = init
        self.repr = repr
        self.hash = hash
        self.compare = compare
        self.metadata = (_EMPTY_METADATA
                         if metadata is None else
                         types.MappingProxyType(metadata))
        self.kw_only = kw_only
        self._field_type = None

    def __repr__(self):
        return ('Field('
                f'name={self.name!r},'
                f'type={self.type!r},'
                f'default={self.default!r},'
                f'default_factory={self.default_factory!r},'
                f'init={self.init!r},'
                f'repr={self.repr!r},'
                f'hash={self.hash!r},'
                f'compare={self.compare!r},'
                f'metadata={self.metadata!r},'
                f'kw_only={self.kw_only!r},'
                f'_field_type={self._field_type}'
                ')')

    # This is used to support the PEP 487 __set_name__ protocol in the
    # case where we're using a field that contains a descriptor as a
    # default value.  For details on __set_name__, see
    # https://peps.python.org/pep-0487/#implementation-details.
    #
    # Note that in _process_class, this Field object is overwritten
    # with the default value, so the end result is a descriptor that
    # had __set_name__ called on it at the right time.
    def __set_name__(self, owner, name):
        func = getattr(type(self.default), '__set_name__', None)
        if func:
            # There is a __set_name__ method on the descriptor, call
            # it.
            func(self.default, owner, name)

    __class_getitem__ = classmethod(GenericAlias)


class _DataclassParams:
    __slots__ = ('init',
                 'repr',
                 'eq',
                 'order',
                 'unsafe_hash',
                 'frozen',
                 )

    def __init__(self, init, repr, eq, order, unsafe_hash, frozen):
        self.init = init
        self.repr = repr
        self.eq = eq
        self.order = order
        self.unsafe_hash = unsafe_hash
        self.frozen = frozen

    def __repr__(self):
        return ('_DataclassParams('
                f'init={self.init!r},'
                f'repr={self.repr!r},'
                f'eq={self.eq!r},'
                f'order={self.order!r},'
                f'unsafe_hash={self.unsafe_hash!r},'
                f'frozen={self.frozen!r}'
                ')')


# This function is used instead of exposing Field creation directly,
# so that a type checker can be told (via overloads) that this is a
# function whose type depends on its parameters.
def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True,
          hash=None, compare=True, metadata=None, kw_only=MISSING):
    """Return an object to identify dataclass fields.

    default is the default value of the field.  default_factory is a
    0-argument function called to initialize a field's value.  If init
    is true, the field will be a parameter to the class's __init__()
    function.  If repr is true, the field will be included in the
    object's repr().  If hash is true, the field will be included in the
    object's hash().  If compare is true, the field will be used in
    comparison functions.  metadata, if specified, must be a mapping
    which is stored but not otherwise examined by dataclass.  If kw_only
    is true, the field will become a keyword-only parameter to
    __init__().

    It is an error to specify both default and default_factory.
    """

    if default is not MISSING and default_factory is not MISSING:
        raise ValueError('cannot specify both default and default_factory')
    return Field(default, default_factory, init, repr, hash, compare,
                 metadata, kw_only)
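
# Small usage sketch (illustrative only; the class is made up): field() is
# how per-field options are supplied, e.g. a mutable default via
# default_factory, or metadata that this module stores but never interprets:
#
#   @dataclass
#   class C:
#       items: list = field(default_factory=list)
#       token: str = field(repr=False, metadata={'sensitive': True})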


def _fields_in_init_order(fields):
    # Returns the fields as __init__ will output them.  It returns 2 tuples:
    # the first for normal args, and the second for keyword args.

    return (tuple(f for f in fields if f.init and not f.kw_only),
            tuple(f for f in fields if f.init and f.kw_only)
            )


def _tuple_str(obj_name, fields):
    # Return a string representing each field of obj_name as a tuple
    # member.  So, if fields is ['x', 'y'] and obj_name is "self",
    # return "(self.x,self.y)".

    # Special case for the 0-tuple.
    if not fields:
        return '()'
    # Note the trailing comma, needed if this turns out to be a 1-tuple.
    return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)'


# This function's logic is copied from "recursive_repr" function in
# reprlib module to avoid dependency.
def _recursive_repr(user_function):
    # Decorator to make a repr function return "..." for a recursive
    # call.
    repr_running = set()

    @functools.wraps(user_function)
    def wrapper(self):
        key = id(self), _thread.get_ident()
        if key in repr_running:
            return '...'
        repr_running.add(key)
        try:
            result = user_function(self)
        finally:
            repr_running.discard(key)
        return result
    return wrapper


def _create_fn(name, args, body, *, globals=None, locals=None,
               return_type=MISSING):
    # Note that we mutate locals when exec() is called.  Caller
    # beware!  The only callers are internal to this module, so no
    # worries about external callers.
    if locals is None:
        locals = {}
    if 'BUILTINS' not in locals:
        locals['BUILTINS'] = builtins
    return_annotation = ''
    if return_type is not MISSING:
        locals['_return_type'] = return_type
        return_annotation = '->_return_type'
    args = ','.join(args)
    body = '\n'.join(f'  {b}' for b in body)

    # Compute the text of the entire function.
    txt = f' def {name}({args}){return_annotation}:\n{body}'

    local_vars = ', '.join(locals.keys())
    txt = f"def __create_fn__({local_vars}):\n{txt}\n return {name}"
    ns = {}
    exec(txt, globals, ns)
    return ns['__create_fn__'](**locals)
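
# For a rough idea of the source text this builds (illustrative only;
# 'add_one' is a made-up name): _create_fn('add_one', ['x'],
# ['return x + 1'], locals={}) would exec something shaped like
#
#   def __create_fn__(BUILTINS):
#    def add_one(x):
#     return x + 1
#    return add_one
#
# and return the inner function, so every name in locals is bound as a
# closure variable of the generated function instead of being injected
# into the module's globals.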


def _field_assign(frozen, name, value, self_name):
    # If we're a frozen class, then assign to our fields in __init__
    # via object.__setattr__.  Otherwise, just use a simple
    # assignment.
    #
    # self_name is what "self" is called in this function: don't
    # hard-code "self", since that might be a field name.
    if frozen:
        return f'BUILTINS.object.__setattr__({self_name},{name!r},{value})'
    return f'{self_name}.{name}={value}'


def _field_init(f, frozen, globals, self_name, slots):
    # Return the text of the line in the body of __init__ that will
    # initialize this field.

    default_name = f'_dflt_{f.name}'
    if f.default_factory is not MISSING:
        if f.init:
            # This field has a default factory.  If a parameter is
            # given, use it.  If not, call the factory.
            globals[default_name] = f.default_factory
            value = (f'{default_name}() '
                     f'if {f.name} is _HAS_DEFAULT_FACTORY '
                     f'else {f.name}')
        else:
            # This is a field that's not in the __init__ params, but
            # has a default factory function.  It needs to be
            # initialized here by calling the factory function,
            # because there's no other way to initialize it.

            # For a field initialized with a default=defaultvalue, the
            # class dict just has the default value
            # (cls.fieldname=defaultvalue).  But that won't work for a
            # default factory, the factory must be called in __init__
            # and we must assign that to self.fieldname.  We can't
            # fall back to the class dict's value, both because it's
            # not set, and because it might be different per-class
            # (which, after all, is why we have a factory function!).

            globals[default_name] = f.default_factory
            value = f'{default_name}()'
    else:
        # No default factory.
        if f.init:
            if f.default is MISSING:
                # There's no default, just do an assignment.
                value = f.name
            elif f.default is not MISSING:
                globals[default_name] = f.default
                value = f.name
        else:
            # If the class has slots, then initialize this field.
            if slots and f.default is not MISSING:
                globals[default_name] = f.default
                value = default_name
            else:
                # This field does not need initialization: reading from it will
                # just use the class attribute that contains the default.
                # Signify that to the caller by returning None.
                return None

    # Only test this now, so that we can create variables for the
    # default.  However, return None to signify that we're not going
    # to actually do the assignment statement for InitVars.
    if f._field_type is _FIELD_INITVAR:
        return None

    # Now, actually generate the field assignment.
    return _field_assign(frozen, f.name, value, self_name)


def _init_param(f):
    # Return the __init__ parameter string for this field.  For
    # example, the equivalent of 'x:int=3' (except instead of 'int',
    # reference a variable set to int, and instead of '3', reference a
    # variable set to 3).
    if f.default is MISSING and f.default_factory is MISSING:
        # There's no default, and no default_factory, just output the
        # variable name and type.
        default = ''
    elif f.default is not MISSING:
        # There's a default, this will be the name that's used to look
        # it up.
        default = f'=_dflt_{f.name}'
    elif f.default_factory is not MISSING:
        # There's a factory function.  Set a marker.
        default = '=_HAS_DEFAULT_FACTORY'
    return f'{f.name}:_type_{f.name}{default}'


def _init_fn(fields, std_fields, kw_only_fields, frozen, has_post_init,
             self_name, globals, slots):
    # fields contains both real fields and InitVar pseudo-fields.

    # Make sure we don't have fields without defaults following fields
    # with defaults.  This actually would be caught when exec-ing the
    # function source code, but catching it here gives a better error
    # message, and future-proofs us in case we build up the function
    # using ast.

    seen_default = False
    for f in std_fields:
        # Only consider the non-kw-only fields in the __init__ call.
        if f.init:
            if not (f.default is MISSING and f.default_factory is MISSING):
                seen_default = True
            elif seen_default:
                raise TypeError(f'non-default argument {f.name!r} '
                                'follows default argument')

    locals = {f'_type_{f.name}': f.type for f in fields}
    locals.update({
        'MISSING': MISSING,
        '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY,
    })

    body_lines = []
    for f in fields:
        line = _field_init(f, frozen, locals, self_name, slots)
        # line is None means that this field doesn't require
        # initialization (it's a pseudo-field).  Just skip it.
        if line:
            body_lines.append(line)

    # Does this class have a post-init function?
    if has_post_init:
        params_str = ','.join(f.name for f in fields
                              if f._field_type is _FIELD_INITVAR)
        body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})')

    # If no body lines, use 'pass'.
    if not body_lines:
        body_lines = ['pass']

    _init_params = [_init_param(f) for f in std_fields]
    if kw_only_fields:
        # Add the keyword-only args.  Because the * can only be added if
        # there's at least one keyword-only arg, there needs to be a test here
        # (instead of just concatenating the lists together).
        _init_params += ['*']
        _init_params += [_init_param(f) for f in kw_only_fields]
    return _create_fn('__init__',
                      [self_name] + _init_params,
                      body_lines,
                      locals=locals,
                      globals=globals,
                      return_type=None)
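
# Putting _init_param and _field_init together: for a class like
# (illustrative only)
#
#   @dataclass
#   class C:
#       x: int
#       y: list = field(default_factory=list)
#
# the generated __init__ is roughly
#
#   def __init__(self, x: _type_x, y: _type_y = _HAS_DEFAULT_FACTORY):
#       self.x = x
#       self.y = _dflt_y() if y is _HAS_DEFAULT_FACTORY else y
#
# where _type_x, _type_y and _dflt_y are names bound via _create_fn's
# locals (see above).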


def _repr_fn(fields, globals):
    fn = _create_fn('__repr__',
                    ('self',),
                    ['return self.__class__.__qualname__ + f"(' +
                     ', '.join([f"{f.name}={{self.{f.name}!r}}"
                                for f in fields]) +
                     ')"'],
                    globals=globals)
    return _recursive_repr(fn)


def _frozen_get_del_attr(cls, fields, globals):
    locals = {'cls': cls,
              'FrozenInstanceError': FrozenInstanceError}
    if fields:
        fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)'
    else:
        # Special case for the zero-length tuple.
        fields_str = '()'
    return (_create_fn('__setattr__',
                      ('self', 'name', 'value'),
                      (f'if type(self) is cls or name in {fields_str}:',
                        ' raise FrozenInstanceError(f"cannot assign to field {name!r}")',
                       f'super(cls, self).__setattr__(name, value)'),
                       locals=locals,
                       globals=globals),
            _create_fn('__delattr__',
                      ('self', 'name'),
                      (f'if type(self) is cls or name in {fields_str}:',
                        ' raise FrozenInstanceError(f"cannot delete field {name!r}")',
                       f'super(cls, self).__delattr__(name)'),
                       locals=locals,
                       globals=globals),
            )


def _cmp_fn(name, op, self_tuple, other_tuple, globals):
    # Create a comparison function.  If the fields in the object are
    # named 'x' and 'y', then self_tuple is the string
    # '(self.x,self.y)' and other_tuple is the string
    # '(other.x,other.y)'.

    return _create_fn(name,
                      ('self', 'other'),
                      [ 'if other.__class__ is self.__class__:',
                       f' return {self_tuple}{op}{other_tuple}',
                        'return NotImplemented'],
                      globals=globals)


def _hash_fn(fields, globals):
    self_tuple = _tuple_str('self', fields)
    return _create_fn('__hash__',
                      ('self',),
                      [f'return hash({self_tuple})'],
                      globals=globals)
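
# For two compared fields 'x' and 'y', the generated methods are roughly
# (illustrative only):
#
#   def __eq__(self, other):
#       if other.__class__ is self.__class__:
#           return (self.x, self.y) == (other.x, other.y)
#       return NotImplemented
#
#   def __hash__(self):
#       return hash((self.x, self.y))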


def _is_classvar(a_type, typing):
    # This test uses a typing internal class, but it's the best way to
    # test if this is a ClassVar.
    return (a_type is typing.ClassVar
            or (type(a_type) is typing._GenericAlias
                and a_type.__origin__ is typing.ClassVar))


def _is_initvar(a_type, dataclasses):
    # The module we're checking against is the module we're
    # currently in (dataclasses.py).
    return (a_type is dataclasses.InitVar
            or type(a_type) is dataclasses.InitVar)

def _is_kw_only(a_type, dataclasses):
    return a_type is dataclasses.KW_ONLY


def _is_type(annotation, cls, a_module, a_type, is_type_predicate):
    # Given a type annotation string, does it refer to a_type in
    # a_module?  For example, when checking that annotation denotes a
    # ClassVar, then a_module is typing, and a_type is
    # typing.ClassVar.

    # It's possible to look up a_module given a_type, but it involves
    # looking in sys.modules (again!), and seems like a waste since
    # the caller already knows a_module.

    # - annotation is a string type annotation
    # - cls is the class that this annotation was found in
    # - a_module is the module we want to match
    # - a_type is the type in that module we want to match
    # - is_type_predicate is a function called with (obj, a_module)
    #   that determines if obj is of the desired type.

    # Since this test does not do a local namespace lookup (and
    # instead only a module (global) lookup), there are some things it
    # gets wrong.

    # With string annotations, cv0 will be detected as a ClassVar:
    #   CV = ClassVar
    #   @dataclass
    #   class C0:
    #     cv0: CV

    # But in this example cv1 will not be detected as a ClassVar:
    #   @dataclass
    #   class C1:
    #     CV = ClassVar
    #     cv1: CV

    # In C1, the code in this function (_is_type) will look up "CV" in
    # the module and not find it, so it will not consider cv1 as a
    # ClassVar.  This is a fairly obscure corner case, and the best
    # way to fix it would be to eval() the string "CV" with the
    # correct global and local namespaces.  However that would involve
    # an eval() penalty for every single field of every dataclass
    # that's defined.  It was judged not worth it.

    match = _MODULE_IDENTIFIER_RE.match(annotation)
    if match:
        ns = None
        module_name = match.group(1)
        if not module_name:
            # No module name, assume the class's module did
            # "from dataclasses import InitVar".
            ns = sys.modules.get(cls.__module__).__dict__
        else:
            # Look up module_name in the class's module.
            module = sys.modules.get(cls.__module__)
            if module and module.__dict__.get(module_name) is a_module:
                ns = sys.modules.get(a_type.__module__).__dict__
        if ns and is_type_predicate(ns.get(match.group(2)), a_module):
            return True
    return False


def _get_field(cls, a_name, a_type, default_kw_only):
    # Return a Field object for this field name and type.  ClassVars and
    # InitVars are also returned, but marked as such (see f._field_type).
    # default_kw_only is the value of kw_only to use if there isn't a field()
    # that defines it.

    # If the default value isn't derived from Field, then it's only a
    # normal default value.  Convert it to a Field().
    default = getattr(cls, a_name, MISSING)
    if isinstance(default, Field):
        f = default
    else:
        if isinstance(default, types.MemberDescriptorType):
            # This is a field in __slots__, so it has no default value.
            default = MISSING
        f = field(default=default)

    # Only at this point do we know the name and the type.  Set them.
    f.name = a_name
    f.type = a_type

    # Assume it's a normal field until proven otherwise.  We're next
    # going to decide if it's a ClassVar or InitVar, everything else
    # is just a normal field.
    f._field_type = _FIELD

    # In addition to checking for actual types here, also check for
    # string annotations.  get_type_hints() won't always work for us
    # (see https://github.com/python/typing/issues/508 for example),
    # plus it's expensive and would require an eval for every string
    # annotation.  So, make a best effort to see if this is a ClassVar
    # or InitVar using regex's and checking that the thing referenced
    # is actually of the correct type.

    # For the complete discussion, see https://bugs.python.org/issue33453

    # If typing has not been imported, then it's impossible for any
    # annotation to be a ClassVar.  So, only look for ClassVar if
    # typing has been imported by any module (not necessarily cls's
    # module).
    typing = sys.modules.get('typing')
    if typing:
        if (_is_classvar(a_type, typing)
            or (isinstance(f.type, str)
                and _is_type(f.type, cls, typing, typing.ClassVar,
                             _is_classvar))):
            f._field_type = _FIELD_CLASSVAR

    # If the type is InitVar, or if it's a matching string annotation,
    # then it's an InitVar.
    if f._field_type is _FIELD:
        # The module we're checking against is the module we're
        # currently in (dataclasses.py).
        dataclasses = sys.modules[__name__]
        if (_is_initvar(a_type, dataclasses)
            or (isinstance(f.type, str)
                and _is_type(f.type, cls, dataclasses, dataclasses.InitVar,
                             _is_initvar))):
            f._field_type = _FIELD_INITVAR

    # Validations for individual fields.  This is delayed until now,
    # instead of in the Field() constructor, since only here do we
    # know the field name, which allows for better error reporting.

    # Special restrictions for ClassVar and InitVar.
    if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR):
        if f.default_factory is not MISSING:
            raise TypeError(f'field {f.name} cannot have a '
                            'default factory')
        # Should I check for other field settings? default_factory
        # seems the most serious to check for.  Maybe add others.  For
        # example, how about init=False (or really,
        # init=<not-the-default-init-value>)?  It makes no sense for
        # ClassVar and InitVar to specify init=<anything>.

    # kw_only validation and assignment.
    if f._field_type in (_FIELD, _FIELD_INITVAR):
        # For real and InitVar fields, if kw_only wasn't specified use the
        # default value.
        if f.kw_only is MISSING:
            f.kw_only = default_kw_only
    else:
        # Make sure kw_only isn't set for ClassVars
        assert f._field_type is _FIELD_CLASSVAR
        if f.kw_only is not MISSING:
            raise TypeError(f'field {f.name} is a ClassVar but specifies '
                            'kw_only')

    # For real fields, disallow mutable defaults.  Use unhashable as a proxy
    # indicator for mutability.  Read the __hash__ attribute from the class,
    # not the instance.
    if f._field_type is _FIELD and f.default.__class__.__hash__ is None:
        raise ValueError(f'mutable default {type(f.default)} for field '
                         f'{f.name} is not allowed: use default_factory')

    return f

def _set_qualname(cls, value):
    # Ensure that the functions returned from _create_fn use the proper
    # __qualname__ (the class they belong to).
    if isinstance(value, FunctionType):
        value.__qualname__ = f"{cls.__qualname__}.{value.__name__}"
    return value

def _set_new_attribute(cls, name, value):
    # Never overwrites an existing attribute.  Returns True if the
    # attribute already exists.
    if name in cls.__dict__:
        return True
    _set_qualname(cls, value)
    setattr(cls, name, value)
    return False


# Decide if/how we're going to create a hash function.  Key is
# (unsafe_hash, eq, frozen, does-hash-exist).  Value is the action to
# take.  The common case is to do nothing, so instead of providing a
# function that is a no-op, use None to signify that.

def _hash_set_none(cls, fields, globals):
    return None

def _hash_add(cls, fields, globals):
    flds = [f for f in fields if (f.compare if f.hash is None else f.hash)]
    return _set_qualname(cls, _hash_fn(flds, globals))

def _hash_exception(cls, fields, globals):
    # Raise an exception.
    raise TypeError(f'Cannot overwrite attribute __hash__ '
                    f'in class {cls.__name__}')

#
#                +-------------------------------------- unsafe_hash?
#                |      +------------------------------- eq?
#                |      |      +------------------------ frozen?
#                |      |      |      +----------------  has-explicit-hash?
#                |      |      |      |
#                |      |      |      |        +-------  action
#                |      |      |      |        |
#                v      v      v      v        v
_hash_action = {(False, False, False, False): None,
                (False, False, False, True ): None,
                (False, False, True,  False): None,
                (False, False, True,  True ): None,
                (False, True,  False, False): _hash_set_none,
                (False, True,  False, True ): None,
                (False, True,  True,  False): _hash_add,
                (False, True,  True,  True ): None,
                (True,  False, False, False): _hash_add,
                (True,  False, False, True ): _hash_exception,
                (True,  False, True,  False): _hash_add,
                (True,  False, True,  True ): _hash_exception,
                (True,  True,  False, False): _hash_add,
                (True,  True,  False, True ): _hash_exception,
                (True,  True,  True,  False): _hash_add,
                (True,  True,  True,  True ): _hash_exception,
                }
# See https://bugs.python.org/issue32929#msg312829 for an if-statement
# version of this table.


def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen,
                   match_args, kw_only, slots, weakref_slot):
    # Now that dicts retain insertion order, there's no reason to use
    # an ordered dict.  I am leveraging that ordering here, because
    # derived class fields overwrite base class fields, but the order
    # is defined by the base class, which is found first.
    fields = {}

    if cls.__module__ in sys.modules:
        globals = sys.modules[cls.__module__].__dict__
    else:
        # Theoretically this can happen if someone writes
        # a custom string to cls.__module__.  In which case
        # such dataclass won't be fully introspectable
        # (w.r.t. typing.get_type_hints) but will still function
        # correctly.
        globals = {}

    setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order,
                                           unsafe_hash, frozen))

    # Find our base classes in reverse MRO order, and exclude
    # ourselves.  In reversed order so that more derived classes
    # override earlier field definitions in base classes.  As long as
    # we're iterating over them, see if any are frozen.
    any_frozen_base = False
    has_dataclass_bases = False
    for b in cls.__mro__[-1:0:-1]:
        # Only process classes that have been processed by our
        # decorator.  That is, they have a _FIELDS attribute.
        base_fields = getattr(b, _FIELDS, None)
        if base_fields is not None:
            has_dataclass_bases = True
            for f in base_fields.values():
                fields[f.name] = f
            if getattr(b, _PARAMS).frozen:
                any_frozen_base = True

    # Annotations that are defined in this class (not in base
    # classes).  If __annotations__ isn't present, then this class
    # adds no new annotations.  We use this to compute fields that are
    # added by this class.
    #
    # Fields are found from cls_annotations, which is guaranteed to be
    # ordered.  Default values are from class attributes, if a field
    # has a default.  If the default value is a Field(), then it
    # contains additional info beyond (and possibly including) the
    # actual default value.  Pseudo-fields ClassVars and InitVars are
    # included, despite the fact that they're not real fields.  That's
    # dealt with later.
    cls_annotations = cls.__dict__.get('__annotations__', {})

    # Now find fields in our class.  While doing so, validate some
    # things, and set the default values (as class attributes) where
    # we can.
    cls_fields = []
    # Get a reference to this module for the _is_kw_only() test.
    KW_ONLY_seen = False
    dataclasses = sys.modules[__name__]
    for name, type in cls_annotations.items():
        # See if this is a marker to change the value of kw_only.
        if (_is_kw_only(type, dataclasses)
            or (isinstance(type, str)
                and _is_type(type, cls, dataclasses, dataclasses.KW_ONLY,
                             _is_kw_only))):
            # Switch the default to kw_only=True, and ignore this
            # annotation: it's not a real field.
            if KW_ONLY_seen:
                raise TypeError(f'{name!r} is KW_ONLY, but KW_ONLY '
                                'has already been specified')
            KW_ONLY_seen = True
            kw_only = True
        else:
            # Otherwise it's a field of some type.
            cls_fields.append(_get_field(cls, name, type, kw_only))

    for f in cls_fields:
        fields[f.name] = f

        # If the class attribute (which is the default value for this
        # field) exists and is of type 'Field', replace it with the
        # real default.  This is so that normal class introspection
        # sees a real default value, not a Field.
        if isinstance(getattr(cls, f.name, None), Field):
            if f.default is MISSING:
                # If there's no default, delete the class attribute.
                # This happens if we specify field(repr=False), for
                # example (that is, we specified a field object, but
                # no default value).  Also if we're using a default
                # factory.  The class attribute should not be set at
                # all in the post-processed class.
                delattr(cls, f.name)
            else:
                setattr(cls, f.name, f.default)

    # Do we have any Field members that don't also have annotations?
    for name, value in cls.__dict__.items():
        if isinstance(value, Field) and not name in cls_annotations:
            raise TypeError(f'{name!r} is a field but has no type annotation')

    # Check rules that apply if we are derived from any dataclasses.
    if has_dataclass_bases:
        # Raise an exception if any of our bases are frozen, but we're not.
        if any_frozen_base and not frozen:
            raise TypeError('cannot inherit non-frozen dataclass from a '
                            'frozen one')

        # Raise an exception if we're frozen, but none of our bases are.
        if not any_frozen_base and frozen:
            raise TypeError('cannot inherit frozen dataclass from a '
                            'non-frozen one')

    # Remember all of the fields on our class (including bases).  This
    # also marks this class as being a dataclass.
    setattr(cls, _FIELDS, fields)

    # Was this class defined with an explicit __hash__?  Note that if
    # __eq__ is defined in this class, then python will automatically
    # set __hash__ to None.  This is a heuristic, as it's possible
    # that such a __hash__ == None was not auto-generated, but it's
    # close enough.
    class_hash = cls.__dict__.get('__hash__', MISSING)
    has_explicit_hash = not (class_hash is MISSING or
                             (class_hash is None and '__eq__' in cls.__dict__))

    # If we're generating ordering methods, we must be generating the
    # eq methods.
    if order and not eq:
        raise ValueError('eq must be true if order is true')

    # Include InitVars and regular fields (so, not ClassVars).  This is
    # initialized here, outside of the "if init:" test, because std_init_fields
    # is used with match_args, below.
    all_init_fields = [f for f in fields.values()
                       if f._field_type in (_FIELD, _FIELD_INITVAR)]
    (std_init_fields,
     kw_only_init_fields) = _fields_in_init_order(all_init_fields)

    if init:
        # Does this class have a post-init function?
        has_post_init = hasattr(cls, _POST_INIT_NAME)

        _set_new_attribute(cls, '__init__',
                           _init_fn(all_init_fields,
                                    std_init_fields,
                                    kw_only_init_fields,
                                    frozen,
                                    has_post_init,
                                    # The name to use for the "self"
                                    # param in __init__.  Use "self"
                                    # if possible.
                                    '__dataclass_self__' if 'self' in fields
                                            else 'self',
                                    globals,
                                    slots,
                          ))

    # Get the fields as a list, and include only real fields.  This is
    # used in all of the following methods.
    field_list = [f for f in fields.values() if f._field_type is _FIELD]

    if repr:
        flds = [f for f in field_list if f.repr]
        _set_new_attribute(cls, '__repr__', _repr_fn(flds, globals))

    if eq:
        # Create __eq__ method.  There's no need for a __ne__ method,
        # since python will call __eq__ and negate it.
        flds = [f for f in field_list if f.compare]
        self_tuple = _tuple_str('self', flds)
        other_tuple = _tuple_str('other', flds)
        _set_new_attribute(cls, '__eq__',
                           _cmp_fn('__eq__', '==',
                                   self_tuple, other_tuple,
                                   globals=globals))

    if order:
        # Create and set the ordering methods.
        flds = [f for f in field_list if f.compare]
        self_tuple = _tuple_str('self', flds)
        other_tuple = _tuple_str('other', flds)
        for name, op in [('__lt__', '<'),
                         ('__le__', '<='),
                         ('__gt__', '>'),
                         ('__ge__', '>='),
                         ]:
            if _set_new_attribute(cls, name,
                                  _cmp_fn(name, op, self_tuple, other_tuple,
                                          globals=globals)):
                raise TypeError(f'Cannot overwrite attribute {name} '
                                f'in class {cls.__name__}. Consider using '
                                'functools.total_ordering')

    if frozen:
        for fn in _frozen_get_del_attr(cls, field_list, globals):
            if _set_new_attribute(cls, fn.__name__, fn):
                raise TypeError(f'Cannot overwrite attribute {fn.__name__} '
                                f'in class {cls.__name__}')

    # Decide if/how we're going to create a hash function.
    hash_action = _hash_action[bool(unsafe_hash),
                               bool(eq),
                               bool(frozen),
                               has_explicit_hash]
    if hash_action:
        # No need to call _set_new_attribute here, since by the time
        # we're here the overwriting is unconditional.
        cls.__hash__ = hash_action(cls, field_list, globals)

    if not getattr(cls, '__doc__'):
        # Create a class doc-string.
        cls.__doc__ = (cls.__name__ +
                       str(inspect.signature(cls)).replace(' -> None', ''))

    if match_args:
        # I could probably compute this once
        _set_new_attribute(cls, '__match_args__',
                           tuple(f.name for f in std_init_fields))

    # It's an error to specify weakref_slot if slots is False.
    if weakref_slot and not slots:
        raise TypeError('weakref_slot is True but slots is False')
    if slots:
        cls = _add_slots(cls, frozen, weakref_slot)

    abc.update_abstractmethods(cls)

    return cls
# _dataclass_getstate and _dataclass_setstate are needed for pickling frozen
|
2021-06-13 10:47:44 +08:00
|
|
|
# classes with slots. These could be slightly more performant if we generated
|
2021-05-02 01:27:30 +08:00
|
|
|
# the code instead of iterating over fields. But that can be a project for
|
|
|
|
# another day, if performance becomes an issue.
|
|
|
|
def _dataclass_getstate(self):
|
|
|
|
return [getattr(self, f.name) for f in fields(self)]
|
|
|
|
|
|
|
|
|
|
|
|
def _dataclass_setstate(self, state):
|
|
|
|
for field, value in zip(fields(self), state):
|
|
|
|
# use setattr because dataclass may be frozen
|
|
|
|
object.__setattr__(self, field.name, value)
|
|
|
|
|
|
|
|
|
2022-03-20 05:01:17 +08:00
|
|
|
def _get_slots(cls):
|
|
|
|
match cls.__dict__.get('__slots__'):
|
|
|
|
case None:
|
|
|
|
return
|
|
|
|
case str(slot):
|
|
|
|
yield slot
|
|
|
|
# Slots may be any iterable, but we cannot handle an iterator
|
|
|
|
# because it will already be (partially) consumed.
|
|
|
|
case iterable if not hasattr(iterable, '__next__'):
|
|
|
|
yield from iterable
|
|
|
|
case _:
|
|
|
|
raise TypeError(f"Slots of '{cls.__name__}' cannot be determined")
|
|
|
|
|
|
|
|
|
2022-05-03 00:36:39 +08:00
|
|
|
def _add_slots(cls, is_frozen, weakref_slot):
    # Need to create a new class, since we can't set __slots__
    # after a class has been created.

    # Make sure __slots__ isn't already set.
    if '__slots__' in cls.__dict__:
        raise TypeError(f'{cls.__name__} already specifies __slots__')

    # Create a new dict for our new class.
    cls_dict = dict(cls.__dict__)
    field_names = tuple(f.name for f in fields(cls))
    # Make sure slots don't overlap with those in base classes.
    inherited_slots = set(
        itertools.chain.from_iterable(map(_get_slots, cls.__mro__[1:-1]))
    )
    # The slots for our class.  Remove slots from our base classes.  Add
    # '__weakref__' if weakref_slot was given, unless it is already present.
    cls_dict["__slots__"] = tuple(
        itertools.filterfalse(
            inherited_slots.__contains__,
            itertools.chain(
                # gh-93521: '__weakref__' also needs to be filtered out if
                # already present in inherited_slots
                field_names, ('__weakref__',) if weakref_slot else ()
            )
        ),
    )

    for field_name in field_names:
        # Remove our attributes, if present.  Their defaults remain
        # available on the Field objects stored in __dataclass_fields__.
        cls_dict.pop(field_name, None)

    # Remove __dict__ itself.
    cls_dict.pop('__dict__', None)

    # And finally create the class.
    qualname = getattr(cls, '__qualname__', None)
    cls = type(cls)(cls.__name__, cls.__bases__, cls_dict)
    if qualname is not None:
        cls.__qualname__ = qualname

    if is_frozen:
        # Need this for pickling frozen classes with slots.
        cls.__getstate__ = _dataclass_getstate
        cls.__setstate__ = _dataclass_setstate

    return cls


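# A small sketch of the class produced when slots and weakref_slot are used;
# 'Node' is a hypothetical example class:
#
#     import weakref
#
#     @dataclass(slots=True, weakref_slot=True)
#     class Node:
#         value: int
#
#     assert Node.__slots__ == ('value', '__weakref__')
#     r = weakref.ref(Node(3))        # possible because of the extra slot
#     # Note that with slots=True the decorator binds the *new* class returned
#     # by _add_slots(), not the class object that was originally decorated.

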
def dataclass(cls=None, /, *, init=True, repr=True, eq=True, order=False,
              unsafe_hash=False, frozen=False, match_args=True,
              kw_only=False, slots=False, weakref_slot=False):
    """Returns the same class as was passed in, with dunder methods
    added based on the fields defined in the class.

    Examines PEP 526 __annotations__ to determine fields.

    If init is true, an __init__() method is added to the class. If
    repr is true, a __repr__() method is added. If order is true, rich
    comparison dunder methods are added. If unsafe_hash is true, a
    __hash__() method is added. If frozen is true, fields may not be
    assigned to after instance creation. If match_args is true, the
    __match_args__ tuple is added. If kw_only is true, then by default
    all fields are keyword-only. If slots is true, a __slots__
    attribute is added. If weakref_slot is true, a slot named
    '__weakref__' is also added (slots must be true as well).
    """

    def wrap(cls):
        return _process_class(cls, init, repr, eq, order, unsafe_hash,
                              frozen, match_args, kw_only, slots,
                              weakref_slot)

    # See if we're being called as @dataclass or @dataclass().
    if cls is None:
        # We're called with parens.
        return wrap

    # We're called as @dataclass without parens.
    return wrap(cls)


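# Both invocation forms described above, sketched ('A' and 'B' are
# hypothetical example classes):
#
#     @dataclass                              # bare: wrap(cls) is called directly
#     class A:
#         x: int
#
#     @dataclass(order=True, kw_only=True)    # with parens: wrap is returned, then applied
#     class B:
#         x: int = 0
#
#     assert B(x=1) < B(x=2)                  # __lt__ added because order=True

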
def fields(class_or_instance):
    """Return a tuple describing the fields of this dataclass.

    Accepts a dataclass or an instance of one. Tuple elements are of
    type Field.
    """

    # Might it be worth caching this, per class?
    try:
        fields = getattr(class_or_instance, _FIELDS)
    except AttributeError:
        raise TypeError('must be called with a dataclass type or instance')

    # Exclude pseudo-fields.  Note that fields is sorted by insertion
    # order, so the order of the tuple is as the fields were defined.
    return tuple(f for f in fields.values() if f._field_type is _FIELD)


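# A short sketch of the pseudo-field filtering above ('C' is a hypothetical
# example class):
#
#     from typing import ClassVar
#
#     @dataclass
#     class C:
#         x: int
#         y: int = 0
#         z: ClassVar[int] = 10               # pseudo-field: not reported
#
#     assert [f.name for f in fields(C)] == ['x', 'y']
#     assert [f.name for f in fields(C(1))] == ['x', 'y']   # an instance works too

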
def _is_dataclass_instance(obj):
    """Returns True if obj is an instance of a dataclass."""
    return hasattr(type(obj), _FIELDS)


def is_dataclass(obj):
    """Returns True if obj is a dataclass or an instance of a
    dataclass."""
    cls = obj if isinstance(obj, type) and not isinstance(obj, GenericAlias) else type(obj)
    return hasattr(cls, _FIELDS)


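# Sketch of the class/instance distinction ('C' is a hypothetical example
# class):
#
#     @dataclass
#     class C:
#         x: int
#
#     assert is_dataclass(C) and is_dataclass(C(1))
#     assert not _is_dataclass_instance(C)     # C itself is not an *instance*
#     assert _is_dataclass_instance(C(1))

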
def asdict(obj, *, dict_factory=dict):
    """Return the fields of a dataclass instance as a new dictionary mapping
    field names to field values.

    Example usage::

      @dataclass
      class C:
          x: int
          y: int

      c = C(1, 2)
      assert asdict(c) == {'x': 1, 'y': 2}

    If given, 'dict_factory' will be used instead of built-in dict.
    The function applies recursively to field values that are
    dataclass instances. This will also look into built-in containers:
    tuples, lists, and dicts.
    """
    if not _is_dataclass_instance(obj):
        raise TypeError("asdict() should be called on dataclass instances")
    return _asdict_inner(obj, dict_factory)


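# A sketch of the recursion and the dict_factory hook described in the
# docstring above ('Inner' and 'Outer' are hypothetical example classes):
#
#     from collections import OrderedDict
#
#     @dataclass
#     class Inner:
#         a: int
#
#     @dataclass
#     class Outer:
#         inner: Inner
#         items: list
#
#     o = Outer(Inner(1), [2, 3])
#     assert asdict(o) == {'inner': {'a': 1}, 'items': [2, 3]}
#     assert type(asdict(o, dict_factory=OrderedDict)) is OrderedDict

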
def _asdict_inner(obj, dict_factory):
    if _is_dataclass_instance(obj):
        result = []
        for f in fields(obj):
            value = _asdict_inner(getattr(obj, f.name), dict_factory)
            result.append((f.name, value))
        return dict_factory(result)
    elif isinstance(obj, tuple) and hasattr(obj, '_fields'):
        # obj is a namedtuple.  Recurse into it, but the returned
        # object is another namedtuple of the same type.  This is
        # similar to how other list- or tuple-derived classes are
        # treated (see below), but we just need to create them
        # differently because a namedtuple's __init__ needs to be
        # called differently (see bpo-34363).

        # I'm not using namedtuple's _asdict() method, because:
        # - it does not recurse into the namedtuple fields and
        #   convert them to dicts (using dict_factory).
        # - I don't actually want to return a dict here.  The main
        #   use case here is json.dumps, and it handles converting
        #   namedtuples to lists.  Admittedly we're losing some
        #   information here when we produce a json list instead of a
        #   dict.  Note that if we returned dicts here instead of
        #   namedtuples, we could no longer call asdict() on a data
        #   structure where a namedtuple was used as a dict key.
        return type(obj)(*[_asdict_inner(v, dict_factory) for v in obj])
    elif isinstance(obj, (list, tuple)):
        # Assume we can create an object of this type by passing in a
        # generator (which is not true for namedtuples, handled
        # above).
        return type(obj)(_asdict_inner(v, dict_factory) for v in obj)
    elif isinstance(obj, dict):
        return type(obj)((_asdict_inner(k, dict_factory),
                          _asdict_inner(v, dict_factory))
                         for k, v in obj.items())
    else:
        return copy.deepcopy(obj)


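# A sketch of the namedtuple special-casing above ('Pair' and 'Holder' are
# hypothetical example names):
#
#     from collections import namedtuple
#
#     Pair = namedtuple('Pair', ['a', 'b'])
#
#     @dataclass
#     class Holder:
#         pair: Pair
#
#     d = asdict(Holder(Pair(1, 2)))
#     assert d == {'pair': Pair(1, 2)}
#     assert isinstance(d['pair'], Pair)       # rebuilt as a namedtuple, not a dict

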
def astuple(obj, *, tuple_factory=tuple):
    """Return the fields of a dataclass instance as a new tuple of field values.

    Example usage::

      @dataclass
      class C:
          x: int
          y: int

      c = C(1, 2)
      assert astuple(c) == (1, 2)

    If given, 'tuple_factory' will be used instead of built-in tuple.
    The function applies recursively to field values that are
    dataclass instances. This will also look into built-in containers:
    tuples, lists, and dicts.
    """

    if not _is_dataclass_instance(obj):
        raise TypeError("astuple() should be called on dataclass instances")
    return _astuple_inner(obj, tuple_factory)


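# A sketch of astuple()'s recursion into nested dataclasses and containers
# ('Inner' and 'Outer' are hypothetical example classes):
#
#     @dataclass
#     class Inner:
#         a: int
#
#     @dataclass
#     class Outer:
#         inner: Inner
#         items: list
#
#     assert astuple(Outer(Inner(1), [2, 3])) == ((1,), [2, 3])

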
def _astuple_inner(obj, tuple_factory):
    if _is_dataclass_instance(obj):
        result = []
        for f in fields(obj):
            value = _astuple_inner(getattr(obj, f.name), tuple_factory)
            result.append(value)
        return tuple_factory(result)
    elif isinstance(obj, tuple) and hasattr(obj, '_fields'):
        # obj is a namedtuple.  Recurse into it, but the returned
        # object is another namedtuple of the same type.  This is
        # similar to how other list- or tuple-derived classes are
        # treated (see below), but we just need to create them
        # differently because a namedtuple's __init__ needs to be
        # called differently (see bpo-34363).
        return type(obj)(*[_astuple_inner(v, tuple_factory) for v in obj])
    elif isinstance(obj, (list, tuple)):
        # Assume we can create an object of this type by passing in a
        # generator (which is not true for namedtuples, handled
        # above).
        return type(obj)(_astuple_inner(v, tuple_factory) for v in obj)
    elif isinstance(obj, dict):
        return type(obj)((_astuple_inner(k, tuple_factory),
                          _astuple_inner(v, tuple_factory))
                         for k, v in obj.items())
    else:
        return copy.deepcopy(obj)


def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True,
                   repr=True, eq=True, order=False, unsafe_hash=False,
                   frozen=False, match_args=True, kw_only=False, slots=False,
                   weakref_slot=False):
    """Return a new dynamically created dataclass.

    The dataclass name will be 'cls_name'.  'fields' is an iterable
    whose elements are each either a name string, a (name, type) pair,
    or a (name, type, Field) triple.  If the type is omitted, the
    string 'typing.Any' is used.  Field objects are created by the
    equivalent of calling 'field(name, type [, Field-info])'.

      C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,))

    is equivalent to:

      @dataclass
      class C(Base):
          x: 'typing.Any'
          y: int
          z: int = field(init=False)

    For the bases and namespace parameters, see the builtin type() function.

    The parameters init, repr, eq, order, unsafe_hash, frozen,
    match_args, kw_only, slots, and weakref_slot are passed to
    dataclass().
    """

    if namespace is None:
        namespace = {}

    # While we're looking through the field names, validate that they
    # are identifiers, are not keywords, and are not duplicates.
    seen = set()
    annotations = {}
    defaults = {}
    for item in fields:
        if isinstance(item, str):
            name = item
            tp = 'typing.Any'
        elif len(item) == 2:
            name, tp, = item
        elif len(item) == 3:
            name, tp, spec = item
            defaults[name] = spec
        else:
            raise TypeError(f'Invalid field: {item!r}')

        if not isinstance(name, str) or not name.isidentifier():
            raise TypeError(f'Field names must be valid identifiers: {name!r}')
        if keyword.iskeyword(name):
            raise TypeError(f'Field names must not be keywords: {name!r}')
        if name in seen:
            raise TypeError(f'Field name duplicated: {name!r}')

        seen.add(name)
        annotations[name] = tp

    # Update 'ns' with the user-supplied namespace plus our calculated values.
    def exec_body_callback(ns):
        ns.update(namespace)
        ns.update(defaults)
        ns['__annotations__'] = annotations

    # We use `types.new_class()` instead of simply `type()` to allow dynamic
    # creation of generic dataclasses.
    cls = types.new_class(cls_name, bases, {}, exec_body_callback)

    # Apply the normal decorator.
    return dataclass(cls, init=init, repr=repr, eq=eq, order=order,
                     unsafe_hash=unsafe_hash, frozen=frozen,
                     match_args=match_args, kw_only=kw_only, slots=slots,
                     weakref_slot=weakref_slot)


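# A sketch of make_dataclass() with a default supplied via field() and a
# method supplied via 'namespace' ('Point' and 'norm2' are hypothetical names):
#
#     Point = make_dataclass(
#         'Point',
#         [('x', int), ('y', int, field(default=0))],
#         namespace={'norm2': lambda self: self.x ** 2 + self.y ** 2},
#         frozen=True)
#
#     p = Point(3, 4)
#     assert p.norm2() == 25

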
def replace(obj, /, **changes):
    """Return a new object replacing specified fields with new values.

    This is especially useful for frozen classes.  Example usage::

      @dataclass(frozen=True)
      class C:
          x: int
          y: int

      c = C(1, 2)
      c1 = replace(c, x=3)
      assert c1.x == 3 and c1.y == 2
    """

    # We're going to mutate 'changes', but that's okay because it's a
    # new dict, even if called with 'replace(obj, **my_changes)'.

    if not _is_dataclass_instance(obj):
        raise TypeError("replace() should be called on dataclass instances")

    # It's an error to have init=False fields in 'changes'.
    # If a field is not in 'changes', read its value from the provided obj.

    for f in getattr(obj, _FIELDS).values():
        # Only consider normal fields or InitVars.
        if f._field_type is _FIELD_CLASSVAR:
            continue

        if not f.init:
            # Error if this field is specified in changes.
            if f.name in changes:
                raise ValueError(f'field {f.name} is declared with '
                                 'init=False, it cannot be specified with '
                                 'replace()')
            continue

        if f.name not in changes:
            if f._field_type is _FIELD_INITVAR and f.default is MISSING:
                raise ValueError(f"InitVar {f.name!r} "
                                 'must be specified with replace()')
            changes[f.name] = getattr(obj, f.name)

    # Create the new object, which calls __init__() and
    # __post_init__() (if defined), using all of the init fields we've
    # added and/or left in 'changes'.  If there are values supplied in
    # changes that aren't fields, this will correctly raise a
    # TypeError.
    return obj.__class__(**changes)


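# A sketch of the init=False handling above ('C' is a hypothetical example
# class):
#
#     @dataclass
#     class C:
#         x: int
#         n: int = field(init=False, default=0)
#
#     c = replace(C(1), x=2)      # fine: c.x == 2, and c.n is re-initialized to 0
#     replace(C(1), n=5)          # ValueError: n is declared with init=False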