Skip to content

databind.core

databind.core

Alias

Bases: Setting

The #Alias setting is used to attach one or more alternative names to a dataclass field that should be used instead of the field's name in the code.

Example:

import typing
from dataclasses import dataclass
from databind.core.settings import Alias

@dataclass
class MyClass:
  my_field: typing.Annotated[int, Alias('foobar', 'spam')]

When deserializing a payload, converters should now use foobar if it exists, or fall back to spam when looking up the value for the field in the payload as opposed to my_field. When serializing, converters should use foobar as the name in the generated payload (always the first alias).

Source code in databind/core/settings.py
class Alias(Setting):
    """Attaches one or more alternative names to a dataclass field which converters should use in place
    of the field's name in the code.

    Example:

    ```py
    import typing
    from dataclasses import dataclass
    from databind.core.settings import Alias

    @dataclass
    class MyClass:
      my_field: typing.Annotated[int, Alias('foobar', 'spam')]
    ```

    During deserialization, converters should look up `foobar` first and fall back to `spam` (as opposed to
    `my_field`) when reading the field's value from the payload. During serialization, converters should use
    the first alias (`foobar`) as the name in the generated payload.
    """

    #: All aliases passed to the constructor, in order.
    aliases: t.Tuple[str, ...]
    #: The priority of this setting.
    priority: Priority = Priority.NORMAL

    def __init__(self, alias: str, *additional_aliases: str, priority: Priority = Priority.NORMAL) -> None:
        self.aliases = (alias, *additional_aliases)
        self.priority = priority

    def __repr__(self) -> str:
        formatted_aliases = ", ".join(repr(name) for name in self.aliases)
        return f'Alias({formatted_aliases}, priority={self.priority!r})'

BooleanSetting dataclass

Bases: Setting

Base class for boolean settings.

Source code in databind/core/settings.py
@dataclasses.dataclass(frozen=True)
class BooleanSetting(Setting):
    """Abstract base class for settings that represent a simple on/off flag; subclass to define a concrete one."""

    #: Whether the flag is turned on.
    enabled: bool = True
    #: The priority of this setting.
    priority: Priority = Priority.NORMAL

    def __post_init__(self) -> None:
        # Only concrete subclasses are meaningful; reject direct instantiation.
        if self.__class__ is BooleanSetting:
            raise TypeError("BooleanSetting cannot be directly instantiated")

ClassDecoratorSetting

Bases: Setting

Source code in databind/core/settings.py
class ClassDecoratorSetting(Setting):
    """Base class for settings that can also be applied to a class via decorator syntax."""

    #: The type that this setting instance has been bound to via #__call__, if any.
    bound_to: t.Optional[type] = None

    def __init__(self) -> None:
        # This base class must not be instantiated directly; only subclasses are usable.
        if self.__class__ is ClassDecoratorSetting:
            raise TypeError("ClassDecoratorSetting cannot be directly instantiated")
        super().__init__()

    def __call__(self, type_: t.Type[T]) -> t.Type[T]:
        """Decorate the class *type_* with this setting, adding the setting to its `__databind_settings__` list
        (which is created if it does not exist) and sets #bound_to. The same setting instance cannot decorate
        multiple types."""

        assert isinstance(type_, type), type_
        if self.bound_to is not None:
            raise RuntimeError("cannot decorate multiple types with the same setting instance")

        self.bound_to = type_
        registry = getattr(type_, "__databind_settings__", None)
        if registry is None:
            registry = []
            setattr(type_, "__databind_settings__", registry)
        registry.append(self)
        return type_
__call__
__call__(type_: Type[T]) -> Type[T]

Decorate the class type_ with this setting, adding the setting to its __databind_settings__ list (which is created if it does not exist) and sets #bound_to. The same setting instance cannot decorate multiple types.

Source code in databind/core/settings.py
def __call__(self, type_: t.Type[T]) -> t.Type[T]:
    """Bind this setting to the class *type_*, appending it to the class' `__databind_settings__` list
    (creating that list if missing) and recording the class in #bound_to. A single setting instance may
    only ever decorate one type."""

    assert isinstance(type_, type), type_
    if self.bound_to is not None:
        raise RuntimeError("cannot decorate multiple types with the same setting instance")

    self.bound_to = type_
    registry = getattr(type_, "__databind_settings__", None)
    if registry is None:
        registry = []
        setattr(type_, "__databind_settings__", registry)
    registry.append(self)
    return type_

Context dataclass

The context is constructed by the #ObjectMapper and passed to an applicable #Converter to convert #value according to the #datatype.

Source code in databind/core/context.py
@dataclasses.dataclass
class Context:
    """The context is constructed by the #ObjectMapper and passed to an applicable #Converter to convert #value
    according to the #datatype."""

    #: The parent context.
    parent: t.Optional["Context"] = dataclasses.field(repr=False)

    #: The direction (i.e. deserialization or serialization).
    direction: Direction

    #: The value to convert.
    value: t.Any = dataclasses.field(repr=False)

    #: The expected datatype of the value to inform the converter of what to convert the #value from or to.
    datatype: TypeHint

    #: A list of #Setting#s that are to be taken into account by the converter which can potentially impact
    #: the conversion process.
    settings: "SettingsProvider" = dataclasses.field(repr=False)

    #: The key or index under which #value is present in the source material relative to the #parent context.
    #: This is `None` only for the root value in the same source. The value must be #Context.ROOT if the context
    #: has no parent.
    key: t.Union[int, str, Root, None, t.Any]

    #: The location of the #value in the source material.
    location: Location

    #: A function to dispatch the further conversion of a #Context.
    convert_func: t.Callable[["Context"], t.Any] = dataclasses.field(repr=False)

    #: Sentinel value used as the #key of a context that has no parent.
    ROOT: t.ClassVar = Root.Value

    def __post_init__(self) -> None:
        assert isinstance(self.datatype, TypeHint), self.datatype
        assert self.location is not None
        # A context without a parent is the root context and must use Context.ROOT as its key.
        assert self.parent is not None or self.key == Context.ROOT

    def get_setting(self, setting_type: t.Type["T_Setting"]) -> "T_Setting | None":
        """Retrieve a setting by type for the current context."""

        return self.settings.get_setting(self, setting_type)

    def spawn(
        self,
        value: t.Any,
        datatype: t.Union[TypeHint, t.Any],
        key: t.Union[int, str, None],
        location: t.Optional[Location] = None,
    ) -> "Context":
        """Spawn a sub context with a new value, datatype, key and optionally a new location. If the location is
        not overwritten, the parent filename is inherited, but not line number and column.

        Arguments:
          value: The value to convert.
          datatype: The datatype of *value*. If this is not already a #TypeHint, it will be converted to one
            using #TypeHint().
          key: The key or index at which the *value* can be found relative to the parent.
          location: The location of the new value. If not specified, the parent filename is inherited but not the
            line number and column.
        Returns:
          A new #Context object that has *self* as its #parent.
        """

        if not isinstance(datatype, TypeHint):
            datatype = TypeHint(datatype)

        if location is None:
            location = Location(self.location.filename, None, None)

        return Context(self, self.direction, value, datatype, self.settings, key, location, self.convert_func)

    def convert(self) -> t.Any:
        """Invoke the #convert_func with *self*."""

        return self.convert_func(self)

    def iter_hierarchy_up(self) -> t.Iterable["Context"]:
        """Yield this context first, followed by each ancestor context up to the root."""
        current: t.Optional[Context] = self
        while current:
            yield current
            current = current.parent
convert
convert() -> Any

Invoke the #convert_func with self.

Source code in databind/core/context.py
def convert(self) -> t.Any:
    """Dispatch the conversion of this context through #convert_func and return the result."""

    dispatch = self.convert_func
    return dispatch(self)
get_setting
get_setting(setting_type: Type[T_Setting]) -> T_Setting | None

Retrieve a setting by type for the current context.

Source code in databind/core/context.py
def get_setting(self, setting_type: t.Type["T_Setting"]) -> "T_Setting | None":
    """Look up a setting of *setting_type* applicable to this context via the settings provider."""

    provider = self.settings
    return provider.get_setting(self, setting_type)
spawn
spawn(value: Any, datatype: Union[TypeHint, Any], key: Union[int, str, None], location: Optional[Location] = None) -> Context

Spawn a sub context with a new value, datatype, key and optionally a new location. If the location is not overwritten, the parent filename is inherited, but not line number and column.

Parameters:

Name Type Description Default
value Any

The value to convert.

required
datatype Union[TypeHint, Any]

The datatype of value. If this is not already a #TypeHint, it will be converted to one using #TypeHint().

required
key Union[int, str, None]

The key or index at which the value can be found relative to the parent.

required
location Optional[Location]

The location of the new value. If not specified, the parent filename is inherited but not the line number and column.

None

Returns: A new #Context object that has self as its #parent.

Source code in databind/core/context.py
def spawn(
    self,
    value: t.Any,
    datatype: t.Union[TypeHint, t.Any],
    key: t.Union[int, str, None],
    location: t.Optional[Location] = None,
) -> "Context":
    """Create a child context carrying *value*, *datatype* and *key*, with *self* as its #parent.

    Arguments:
      value: The value to convert.
      datatype: The datatype of *value*; coerced to a #TypeHint if it is not one already.
      key: The key or index at which *value* can be found relative to the parent.
      location: The location of the new value. When omitted, only the parent's filename is inherited
        (line number and column are reset).
    Returns:
      A new #Context object that has *self* as its #parent.
    """

    coerced_datatype = datatype if isinstance(datatype, TypeHint) else TypeHint(datatype)
    effective_location = location if location is not None else Location(self.location.filename, None, None)

    return Context(
        self,
        self.direction,
        value,
        coerced_datatype,
        self.settings,
        key,
        effective_location,
        self.convert_func,
    )

ConversionError

Bases: Exception

For any errors that occur during conversion.

Source code in databind/core/converter.py
class ConversionError(Exception):
    """For any errors that occur during conversion.

    Arguments:
      origin: The converter from which the error originated.
      context: The conversion context in which the error occurred.
      message: A human-readable description of the error.
      errors: Optionally, a sequence of `(converter, exception)` pairs collected while delegating
        the conversion to other converters; included in the formatted message.
    """

    def __init__(
        self,
        origin: Converter,
        context: "Context",
        message: str,
        errors: "t.Sequence[t.Tuple[Converter, Exception]] | None" = None,
    ) -> None:
        self.origin = origin
        self.context = context
        self.message = message
        self.errors = errors or []

    @exception_safe_str
    def __str__(self) -> str:
        # NOTE(review): imported lazily — presumably to avoid a circular import; confirm.
        import textwrap

        from databind.core.context import format_context_trace

        message = f'{self.message}\n\nTrace:\n{textwrap.indent(format_context_trace(self.context), "  ")}'
        if self.errors:
            message += "\n\nThe following errors have been reported by converters:"
            for converter, exc in self.errors:
                if str(exc):
                    message += f"\n\n  {converter}: {indent(str(exc), '    ').lstrip()}"
        return message

    @staticmethod
    def expected(
        origin: Converter,
        ctx: "Context",
        types: t.Union[type, t.Sequence[type]],
        got: t.Optional[type] = None,
    ) -> "ConversionError":
        """Build a #ConversionError stating that the value in *ctx* (or *got*) was not of one of *types*."""

        if not isinstance(types, t.Sequence):
            types = (types,)
        # Fix: the loop variable must not be named `t`, which would shadow the `typing` module alias.
        expected = "|".join(type_repr(tp) for tp in types)
        got = type(ctx.value) if got is None else got
        return ConversionError(origin, ctx, f"expected {expected}, got {type_repr(got)} instead")

Converter

Bases: ABC

Interface for converting a value from one representation to another.

Source code in databind/core/converter.py
class Converter(abc.ABC):
    """Interface for converting a value from one representation to another."""

    def __repr__(self) -> str:
        return f"{type_repr(type(self))}()"

    def convert(self, ctx: "Context") -> t.Any:
        """Convert the value in *ctx* to another value.

        The default implementation will dispatch to #serialize() and #deserialize() depending on the direction
        given by the context. Because these methods raise #NotImplementedError, an instance of #Converter without
        custom logic will effectively be a no-op.

        Arguments:
          ctx: The conversion context that contains the direction, value, datatype, settings, location and allows
            you to recursively continue the conversion process for sub values.

        Raises:
          NotImplementedError: If the converter does not support the conversion for the given context.
          NoMatchingConverter: If the converter is delegating to other converters, to point out that none
            of its delegates can convert the value.

        Returns:
          The new value.
        """

        if ctx.direction.is_serialize():
            return self.serialize(ctx)
        elif ctx.direction.is_deserialize():
            return self.deserialize(ctx)
        else:
            raise RuntimeError(f"unexpected direction: {ctx.direction!r}")

    def serialize(self, ctx: "Context") -> t.Any:
        """Serialize the value in *ctx*. Raises #NotImplementedError by default; override to support serialization."""
        raise NotImplementedError

    def deserialize(self, ctx: "Context") -> t.Any:
        """Deserialize the value in *ctx*. Raises #NotImplementedError by default; override to support deserialization."""
        raise NotImplementedError
convert
convert(ctx: Context) -> Any

Convert the value in ctx to another value.

The default implementation will dispatch to #serialize() and #deserialize() depending on the direction given by the context. Because these methods raise #NotImplementedError, an instance of #Converter without custom logic will effectively be a no-op.

Argument

ctx: The conversion context that contains the direction, value, datatype, settings, location and allows you to recursively continue the conversion process for sub values.

Raises:

Type Description
NotImplementedError

If the converter does not support the conversion for the given context.

NoMatchingConverter

If the converter is delegating to other converters, to point out that none of its delegates can convert the value.

Returns:

Type Description
Any

The new value.

Source code in databind/core/converter.py
def convert(self, ctx: "Context") -> t.Any:
    """Convert the value in *ctx* to another value.

    The default implementation dispatches to #serialize() or #deserialize() based on the context's
    direction. Since those methods raise #NotImplementedError by default, a plain #Converter instance
    is effectively a no-op.

    Arguments:
      ctx: The conversion context carrying the direction, value, datatype, settings and location, and
        allowing recursive conversion of sub values.

    Raises:
      NotImplementedError: If the converter does not support the conversion for the given context.
      NoMatchingConverter: If the converter is delegating to other converters, to point out that none
        of its delegates can convert the value.

    Returns:
      The new value.
    """

    direction = ctx.direction
    if direction.is_serialize():
        return self.serialize(ctx)
    if direction.is_deserialize():
        return self.deserialize(ctx)
    raise RuntimeError(f"unexpected direction: {ctx.direction!r}")

DateFormat dataclass

Bases: Setting

The #DateFormat setting is used to describe the date format to use for #datetime.datetime, #datetime.date and #datetime.time values when formatting them as a string, i.e. usually when the date/time is serialized, and when parsing them.

The #nr.date module provides types to describe the format of a date, time and datetime (see #date_format, #time_format and #datetime_format), as well as an entire suite of formats for all types of date/time values.

Parameters:

Name Type Description Default
formats T_Input

One or more datetime formats to use when parsing. The first of the formats is used for formatting. Each element must be one of the following:

  • A formatter (like #date_format, #time_format or #datetime_format),
  • a #format_set,
  • a string that is a date/time format, or
  • a string starting with a period (.) that names a builtin format set (like .ISO_8601)

Attempting to use #parse() or #format() for a date/time value type for which the #DateFormat does not provide an applicable format results in a #ValueError.

()
Source code in databind/core/settings.py
@dataclasses.dataclass(init=False, unsafe_hash=True)
class DateFormat(Setting):
    """The #DateFormat setting is used to describe the date format to use for #datetime.datetime, #datetime.date
    and #datetime.time values when formatting them as a string, i.e. usually when the date/time is serialized, and
    when parsing them.

    The #nr.date module provides types to describe the format of a date, time and datetime (see #date_format,
    #time_format and #datetime_format), as well as an entire suite of formats for all types of date/time values.

    Arguments:
      formats: One or more datetime formats to use when parsing. The first of the formats is used for formatting.
        Each element must be one of the following:

        * A formatter (like #date_format, #time_format or #datetime_format),
        * a #format_set,
        * a string that is a date/time format, or
        * a string starting with a period (`.`) that names a builtin format set (like `.ISO_8601`)

        Attempting to use #parse() or #format() for a date/time value type for which the #DateFormat does not
        provide an applicable format results in a #ValueError.
    """

    # Type aliases for the kinds of values and formatters this setting deals with.
    Dtype = t.Union[datetime.date, datetime.time, datetime.datetime]
    Formatter = t.Union["date_format", "time_format", "datetime_format", "format_set"]
    T_Input = t.Union[str, Formatter]
    T_Dtype = t.TypeVar("T_Dtype", bound=Dtype)

    #: The formats passed to the constructor, in order of preference.
    formats: t.Sequence[T_Input]

    def __init__(self, *formats: T_Input) -> None:
        if not formats:
            raise ValueError("need at least one date format")
        self.formats = formats

    @staticmethod
    def __get_builtin_format(fmt: str) -> Formatter:
        """Resolve a `.`-prefixed name (e.g. `.ISO_8601`) to a built-in #nr.date format set.

        Raises:
          ValueError: If *fmt* does not name a built-in format set.
        """
        if fmt == ".ISO_8601":
            from nr.date.format_sets import ISO_8601

            return ISO_8601
        if fmt == ".JAVA_OFFSET_DATETIME":
            from nr.date.format_sets import JAVA_OFFSET_DATETIME

            return JAVA_OFFSET_DATETIME
        raise ValueError(f"{fmt!r} is not a built-in date/time format set")

    def __iter_formats(self, type_: t.Type[Formatter]) -> t.Iterable[Formatter]:
        """Yield the formatters of *type_* derivable from #formats: strings are compiled (or resolved to a
        built-in format set when `.`-prefixed) and format sets are expanded. Entries of any other formatter
        type are silently skipped."""
        for fmt in self.formats:
            if isinstance(fmt, str):
                if fmt.startswith("."):
                    yield self.__get_builtin_format(fmt)
                else:
                    yield type_.compile(fmt)  # type: ignore
            elif type(fmt) is type_:
                yield fmt
            elif isinstance(fmt, format_set):
                yield from getattr(fmt, type_.__name__ + "s")
            # else:
            #  raise RuntimeError(f'bad date format type: {type(fmt).__name__}')

    def parse(self, type_: t.Type[T_Dtype], value: str) -> T_Dtype:
        """Parse a date/time value from a string.

        Arguments:
          type_: The type to parse the value into, i.e. #datetime.date, #datetime.time or #datetime.datetime.
          value: The string to parse.
        Raises:
          ValueError: If no date format is sufficient to parse *value* into the given *type_*.
        Returns:
          The parsed date/time value.
        """

        from nr.date import date_format, datetime_format, time_format

        # Map the target type to the matching formatter class and parse method name.
        format_t: t.Type[DateFormat.Formatter]
        format_t, method_name = {  # type: ignore
            datetime.date: (date_format, "parse_date"),
            datetime.time: (time_format, "parse_time"),
            datetime.datetime: (datetime_format, "parse_datetime"),
        }[type_]
        # Try each configured format in order; the first successful parse wins.
        for fmt in self.__iter_formats(format_t):
            try:
                return t.cast(DateFormat.T_Dtype, getattr(fmt, method_name)(value))
            except ValueError:
                pass
        raise self._formulate_parse_error(list(self.__iter_formats(format_t)), value)

    def format(self, dt: T_Dtype) -> str:
        """Format a date/time value to a string.

        Arguments:
          dt: The date/time value to format (i.e. an instance of #datetime.date, #datetime.time or
            #datetime.datetime).
        Raises:
          ValueError: If no date format to format the type of *value* is available.
        Returns:
          The formatted date/time value.
        """

        from nr.date import date_format, datetime_format, time_format

        # Map the value's concrete type to the matching formatter class and format method name.
        format_t: t.Type[DateFormat.Formatter]
        format_t, method_name = {  # type: ignore
            datetime.date: (date_format, "format_date"),
            datetime.time: (time_format, "format_time"),
            datetime.datetime: (datetime_format, "format_datetime"),
        }[type(dt)]
        for fmt in self.__iter_formats(format_t):
            try:
                return t.cast(str, getattr(fmt, method_name)(dt))
            except ValueError:
                pass
        # NOTE(review): this reuses _formulate_parse_error, so the error text says the value "does not match
        # date formats" even though this is a formatting failure — confirm the message is intended.
        raise self._formulate_parse_error(list(self.__iter_formats(format_t)), dt)

    @staticmethod
    def _formulate_parse_error(formats: t.Sequence[Formatter], s: t.Any) -> ValueError:
        """Construct a #ValueError listing each of the *formats* that failed to handle *s*."""
        return ValueError(
            f'"{s}" does not match date formats ({len(formats)}):'
            + "".join(f"\n  | {str(x) if isinstance(x, format_set) else x.format_str}" for x in formats)
        )
format
format(dt: T_Dtype) -> str

Format a date/time value to a string.

Parameters:

Name Type Description Default
dt T_Dtype

The date/time value to format (i.e. an instance of #datetime.date, #datetime.time or #datetime.datetime).
required

Raises: ValueError: If no date format to format the type of value is available. Returns: The formatted date/time value.

Source code in databind/core/settings.py
def format(self, dt: T_Dtype) -> str:
    """Format a date/time value to a string.

    Arguments:
      dt: The date/time value to format (i.e. an instance of #datetime.date, #datetime.time or
        #datetime.datetime).
    Raises:
      ValueError: If no date format to format the type of *value* is available.
    Returns:
      The formatted date/time value.
    """

    from nr.date import date_format, datetime_format, time_format

    # Map the value's concrete type to the matching formatter class and format method name.
    format_t: t.Type[DateFormat.Formatter]
    format_t, method_name = {  # type: ignore
        datetime.date: (date_format, "format_date"),
        datetime.time: (time_format, "format_time"),
        datetime.datetime: (datetime_format, "format_datetime"),
    }[type(dt)]
    # Try each configured format in order; the first one that formats without error wins.
    for fmt in self.__iter_formats(format_t):
        try:
            return t.cast(str, getattr(fmt, method_name)(dt))
        except ValueError:
            pass
    # NOTE(review): reuses _formulate_parse_error, so the error text says "does not match date formats"
    # even though this is a formatting failure — confirm the message is intended.
    raise self._formulate_parse_error(list(self.__iter_formats(format_t)), dt)
parse
parse(type_: Type[T_Dtype], value: str) -> T_Dtype

Parse a date/time value from a string.

Parameters:

Name Type Description Default
type_ Type[T_Dtype]

The type to parse the value into, i.e. #datetime.date, #datetime.time or #datetime.datetime.

required
value str

The string to parse.

required

Raises: ValueError: If no date format is sufficient to parse value into the given type_. Returns: The parsed date/time value.

Source code in databind/core/settings.py
def parse(self, type_: t.Type[T_Dtype], value: str) -> T_Dtype:
    """Parse a date/time value from a string.

    Arguments:
      type_: The type to parse the value into, i.e. #datetime.date, #datetime.time or #datetime.datetime.
      value: The string to parse.
    Raises:
      ValueError: If no date format is sufficient to parse *value* into the given *type_*.
    Returns:
      The parsed date/time value.
    """

    from nr.date import date_format, datetime_format, time_format

    # Map the target type to the matching formatter class and parse method name.
    format_t: t.Type[DateFormat.Formatter]
    format_t, method_name = {  # type: ignore
        datetime.date: (date_format, "parse_date"),
        datetime.time: (time_format, "parse_time"),
        datetime.datetime: (datetime_format, "parse_datetime"),
    }[type_]
    # Try each configured format in order; the first successful parse wins.
    for fmt in self.__iter_formats(format_t):
        try:
            return t.cast(DateFormat.T_Dtype, getattr(fmt, method_name)(value))
        except ValueError:
            pass
    raise self._formulate_parse_error(list(self.__iter_formats(format_t)), value)

DelegateToClassmethodConverter

Bases: Converter

This converter delegates to the methods defined by name to perform serialization and deserialization of a type. This converter is usually used in conjunction with settings that override the converter to be used in a specific scenario (e.g. such as de/serializing JSON with the #databind.json.settings.JsonConverter setting).

Source code in databind/core/converter.py
class DelegateToClassmethodConverter(Converter):
    """
    This converter delegates to the methods defined by name to perform serialization and deserialization of a type.
    This converter is usually used in conjunction with settings that override the converter to be used in a specific
    scenario (e.g. such as de/serializing JSON with the #databind.json.settings.JsonConverter setting).
    """

    def __init__(
        self,
        serialized_type: t.Union[t.Type[t.Any], t.Tuple[t.Type[t.Any], ...], None] = None,
        *,
        serialize: "str | None" = None,
        deserialize: "str | None" = None,
    ) -> None:
        # serialized_type: expected type(s) of the serialized payload; checked during deserialization if given.
        # serialize / deserialize: names of the methods on the target type to delegate to.
        self._serialized_type = serialized_type
        self._serialize = serialize
        self._deserialize = deserialize

    def serialize(self, ctx: "Context") -> t.Any:
        """Serialize *ctx.value* by calling the configured method (by name) on the datatype's class.
        Raises #NotImplementedError if no serialize method was configured or the datatype is not a class."""

        if self._serialize is None or not isinstance(ctx.datatype, ClassTypeHint):
            raise NotImplementedError
        if not isinstance(ctx.value, ctx.datatype.type):
            raise ConversionError.expected(self, ctx, ctx.datatype.type)
        method: t.Callable[[t.Any], t.Any] = getattr(ctx.datatype.type, self._serialize)
        return method(ctx.value)

    def deserialize(self, ctx: "Context") -> t.Any:
        """Deserialize *ctx.value* by calling the configured method (by name) on the datatype's class.
        Raises #NotImplementedError if no deserialize method was configured or the datatype is not a class."""

        if self._deserialize is None or not isinstance(ctx.datatype, ClassTypeHint):
            raise NotImplementedError
        if self._serialized_type is not None and not isinstance(ctx.value, self._serialized_type):
            raise ConversionError.expected(self, ctx, self._serialized_type)
        method: t.Callable[[t.Any], t.Any] = getattr(ctx.datatype.type, self._deserialize)
        return method(ctx.value)

DeserializeAs dataclass

Bases: Setting

Indicates that a field should be deserialized as the given type instead of the type of the field. This is typically used when a field should be typed as an abstract class or interface, but during deserialization of the field, a concrete type should be used instead.

Example:

import typing
from dataclasses import dataclass
from databind.core.settings import DeserializeAs

@dataclass
class A:
    pass

@dataclass
class B(A):
    pass

@dataclass
class MyClass:
  my_field: typing.Annotated[A, DeserializeAs(B)]

Here, although MyClass.my_field is annotated as A, when a payload is deserialized into an instance of MyClass, the value for my_field will be deserialized as an instance of B instead of A.

Source code in databind/core/settings.py
@dataclasses.dataclass(frozen=True)
class DeserializeAs(Setting):
    """Indicates that a field should be deserialized as the given type instead of the type of the field. This is
    typically used when a field should be typed as an abstract class or interface, but during deserialization of the
    field, a concrete type should be used instead.

    Example:

    ```py
    import typing
    from dataclasses import dataclass
    from databind.core.settings import DeserializeAs

    @dataclass
    class A:
        pass

    @dataclass
    class B(A):
        pass

    @dataclass
    class MyClass:
      my_field: typing.Annotated[A, DeserializeAs(B)]
    ```

    Here, although `MyClass.my_field` is annotated as `A`, when a payload is deserialized into an instance of
    `MyClass`, the value for `my_field` will be deserialized as an instance of `B` instead of `A`.
    """

    #: The concrete type to use when deserializing the annotated field.
    type: t.Type[t.Any]
    #: The priority of this setting.
    priority: Priority = Priority.NORMAL

ExtraKeys

Bases: ClassDecoratorSetting

If discovered while deserializing a #databind.core.schema.Schema, its callback is used to inform when extra keys are encountered. If the setting is not available, or if allow is set to False, it will cause an error.

The setting may also be supplied at an individual schema level.

Can be used as a decorator for a class to indicate that extra keys on the schema informed by the class are allowed, as a global setting or as an annotation on a schema field.

Note

Only the first, highest priority annotation is used; thus if you pass a callback for arg it may not be called if the #ExtraKeys setting you pass it to is overruled by another.

Source code in databind/core/settings.py
class ExtraKeys(ClassDecoratorSetting):
    """If discovered while deserializing a #databind.core.schema.Schema, its callback is used to inform when extra
    keys are encountered. If the setting is not available, or if *allow* is set to `False`, it will
    cause an error.

    The setting may also be supplied at an individual schema level.

    Can be used as a decorator for a class to indicate that extra keys on the schema informed by the class are allowed,
    as a global setting or as an annotation on a schema field.

    !!! note

        Only the first, highest priority annotation is used; thus if you pass a callback for *arg* it may not be called
        if the #ExtraKeys setting you pass it to is overruled by another.
    """

    def __init__(
        self,
        allow: bool = True,
        recorder: "t.Callable[[Context, t.Set[str]], t.Any] | None" = None,
        priority: Priority = Priority.NORMAL,
    ) -> None:
        # allow: whether extra keys are permitted at all.
        # recorder: optional callback receiving the context and the set of extra keys that were encountered.
        self.allow = allow
        self.recorder = recorder
        self.priority = priority

    def inform(self, origin: "Converter", ctx: "Context", extra_keys: "t.Set[str]") -> None:
        """Handle the *extra_keys* reported by *origin* in *ctx*: raise a #ConversionError when extra keys are
        disallowed, otherwise forward them to the #recorder callback (if one was provided)."""

        from databind.core.converter import ConversionError

        if self.allow is False:
            raise ConversionError(origin, ctx, f"encountered extra keys: {extra_keys}")
        elif self.recorder is not None:
            self.recorder(ctx, extra_keys)

Field dataclass

Describes a field in a schema.

Source code in databind/core/schema.py
@dataclasses.dataclass
class Field:
    """Describes a field in a schema."""

    #: The datatype of the field.
    datatype: TypeHint

    #: Whether the field is required to be present, if this is `False` and the field does not have a #default or
    #: #default_factory, the field value will not be passed to the schema constructor. Even if a #default or
    #: #default_factory is present, if the field is required it must be present in the payload being deserialized.
    required: bool = True

    #: The default value for the field, if any.
    default: t.Union[NotSet, t.Any] = NotSet.Value

    #: The default value factory for the field, if any.
    default_factory: t.Union[NotSet, t.Any] = NotSet.Value

    #: Indicates whether the field is to be treated "flat". If the #datatype is a structured type that has fields of its
    #: own, those fields should be treated as if expanded into the same level as this field.
    flattened: bool = False

    def has_default(self) -> bool:
        """Return `True` if the field has either a #default value or a #default_factory."""
        return self.default is not NotSet.Value or self.default_factory is not NotSet.Value

    def get_default(self) -> t.Any:
        """Return the field's default value, calling #default_factory if no plain #default is set.

        Raises:
          RuntimeError: If the field has neither a #default nor a #default_factory.
        """
        if self.default is not NotSet.Value:
            return self.default
        elif self.default_factory is not NotSet.Value:
            return self.default_factory()
        else:
            raise RuntimeError("Field does not have a default value")

    @property
    def aliases(self) -> t.Tuple[str, ...]:
        """For convenience, the aliases described in the #datatype#'s annotations are listed here. Do note however, that
        during the conversion process, the #Alias setting should still be looked up through #Context.get_setting()
        and this field should be ignored. It serves only an introspective purpose. Returns an empty tuple if no alias
        setting is present in the type hint."""

        from databind.core.settings import Alias, get_annotation_setting

        alias = get_annotation_setting(self.datatype, Alias)
        return alias.aliases if alias else ()
aliases property
aliases: Tuple[str, ...]

For convenience, the aliases described in the #datatype#'s annotations are listed here. Do note however, that during the conversion process, the #Alias setting should still be looked up through #Context.get_setting() and this field should be ignored. It serves only an introspective purpose. Returns an empty tuple if no alias setting is present in the type hint.

Flattened dataclass

Bases: BooleanSetting

Indicates whether a field should be "flattened" by virtually expanding its sub fields into the parent datastructure's serialized form.

Example:

import typing
from dataclasses import dataclass
from databind.core.settings import Flattened

@dataclass
class Inner:
  a: int
  b: str

@dataclass
class Outter:
  inner: typing.Annotated[Inner, Flattened()]
  c: str

The Outter class in the example above may be deserialized, for example, from a JSON payload of the form {"a": 0, "b": "", "c": ""} as opposed to {"inner": {"a": 0, "b": ""}, "c": ""} due to the Outter.inner field's sub fields being expanded into Outter.

Source code in databind/core/settings.py
class Flattened(BooleanSetting):
    """Indicates whether a field should be "flattened" by virtually expanding its sub fields into the parent
    datastructure's serialized form.

    Example:

    ```py
    import typing
    from dataclasses import dataclass
    from databind.core.settings import Flattened

    @dataclass
    class Inner:
      a: int
      b: str

    @dataclass
    class Outter:
      inner: typing.Annotated[Inner, Flattened()]
      c: str
    ```

    The `Outter` class in the example above may be deserialized, for example, from a JSON payload of the form
    `{"a": 0, "b": "", "c": ""}` as opposed to `{"inner": {"a": 0, "b": ""}, "c": ""}` due to the `Outter.inner`
    field's sub fields being expanded into `Outter`.
    """

Location dataclass

Represents a location in a file.

Source code in databind/core/context.py
@dataclasses.dataclass(frozen=True)
class Location:
    """Represents a location in a file."""

    #: The name of the file.
    filename: t.Optional[str]

    #: The line number in the file.
    line: t.Optional[int]

    #: The column number in the file.
    column: t.Optional[int]

    #: Sentinel for an unknown location. Only declared here; presumably assigned after the class
    #: definition elsewhere in the module — confirm in the module source.
    EMPTY: t.ClassVar["Location"]

Module

Bases: Converter

A module is a collection of #Converter#s.

Source code in databind/core/converter.py
class Module(Converter):
    """A module is a collection of #Converter#s."""

    def __init__(self, name: str) -> None:
        self.name = name
        self.converters: t.List[Converter] = []

    def __repr__(self) -> str:
        return f"Module({self.name!r})"

    def register(self, converter: Converter, first: bool = False) -> None:
        """Add *converter* to this module, prepending it when *first* is set."""
        assert isinstance(converter, Converter), converter
        position = 0 if first else len(self.converters)
        self.converters.insert(position, converter)

    def get_converters(self, ctx: "Context") -> t.Iterator[Converter]:
        """Yield all registered converters in order, recursively flattening nested #Module#s."""
        for entry in self.converters:
            if isinstance(entry, Module):
                yield from entry.get_converters(ctx)
            else:
                yield entry

    def convert(self, ctx: "Context") -> t.Any:
        """Dispatch *ctx* to the registered converters, returning the first successful result.

        Converters that raise #NotImplementedError are skipped silently; #ConversionError#s are collected.
        If exactly one converter failed with a #ConversionError, that error is re-raised as-is, otherwise a
        #NoMatchingConverter aggregating all failures is raised.
        """
        failures: t.List[t.Tuple[Converter, Exception]] = []
        for candidate in self.get_converters(ctx):
            try:
                return candidate.convert(ctx)
            except NotImplementedError:
                continue
            except ConversionError as exc:
                failures.append((candidate, exc))
        if len(failures) == 1:
            raise failures[0][1]
        raise NoMatchingConverter(self, ctx, failures)

NoMatchingConverter

Bases: ConversionError

If no converter matched to convert the value and datatype in the context.

Source code in databind/core/converter.py
class NoMatchingConverter(ConversionError):
    """If no converter matched to convert the value and datatype in the context."""

    def __init__(self, origin: Converter, context: "Context", errors: "t.List[t.Tuple[Converter, Exception]]") -> None:
        # Build a message naming the conversion direction (e.g. "serializer"/"deserializer" via the
        # direction name plus the trailing "r"), the target datatype and the payload's Python type.
        # The per-converter failures in *errors* are forwarded to the base class.
        super().__init__(
            origin,
            context,
            f"no {context.direction.name.lower()}r for `{context.datatype}` and payload of type "
            f"`{type_repr(type(context.value))}`",
            errors,
        )

ObjectMapper

Bases: Generic[T, U]

The object mapper is responsible for dispatching the conversion process into a #Module.

The type parameter T represents the deserialized type, while U represents the serialized type.

Source code in databind/core/mapper.py
class ObjectMapper(t.Generic[T, U]):
    """The object mapper is responsible for dispatching the conversion process into a #Module.

    The type parameter *T* represents the deserialized type, while *U* represents the serialized type.
    """

    def __init__(self, settings: t.Optional["Settings"] = None) -> None:
        # NOTE(review): imports are local, presumably to avoid an import cycle between the mapper,
        # converter and settings modules — confirm against the module layout.
        from databind.core.converter import Module
        from databind.core.settings import Settings

        assert isinstance(settings, (type(None), Settings)), settings
        self.module = Module("ObjectMapper.module")
        self.settings = settings or Settings()

    def copy(self) -> "ObjectMapper[T, U]":
        """Return a new mapper with a copy of this mapper's settings and the same converter list."""
        new = type(self)(self.settings.copy())
        new.module.converters.extend(self.module.converters)
        return new

    def convert(
        self,
        direction: "Direction",
        value: t.Any,
        datatype: "TypeHint | t.Any",
        location: "Location | None" = None,
        settings: "SettingsProvider | t.List[Setting] | None" = None,
    ) -> t.Any:
        """Convert a value according to the given datatype.

        Arguments:
          direction: The direction, i.e. either deserialization or serialization.
          value: The value to convert.
          datatype: The datatype. If not already a #TypeHint instance, it will be converted using #TypeHint().
          location: The location of where *value* is coming from. Useful to specify to make debugging easier.
          settings: A list of settings, in which case they will be treated as global settings in addition to the
            mapper's #settings, or an entirely different #SettingsProvider instance (for which it is recommended that
            it is taking the ObjectMapper's #settings into account, for example by passing them for the
            #Settings.parent).

        Raises:
          ConversionError: For more generic errors during the conversion process.
          NoMatchingConverter: If at any point during the conversion a datatype was encountered for which no matching
            converter was found.
        """

        from databind.core.context import Context, Location
        from databind.core.settings import Settings

        if not isinstance(datatype, TypeHint):
            datatype = TypeHint(datatype)
        if isinstance(settings, list):
            # A plain list of settings is layered on top of the mapper's own settings.
            settings = Settings(self.settings, global_settings=settings)

        context = Context(
            parent=None,
            direction=direction,
            value=value,
            datatype=datatype,
            settings=settings or self.settings,
            key=Context.ROOT,
            location=location or Location.EMPTY,
            convert_func=self.module.convert,
        )

        return context.convert()

    def serialize(
        self,
        value: T,
        datatype: "TypeHint | t.Any",
        filename: "str | None" = None,
        settings: "SettingsProvider | t.List[Setting] | None" = None,
    ) -> U:
        """Serialize *value* according to its *datatype*."""

        from databind.core.context import Direction, Location

        return t.cast(U, self.convert(Direction.SERIALIZE, value, datatype, Location(filename, None, None), settings))

    def deserialize(
        self,
        value: U,
        datatype: "TypeHint | t.Any",
        filename: "str | None" = None,
        settings: "SettingsProvider | t.List[Setting] | None" = None,
    ) -> T:
        """Deserialize *value* according to its *datatype*."""

        from databind.core.context import Direction, Location

        return t.cast(
            T,
            self.convert(Direction.DESERIALIZE, value, datatype, Location(filename, None, None), settings),
        )
__init__
__init__(settings: Optional[Settings] = None) -> None
Source code in databind/core/mapper.py
def __init__(self, settings: t.Optional["Settings"] = None) -> None:
    """Create a mapper with a fresh #Module and the given (or default-constructed) #Settings."""
    # NOTE(review): imports are local, presumably to avoid an import cycle — confirm module layout.
    from databind.core.converter import Module
    from databind.core.settings import Settings

    assert isinstance(settings, (type(None), Settings)), settings
    self.module = Module("ObjectMapper.module")
    self.settings = settings or Settings()
convert
convert(direction: Direction, value: Any, datatype: TypeHint | Any, location: Location | None = None, settings: SettingsProvider | List[Setting] | None = None) -> Any

Convert a value according to the given datatype.

Parameters:

Name Type Description Default
direction Direction

The direction, i.e. either deserialization or serialization.

required
value Any

The value to convert.

required
datatype TypeHint | Any

The datatype. If not already a #TypeHint instance, it will be converted using #TypeHint().

required
location Location | None

The location of where value is coming from. Useful to specify to make debugging easier.

None
settings SettingsProvider | List[Setting] | None

A list of settings, in which case they will be treated as global settings in addition to the mapper's #settings, or an entirely different #SettingsProvider instance (for which it is recommended that it is taking the ObjectMapper's #settings into account, for example by passing them for the

Settings.parent).
None

Raises:

Type Description
ConversionError

For more generic errors during the conversion process.

NoMatchingConverter

If at any point during the conversion a datatype was encountered for which no matching converter was found.

Source code in databind/core/mapper.py
def convert(
    self,
    direction: "Direction",
    value: t.Any,
    datatype: "TypeHint | t.Any",
    location: "Location | None" = None,
    settings: "SettingsProvider | t.List[Setting] | None" = None,
) -> t.Any:
    """Convert a value according to the given datatype.

    Arguments:
      direction: The direction, i.e. either deserialization or serialization.
      value: The value to convert.
      datatype: The datatype. If not already a #TypeHint instance, it will be converted using #TypeHint().
      location: The location of where *value* is coming from. Useful to specify to make debugging easier.
      settings: A list of settings, in which case they will be treated as global settings in addition to the
        mapper's #settings, or an entirely different #SettingsProvider instance (for which it is recommended that
        it is taking the ObjectMapper's #settings into account, for example by passing them for the
        #Settings.parent).

    Raises:
      ConversionError: For more generic errors during the conversion process.
      NoMatchingConverter: If at any point during the conversion a datatype was encountered for which no matching
        converter was found.
    """

    from databind.core.context import Context, Location
    from databind.core.settings import Settings

    if not isinstance(datatype, TypeHint):
        datatype = TypeHint(datatype)
    if isinstance(settings, list):
        # A plain list of settings is layered on top of the mapper's own settings.
        settings = Settings(self.settings, global_settings=settings)

    context = Context(
        parent=None,
        direction=direction,
        value=value,
        datatype=datatype,
        settings=settings or self.settings,
        key=Context.ROOT,
        location=location or Location.EMPTY,
        convert_func=self.module.convert,
    )

    return context.convert()
deserialize
deserialize(value: U, datatype: TypeHint | Any, filename: str | None = None, settings: SettingsProvider | List[Setting] | None = None) -> T

Deserialize value according to its datatype.

Source code in databind/core/mapper.py
def deserialize(
    self,
    value: U,
    datatype: "TypeHint | t.Any",
    filename: "str | None" = None,
    settings: "SettingsProvider | t.List[Setting] | None" = None,
) -> T:
    """Deserialize *value* according to its *datatype*."""

    from databind.core.context import Direction, Location

    return t.cast(
        T,
        self.convert(Direction.DESERIALIZE, value, datatype, Location(filename, None, None), settings),
    )
serialize
serialize(value: T, datatype: TypeHint | Any, filename: str | None = None, settings: SettingsProvider | List[Setting] | None = None) -> U

Serialize value according to its datatype.

Source code in databind/core/mapper.py
def serialize(
    self,
    value: T,
    datatype: "TypeHint | t.Any",
    filename: "str | None" = None,
    settings: "SettingsProvider | t.List[Setting] | None" = None,
) -> U:
    """Serialize *value* according to its *datatype*."""

    from databind.core.context import Direction, Location

    return t.cast(U, self.convert(Direction.SERIALIZE, value, datatype, Location(filename, None, None), settings))

Precision dataclass

Bases: Setting

A setting to describe the precision for #decimal.Decimal fields.

Source code in databind/core/settings.py
@dataclasses.dataclass(frozen=True)
class Precision(Setting):
    """A setting to describe the precision for #decimal.Decimal fields."""

    prec: t.Optional[int] = None
    rounding: t.Optional[str] = None
    Emin: t.Optional[int] = None
    Emax: t.Optional[int] = None
    capitals: t.Optional[bool] = None
    clamp: t.Optional[bool] = None
    priority: Priority = Priority.NORMAL

    def to_decimal_context(self) -> decimal.Context:
        """Build a #decimal.Context mirroring the options stored on this setting."""
        options = dict(
            prec=self.prec,
            rounding=self.rounding,
            Emin=self.Emin,
            Emax=self.Emax,
            capitals=self.capitals,
            clamp=self.clamp,
        )
        return decimal.Context(**options)

Priority

Bases: IntEnum

The priority for settings determines their order in the presence of multiple conflicting settings. Settings should default to using the #NORMAL priority. The other priorities are used to either prevent overriding a field setting globally or to enforce overriding of local field settings globally using #Settings.

Source code in databind/core/settings.py
class Priority(enum.IntEnum):
    """The priority for settings determines their order in the presence of multiple conflicting settings. Settings
    should default to using the #NORMAL priority. The other priorities are used to either prevent overriding a field
    setting globally or to enforce overriding of local field settings globally using #Settings."""

    #: Yields to any other priority.
    LOW = 0
    #: The default priority for settings.
    NORMAL = 1
    #: Takes precedence over #NORMAL and #LOW.
    HIGH = 2
    #: The highest priority; overrules all others.
    ULTIMATE = 3

Remainder dataclass

Bases: BooleanSetting

This setting can be used to indicate on a field of a schema that is of a mapping type that it consumes any extra keys that are not otherwise understood by the schema. Note that there can only be a maximum of 1 remainder field in the same schema.

Source code in databind/core/settings.py
class Remainder(BooleanSetting):
    """This setting can be used to indicate on a field of a schema that is of a mapping type that it consumes any
    extra keys that are not otherwise understood by the schema. Note that there can be at most one remainder
    field in the same schema."""

Required dataclass

Bases: BooleanSetting

Indicates whether a field is required during deserialization, even if its type specifies that it is an optional field.

Example:

import typing
from dataclasses import dataclass
from databind.core.settings import Required

@dataclass
class MyClass:
  my_field: typing.Annotated[typing.Optional[int], Required()]
Source code in databind/core/settings.py
class Required(BooleanSetting):
    """Indicates whether a field is required during deserialization, even if its type specifies that it is an
    optional field.

    Example:

    ```py
    import typing
    from dataclasses import dataclass
    from databind.core.settings import Required

    @dataclass
    class MyClass:
      my_field: typing.Annotated[typing.Optional[int], Required()]
    ```
    """

Schema dataclass

A #Schema describes a set of fields with a name and datatype.

Source code in databind/core/schema.py
@dataclasses.dataclass
class Schema:
    """A #Schema describes a set of fields with a name and datatype."""

    #: A dictionary that maps the field descriptions in the schema. The key is the name of the field in code. Given an
    #: instance of an object that complies to a given #Schema, this is the name by which the value of the field should
    #: be read using attribute lookup.
    fields: t.Dict[str, Field]

    #: A function that constructs an instance of a Python object that this schema represents given a dictionary as
    #: keyword arguments of the deserialized field values. Fields that are not present in the source payload and that
    #: do not have a default value will not be present in the passed dictionary.
    constructor: "Constructor"

    #: The underlying native Python type associated with the schema.
    type: type

    #: Annotation metadata that goes with the schema, possibly derived from a #AnnotatedTypeHint hint or the underlying
    #: Python type object.
    annotations: t.List[t.Any] = dataclasses.field(default_factory=list)

SerializeDefaults dataclass

Bases: BooleanSetting

Control whether default values are encoded in the serialized form of a structure. The default behaviour is up to the serializer implementation, though we consider it good practice to include values that match the default value of a field. When the setting is used, its enabled value defaults to True, because the name of the setting reads as an assertion that serializing defaults is desired.

Source code in databind/core/settings.py
class SerializeDefaults(BooleanSetting):
    """Control whether default values are encoded in the serialized form of a structure. The default behaviour
    is up to the serializer implementation, though we consider it good practice to include values that match the
    default value of a field. When the setting is used, its #enabled value defaults to `True`, because the name of
    the setting reads as an assertion that serializing defaults is desired."""

Setting

Base class for types of which instances represent a setting to be taken into account during data conversion. Every setting has a priority that is used to construct an order, or to determine the single setting to use in the presence of multiple instances of the same setting type being present.

Settings are usually attached to dataclass fields using #typing.Annotated, or added to a #Settings object for applying the setting globally, but some subclasses may support being used as decorators to attach the setting to a type object. Such settings would register themselves under the __databind_settings__ attribute (created if it does not exist) such that it can be picked up when introspected by a converter. Such #Setting subclasses should inherit from #DecoratorSetting instead.

Source code in databind/core/settings.py
class Setting:
    """Base class for types of which instances represent a setting to be taken into account during data conversion.
    Every setting has a priority that is used to construct an order, or to determine the single setting to use in
    the presence of multiple instances of the same setting type being present.

    Settings are usually attached to dataclass fields using #typing.Annotated, or added to a #Settings object for
    applying the setting globally, but some subclasses may support being used as decorators to attach the setting
    to a type object. Such settings would register themselves under the `__databind_settings__` attribute (created
    if it does not exist) such that it can be picked up when introspected by a converter. Such #Setting subclasses
    should inherit from #DecoratorSetting instead."""

    #: The priority of this setting instance; see #Priority.
    priority: Priority = Priority.NORMAL

    def __init__(self) -> None:
        # Guard against direct instantiation; only subclasses represent concrete settings.
        if type(self) is Setting:
            raise TypeError("Setting cannot be directly instantiated")

Settings

Bases: SettingsProvider

This class is used as a container for other objects that serve as a provider of settings that may taken into account during data conversion. Objects that provide settings are instances of #Setting subclasses, such as

#FieldAlias or #DateFormat.

Depending on the type of setting, they may be taken into account if present on a field of a dataclass, or globally from an instance of the #Settings class that is passed to the #ObjectMapper, or both. Which settings are recognized and considered depends also on the implementation of the converter(s) being used.

The #Settings class provides capabilities to supply global settings, as well as supplying settings conditionally based on the type that is being looked at by the #ObjectMapper at the given point in time.

Example:

from databind.core.settings import DateFormat, Priority, Settings, Strict
settings = Settings()
settings.add_global(DateFormat('.ISO_8601', priority=Priority.HIGH))
settings.add_local(int, Strict(False))
Source code in databind/core/settings.py
class Settings(SettingsProvider):
    """This class is used as a container for other objects that serve as a provider of settings that may be taken into
    account during data conversion. Objects that provide settings are instances of #Setting subclasses, such as
    #FieldAlias or #DateFormat.

    Depending on the type of setting, they may be taken into account if present on a field of a dataclass, or globally
    from an instance of the #Settings class that is passed to the #ObjectMapper, or both. Which settings are recognized
    and considered depends also on the implementation of the converter(s) being used.

    The #Settings class provides capabilities to supply global settings, as well as supplying settings conditionally
    based on the type that is being looked at by the #ObjectMapper at the given point in time.

    Example:

    ```py
    from databind.core.settings import DateFormat, Priority, Settings, Strict
    settings = Settings()
    settings.add_global(DateFormat('.ISO_8601', priority=Priority.HIGH))
    settings.add_local(int, Strict(False))
    ```
    """

    def __init__(
        self, parent: t.Optional[SettingsProvider] = None, global_settings: t.Optional[t.List["Setting"]] = None
    ) -> None:
        # Optional fallback provider consulted last by #get_setting().
        self.parent = parent
        # Settings that apply to every conversion context.
        self.global_settings: t.List[Setting] = list(global_settings) if global_settings else []
        # Settings keyed by the exact Python type they apply to.
        self.local_settings: t.Dict[type, t.List[Setting]] = {}
        # Callbacks producing additional settings per context; see #add_provider()/#add_conditional().
        self.providers: t.List[t.Callable[[Context], t.List[Setting]]] = []

    def add_global(self, setting: "Setting") -> None:
        """Add a global setting."""

        self.global_settings.append(setting)

    def add_local(self, type_: type, setting: "Setting") -> None:
        """Add a setting locally for a particular Python type. If that Python type is encountered, the settings are
        combined with any other settings that are found for the type."""

        self.local_settings.setdefault(type_, []).append(setting)

    def add_conditional(self, predicate: t.Callable[["Context"], bool], setting: "Setting") -> None:
        """Adds a setting conditional on the given *predicate*."""

        # Wrap the predicate into a provider so it participates in the normal provider lookup.
        def _provider(context: Context) -> t.List[Setting]:
            if predicate(context):
                return [setting]
            return []

        self.providers.append(_provider)

    def add_provider(self, provider: t.Callable[["Context"], t.List["Setting"]]) -> None:
        """Add a provider callback that is invoked for every conversion context to provide additional settings that
        the subsequent converter should have access to."""

        self.providers.append(provider)

    def copy(self) -> "Settings":
        """Return a copy of this settings container; the lists are copied, the settings themselves are shared."""
        new = type(self)(self.parent, self.global_settings)
        new.local_settings = {k: list(v) for k, v in self.local_settings.items()}
        new.providers = list(self.providers)
        return new

    # SettingsProvider

    def get_setting(self, context: "Context", setting_type: t.Type[T_Setting]) -> "T_Setting | None":
        """Resolves the highest priority instance of the given setting type relevant to the current context. The places
        that the setting is looked for are, in order:

        1. If the context's datatype is #AnnotatedTypeHint, look for it in the #AnnotatedTypeHint.metadata. Otherwise,
           use the wrapped type in the following steps.
        2. If the datatype is a #ClassTypeHint, look for it as a class setting, then subsequently in the settings added
           with #add_local().
        3. Check the setting providers added with #add_provider() or #add_conditional().
        4. Look for it in the global settings.
        5. Delegate to the #parent settings provider (if any).

        If multiple settings are found using any of these steps, the setting with the highest priority among the
        settings is returned. If multiple settings have the same priority, the setting found first via the above order
        is returned.
        """

        from nr.stream import Stream

        def _all_settings() -> t.Iterator[t.Any]:
            # Yields candidate settings in lookup-priority order (steps 1-5 of the docstring).
            datatype = context.datatype
            if isinstance(datatype, AnnotatedTypeHint):
                yield from (s for s in datatype.metadata if isinstance(s, setting_type))
                datatype = datatype[0]
            if isinstance(datatype, ClassTypeHint):
                yield from get_class_settings(datatype.type, setting_type)  # type: ignore[type-var]
                yield from self.local_settings.get(datatype.type, [])
            for provider in self.providers:
                yield from provider(context)
            yield from self.global_settings
            if self.parent:
                setting = self.parent.get_setting(context, setting_type)
                if setting is not None:
                    yield setting

        return get_highest_setting(Stream(_all_settings()).of_type(setting_type))
add_conditional
add_conditional(predicate: Callable[[Context], bool], setting: Setting) -> None

Adds a setting conditional on the given predicate.

Source code in databind/core/settings.py
def add_conditional(self, predicate: t.Callable[["Context"], bool], setting: "Setting") -> None:
    """Adds a setting conditional on the given *predicate*."""

    # Wrap the predicate into a provider so it participates in the normal provider lookup.
    def _provider(context: Context) -> t.List[Setting]:
        if predicate(context):
            return [setting]
        return []

    self.providers.append(_provider)
add_global
add_global(setting: Setting) -> None

Add a global setting.

Source code in databind/core/settings.py
def add_global(self, setting: "Setting") -> None:
    """Add a setting that applies globally, to every conversion context."""

    self.global_settings.append(setting)
add_local
add_local(type_: type, setting: Setting) -> None

Add a setting locally for a particular Python type. If that Python type is encountered, the settings are combined with any other settings that are found for the type.

Source code in databind/core/settings.py
def add_local(self, type_: type, setting: "Setting") -> None:
    """Add a setting locally for a particular Python type. If that Python type is encountered, the settings are
    combined with any other settings that are found for the type. The lookup is by exact type match."""

    self.local_settings.setdefault(type_, []).append(setting)
add_provider
add_provider(provider: Callable[[Context], List[Setting]]) -> None

Add a provider callback that is invoked for every conversion context to provide additional settings that the subsequent converter should have access to.

Source code in databind/core/settings.py
def add_provider(self, provider: t.Callable[["Context"], t.List["Setting"]]) -> None:
    """Add a provider callback that is invoked for every conversion context to provide additional settings that
    the subsequent converter should have access to. The callback may return an empty list."""

    self.providers.append(provider)
get_setting
get_setting(context: Context, setting_type: Type[T_Setting]) -> T_Setting | None

Resolves the highest priority instance of the given setting type relevant to the current context. The places that the setting is looked for are, in order:

  1. If the context's datatype is #AnnotatedTypeHint, look for it in the #AnnotatedTypeHint.metadata. Otherwise, use the wrapped type in the following steps.
  2. If the datatype is a #ClassTypeHint, look for it as a class setting, then subsequently in the settings added with #add_local().
  3. Check the setting providers added with #add_provider() or #add_conditional().
  4. Look for it in the global settings.
  5. Delegate to the #parent settings provider (if any).

If multiple settings are found using any of these steps, the setting with the highest priority among the settings is returned. If multiple settings have the same priority, the setting found first via the above order is returned.

Source code in databind/core/settings.py
def get_setting(self, context: "Context", setting_type: t.Type[T_Setting]) -> "T_Setting | None":
    """Resolves the highest priority instance of the given setting type relevant to the current context. The places
    that the setting is looked for are, in order:

    1. If the context's datatype is #AnnotatedTypeHint, look for it in the #AnnotatedTypeHint.metadata. Otherwise,
       use the wrapped type in the following steps.
    2. If the datatype is a #ClassTypeHint, look for it as a class setting, then subsequently in the settings added
       with #add_local().
    3. Check the setting providers added with #add_provider() or #add_conditional().
    4. Look for it in the global settings.
    5. Delegate to the #parent settings provider (if any).

    If multiple settings are found using any of these steps, the setting with the highest priority among the
    settings is returned. If multiple settings have the same priority, the setting found first via the above order
    is returned.
    """

    from nr.stream import Stream

    def _all_settings() -> t.Iterator[t.Any]:
        # Yields candidate settings in discovery order; that order is what breaks priority ties.
        datatype = context.datatype
        if isinstance(datatype, AnnotatedTypeHint):
            yield from (s for s in datatype.metadata if isinstance(s, setting_type))
            # Unwrap the annotated hint so the class-level lookups below see the inner type.
            datatype = datatype[0]
        if isinstance(datatype, ClassTypeHint):
            yield from get_class_settings(datatype.type, setting_type)  # type: ignore[type-var]
            yield from self.local_settings.get(datatype.type, [])
        for provider in self.providers:
            yield from provider(context)
        yield from self.global_settings
        if self.parent:
            # The parent contributes at most its own single best match.
            setting = self.parent.get_setting(context, setting_type)
            if setting is not None:
                yield setting

    # Providers may yield settings of any type; filter to the requested type before ranking.
    return get_highest_setting(Stream(_all_settings()).of_type(setting_type))

SettingsProvider

Bases: ABC

Interface for providing settings.

Source code in databind/core/settings.py
class SettingsProvider(abc.ABC):
    """Interface for providing settings."""

    def get_setting(self, context: "Context", setting_type: "t.Type[T_Setting]") -> "T_Setting | None":
        # NOTE(review): this method is not decorated with @abc.abstractmethod, so subclasses are
        # not forced to override it; the bare `...` body implicitly returns None ("no setting
        # found"). Confirm whether that default is intentional.
        ...

Strict dataclass

Bases: BooleanSetting

Enable strict conversion of the field during conversion (this should be the default for converters unless some maybe available option to affect the strictness in a converter is changed). This setting should particularly affect only loss-less type conversions (such as int to string and the reverse being allowed when strict handling is disabled).

Source code in databind/core/settings.py
class Strict(BooleanSetting):
    """Enables strict conversion of a field during conversion. Strict handling should be the default
    for converters, unless a converter exposes an option of its own that changes the strictness.
    The setting is primarily meant to govern loss-less type coercions (for example, allowing an
    `int` to be read from a `string` and vice versa only when strict handling is disabled)."""

Union dataclass

Bases: ClassDecoratorSetting

A setting that decorates a class or can be attached to the #typing.Annotated metadata of a #typing.Union type hint to specify that the type should be regarded as a union of more than one types. Which concrete type is to be used at the point of deserialization is usually clarified through a discriminator key. Unions may be of various styles that dictate how the discriminator key and the remaining fields are to be stored or read from.

For serialization, the type of the Python value should inform the converter about which member of the union is being used. If a union definition has multiple type IDs mapping to the same Python type, the behaviour is entirely up to the converter (an adequate resolution may be to pick the first matching type ID and ignore the remaining matches).

Note

In the examples for the different styles below, "type" is a stand-in for the value of the #discriminator_key and ... serves as a stand-in for the remaining fields of the type that is represented by the discriminator.

Source code in databind/core/settings.py
@dataclasses.dataclass
class Union(ClassDecoratorSetting):
    """A setting that decorates a class or can be attached to the #typing.Annotated metadata of a #typing.Union
    type hint to specify that the type should be regarded as a union of more than one types. Which concrete type
    is to be used at the point of deserialization is usually clarified through a discriminator key. Unions may be
    of various styles that dictate how the discriminator key and the remaining fields are to be stored or read
    from.

    For serialization, the type of the Python value should inform the converter about which member of the union
    is being used. If a union definition has multiple type IDs mapping to the same Python type, the behaviour
    is entirely up to the converter (an adequate resolution may be to pick the first matching type ID and ignore
    the remaining matches).

    !!! note

        In the examples for the different styles below, `"type"` is a stand-in for the value of the #discriminator_key
        and `...` serves as a stand-in for the remaining fields of the type that is represented by the discriminator.
    """

    #: The nested style in JSON equivalent is best described as `{"type": "<typeid>", "<typeid>": { ... }}`.
    NESTED: t.ClassVar = "nested"

    #: The flat style in JSON equivalent is best described as `{"type": "<typeid>", ... }`.
    FLAT: t.ClassVar = "flat"

    #: The keyed style in JSON equivalent is best described as `{"<typeid>": { ... }}`.
    KEYED: t.ClassVar = "keyed"

    #: The "best match" style attempts to deserialize the payload in an implementation-defined order and return
    #: the first or best succeeding result. No discriminator key is used.
    BEST_MATCH: t.ClassVar = "best_match"

    #: The subtypes of the union as an implementation of the #UnionMembers interface. When constructing the #Union
    #: setting, a dictionary may be passed in place of a #UnionMembers implementation, or a list of #UnionMembers
    #: to chain them together. The constructor will also accept a string that is either `"<import>"`, which will
    #: be converted to an #ImportUnionMembers handler, or a string formatted as `"!<entrypoint>"`, which will be
    #: converted to an #EntrypointUnionMembers handler.
    members: "UnionMembers"

    #: The style of the union. This should be one of #NESTED, #FLAT, #KEYED or #BEST_MATCH. The default is #NESTED.
    style: str = NESTED

    #: The discriminator key to use, if valid for the #style. Defaults to `"type"`.
    discriminator_key: str = "type"

    #: The key to use when looking up the fields for the member type. Only used with the #NESTED style. If not set,
    #: the union member's type ID is used as the key.
    nesting_key: t.Optional[str] = None

    def __init__(
        self,
        members: t.Union[
            "UnionMembers",
            "StaticUnionMembers._MembersMappingType",
            "t.List[UnionMembers | str | StaticUnionMembers._MembersMappingType]",
            str,
            None,
        ] = None,
        style: str = NESTED,
        discriminator_key: str = "type",
        nesting_key: t.Optional[str] = None,
    ) -> None:
        def _convert_handler(handler: "UnionMembers | StaticUnionMembers._MembersMappingType | str") -> "UnionMembers":
            # Check for None before treating the handler as a mapping; `dict(None)` would raise
            # a TypeError even though None is explicitly permitted here.
            if handler is None or isinstance(handler, t.Mapping):
                from databind.core.union import StaticUnionMembers

                return StaticUnionMembers(dict(handler) if handler else {})
            elif isinstance(handler, str):
                if handler == "<import>":
                    return Union.import_()
                elif handler.startswith("!"):
                    return Union.entrypoint(handler[1:])
                raise ValueError(f"invalid union members string specified: {handler!r}")
            return handler

        if isinstance(members, list):
            from databind.core.union import ChainUnionMembers

            members = ChainUnionMembers(*(_convert_handler(x) for x in members))
        elif members is None:
            members = _convert_handler({})
        else:
            members = _convert_handler(members)

        self.members = members
        self.style = style
        self.discriminator_key = discriminator_key
        self.nesting_key = nesting_key

    def __hash__(self) -> int:
        return id(self)  # Needs to be hashable for Annotated[...] in Python 3.6

    @staticmethod
    def register(extends: type, name: t.Optional[str] = None) -> t.Callable[[t.Type[T]], t.Type[T]]:
        """A convenience method to use as a decorator for classes that should be registered as members of a #Union
        setting that is attached to the type *extends*. The #Union setting on *extends* must have a #StaticUnionMembers
        #members object. The decorated class must also be a subclass of *extends*.

        Example:

        ```py
        import abc
        import dataclasses
        from databind.core.settings import Union

        @Union()
        class MyInterface(abc.ABC):
          # ...
          pass

        @dataclasses.dataclass
        @Union.register(MyInterface, 'some')
        class SomeImplementation(MyInterface):
          # ...
          pass
        ```
        """

        from databind.core.union import StaticUnionMembers

        check_instance_of(extends, type)
        inst = check_not_none(
            get_class_setting(extends, Union), lambda: f"{extends.__name__} is not annotated with @union"
        )

        # Runtime registration is only possible with a static members registry.
        members = check_instance_of(inst.members, StaticUnionMembers)

        def _decorator(subtype: t.Type[T]) -> t.Type[T]:
            check_instance_of(subtype, type)
            check_subclass_of(subtype, extends)
            return members.register(name)(subtype)

        return _decorator

    @staticmethod
    def entrypoint(group: str) -> "EntrypointUnionMembers":
        from databind.core.union import EntrypointUnionMembers

        return EntrypointUnionMembers(group)

    @staticmethod
    def import_() -> "ImportUnionMembers":
        from databind.core.union import ImportUnionMembers

        return ImportUnionMembers()
__init__
__init__(members: Union[UnionMembers, _MembersMappingType, List[UnionMembers | str | _MembersMappingType], str, None] = None, style: str = NESTED, discriminator_key: str = 'type', nesting_key: Optional[str] = None) -> None
Source code in databind/core/settings.py
def __init__(
    self,
    members: t.Union[
        "UnionMembers",
        "StaticUnionMembers._MembersMappingType",
        "t.List[UnionMembers | str | StaticUnionMembers._MembersMappingType]",
        str,
        None,
    ] = None,
    style: str = NESTED,
    discriminator_key: str = "type",
    nesting_key: t.Optional[str] = None,
) -> None:
    def _convert_handler(handler: "UnionMembers | StaticUnionMembers._MembersMappingType | str") -> "UnionMembers":
        # Check for None before treating the handler as a mapping; `dict(None)` would raise
        # a TypeError even though None is explicitly permitted here.
        if handler is None or isinstance(handler, t.Mapping):
            from databind.core.union import StaticUnionMembers

            return StaticUnionMembers(dict(handler) if handler else {})
        elif isinstance(handler, str):
            if handler == "<import>":
                return Union.import_()
            elif handler.startswith("!"):
                return Union.entrypoint(handler[1:])
            raise ValueError(f"invalid union members string specified: {handler!r}")
        return handler

    if isinstance(members, list):
        from databind.core.union import ChainUnionMembers

        members = ChainUnionMembers(*(_convert_handler(x) for x in members))
    elif members is None:
        members = _convert_handler({})
    else:
        members = _convert_handler(members)

    self.members = members
    self.style = style
    self.discriminator_key = discriminator_key
    self.nesting_key = nesting_key
register staticmethod
register(extends: type, name: Optional[str] = None) -> Callable[[Type[T]], Type[T]]

A convenience method to use as a decorator for classes that should be registered as members of a #Union setting that is attached to the type extends. The #Union setting on extends must have a #StaticUnionMembers #members object. The decorated class must also be a subclass of extends.

Example:

import abc
import dataclasses
from databind.core.settings import Union

@Union()
class MyInterface(abc.ABC):
  # ...
  pass

@dataclasses.dataclass
@Union.register(MyInterface, 'some')
class SomeImplementation(MyInterface):
  # ...
  pass
Source code in databind/core/settings.py
@staticmethod
def register(extends: type, name: t.Optional[str] = None) -> t.Callable[[t.Type[T]], t.Type[T]]:
    """A convenience method to use as a decorator for classes that should be registered as members of a #Union
    setting that is attached to the type *extends*. The #Union setting on *extends* must have a #StaticUnionMembers
    #members object. The decorated class must also be a subclass of *extends*.

    Example:

    ```py
    import abc
    import dataclasses
    from databind.core.settings import Union

    @Union()
    class MyInterface(abc.ABC):
      # ...
      pass

    @dataclasses.dataclass
    @Union.register(MyInterface, 'some')
    class SomeImplementation(MyInterface):
      # ...
      pass
    ```
    """

    from databind.core.union import StaticUnionMembers

    # *extends* must itself be a type carrying a @Union(...) class setting.
    check_instance_of(extends, type)
    inst = check_not_none(
        get_class_setting(extends, Union), lambda: f"{extends.__name__} is not annotated with @union"
    )

    # Runtime registration is only possible with a static members registry.
    members = check_instance_of(inst.members, StaticUnionMembers)

    def _decorator(subtype: t.Type[T]) -> t.Type[T]:
        # The registered member must be a proper subclass of the union's base type.
        check_instance_of(subtype, type)
        check_subclass_of(subtype, extends)
        return members.register(name)(subtype)

    return _decorator

convert_dataclass_to_schema

convert_dataclass_to_schema(dataclass_type: Union[type, GenericAlias, ClassTypeHint]) -> Schema

Converts a Python class that is decorated with #dataclasses.dataclass() to a Schema.

The function will respect the #Required setting present in a field's datatype if, and only if, the setting occurs in the root type hint, which must be a #typing.Annotated hint.

Parameters:

Name Type Description Default
dataclass_type Union[type, GenericAlias, ClassTypeHint]

A Python type that is a dataclass, or a generic alias of a dataclass.

required

Returns: A schema that represents the dataclass. If a generic alias was passed, fields of which the type hint contained type parameters will have their type parameters substituted with the respective arguments present in the alias.

Example:

import dataclasses
from typing import Generic, TypeVar
from typeapi import TypeHint
from databind.core.schema import convert_dataclass_to_schema, Field, Schema
T = TypeVar('T')
@dataclasses.dataclass
class A(Generic[T]):
  a: T
assert convert_dataclass_to_schema(A[int]) == Schema({'a': Field(TypeHint(int))}, A)
Source code in databind/core/schema.py
def convert_dataclass_to_schema(dataclass_type: t.Union[type, GenericAlias, ClassTypeHint]) -> Schema:
    """Converts a Python class that is decorated with #dataclasses.dataclass() to a Schema.

    The function will respect the #Required setting present in a field's datatype if, and only if,
    the setting occurs in the root type hint, which must be a #typing.Annotated hint.

    Arguments:
      dataclass_type: A Python type that is a dataclass, or a generic alias of a dataclass.
    Returns:
      A schema that represents the dataclass. If a generic alias was passed, fields of which the type hint contained
      type parameters will have their type parameters substituted with the respective arguments present in the alias.

    Example:

    ```py
    import dataclasses
    from typing import Generic, TypeVar
    from typeapi import TypeHint
    from databind.core.schema import convert_dataclass_to_schema, Field, Schema
    T = TypeVar('T')
    @dataclasses.dataclass
    class A(Generic[T]):
      a: T
    assert convert_dataclass_to_schema(A[int]) == Schema({'a': Field(TypeHint(int))}, A)
    ```
    """

    from dataclasses import MISSING

    hint: ClassTypeHint
    if isinstance(dataclass_type, ClassTypeHint):
        hint = dataclass_type
    else:
        hint = TypeHint(dataclass_type)  # type: ignore[assignment]
        assert isinstance(hint, ClassTypeHint), hint

    dataclass_type = hint.type
    assert isinstance(dataclass_type, type), repr(dataclass_type)
    assert dataclasses.is_dataclass(
        dataclass_type
    ), f"expected a @dataclass type, but {type_repr(dataclass_type)} is not such a type"

    # Figure out which field is defined on which dataclass in the class hierarchy.
    # This is important because we need to use the correct context when evaluating
    # forward references in field annotations; we can't just use the target
    # dataclass if it was defined in a different module.
    field_origin: t.Dict[str, type] = {}
    base_queue = [hint.type]
    while base_queue:
        base_type = base_queue.pop(0)
        if dataclasses.is_dataclass(base_type):
            annotations = get_annotations(base_type)
            for field in dataclasses.fields(base_type):
                if field.name in annotations and field.name not in field_origin:
                    field_origin[field.name] = base_type
        base_queue += base_type.__bases__

    # Retrieve the context in which type hints from each field origin type need to be
    # evaluated.
    eval_context_by_type: t.Dict[type, t.Mapping[str, t.Any]] = {
        type_: vars(sys.modules[type_.__module__]) for type_ in set(field_origin.values())
    }

    # Collect the members from the dataclass and its base classes.
    queue = [hint]
    fields: t.Dict[str, Field] = {}
    while queue:
        hint = queue.pop(0)
        parameter_map = hint.get_parameter_map()

        if hint.type in eval_context_by_type:
            # Make sure forward references are resolved.
            hint = hint.evaluate(eval_context_by_type[hint.type])  # type: ignore[assignment]
            assert isinstance(hint, ClassTypeHint)

            for field in dataclasses.fields(hint.type):
                if not field.init:
                    # If we cannot initialize the field in the constructor, we should also
                    # exclude it from the definition of the type for de-/serializing.
                    continue
                if field.name in fields:
                    # Subclasses override their parent's fields.
                    continue
                if field_origin[field.name] != hint.type:
                    # If this field does not belong to the current type
                    continue

                field_hint = TypeHint(field.type, field_origin[field.name]).evaluate().parameterize(parameter_map)

                # NOTE(NiklasRosenstein): In Python 3.6, Mypy complains about "Callable does not accept self argument",
                #       but we also cannot ignore it because of warn_unused_ignores.
                _field_default_factory = getattr(field, "default_factory")

                # Compare sentinels by identity: `==` would invoke a user-defined __eq__ on the
                # default value, which may misbehave (e.g. return a non-bool for array-likes).
                default = NotSet.Value if field.default is MISSING else field.default
                default_factory = NotSet.Value if _field_default_factory is MISSING else _field_default_factory
                has_default = default is not NotSet.Value or default_factory is not NotSet.Value
                required = _is_required(field_hint, not has_default)

                fields[field.name] = Field(
                    datatype=field_hint,
                    required=required,
                    default=None if not required and not has_default else default,
                    default_factory=default_factory,
                    flattened=_is_flat(field_hint, False),
                )
        else:
            # This could mean that a base class is a dataclass but all of its members
            # are overwritten by other fields.
            pass

        # Continue with the base classes.
        for base in hint.bases or hint.type.__bases__:
            base_hint = TypeHint(base, source=hint.type).evaluate().parameterize(parameter_map)
            assert isinstance(base_hint, ClassTypeHint), f"nani? {base_hint}"
            if dataclasses.is_dataclass(base_hint.type):
                queue.append(base_hint)

    return Schema(fields, t.cast("Constructor", dataclass_type), dataclass_type)

convert_to_schema

convert_to_schema(hint: TypeHint) -> Schema

Convert the given type hint to a #Schema.

The function delegates to #convert_dataclass_to_schema() or #convert_typed_dict_to_schema().

Parameters:

Name Type Description Default
hint TypeHint

The type hint to convert. If it is a #AnnotatedTypeHint hint, it will be unwrapped.

required

Raises: ValueError: If the type hint is not supported.

Source code in databind/core/schema.py
def convert_to_schema(hint: TypeHint) -> Schema:
    """Convert the given type hint to a #Schema.

    The function delegates to #convert_dataclass_to_schema() or #convert_typed_dict_to_schema().

    Arguments:
      hint: The type hint to convert. If it is a #AnnotatedTypeHint hint, it will be unwrapped.
    Raises:
      ValueError: If the type hint is not supported.
    """

    assert isinstance(hint, TypeHint), hint
    original_hint = hint

    # Peel off an Annotated[...] wrapper, remembering its metadata so it can be re-attached below.
    metadata: t.List[t.Any] = []
    if isinstance(hint, AnnotatedTypeHint):
        metadata = list(hint.metadata)
        hint = hint[0]

    is_class_hint = isinstance(hint, ClassTypeHint)
    if is_class_hint and dataclasses.is_dataclass(hint.type):
        schema = convert_dataclass_to_schema(hint)
    elif is_class_hint and is_typed_dict(hint.type):
        # TODO(@NiklasRosenstein): Pass in the original TypeHint which will contain information about
        #   TypeVar parametrization that is lost when we just pass the generic type.
        schema = convert_typed_dict_to_schema(hint.type)
    else:
        raise ValueError(f"cannot be converted to a schema (not a dataclass or TypedDict): {type_repr(original_hint)}")

    schema.annotations.extend(metadata)
    return schema

convert_typed_dict_to_schema

convert_typed_dict_to_schema(typed_dict: Union[TypedDictProtocol, Type[Any], TypeHint]) -> Schema

Converts the definition of a #typing.TypedDict to a #Schema.

Note

This function will take into account default values assigned on the class-level of the typed dict (which is usually only relevant if the class-style declaration method was used, but default values can be assigned to the function-style declared type as well). Fields that have default values are considered not-required even if the declaration specifies them as required.

Be aware that right-hand side values on #typing.TypedDict classes are not allowed by Mypy.

Also note that #typing.TypedDict cannot be mixed with #typing.Generic, so keys with a generic type in the typed dict are not possible (state: 2022-03-17, Python 3.10.2).

Todo

Support understanding #typing.Required and #typing.NotRequired.

Example:

from databind.core.schema import convert_typed_dict_to_schema, Schema, Field
from typing import TypedDict
from typeapi import TypeHint
class Movie(typing.TypedDict):
  name: str
  year: int = 0
assert convert_typed_dict_to_schema(Movie) == Schema({
  'name': Field(TypeHint(str)),
  'year': Field(TypeHint(int), False, 0),
}, Movie)
Source code in databind/core/schema.py
def convert_typed_dict_to_schema(typed_dict: t.Union[TypedDictProtocol, t.Type[t.Any], TypeHint]) -> Schema:
    """Converts the definition of a #typing.TypedDict to a #Schema.

    !!! note

        This function will take into account default values assigned on the class-level of the typed dict (which is
        usually only relevant if the class-style declaration method was used, but default values can be assigned to
        the function-style declared type as well). Fields that have default values are considered not-required even
        if the declaration specifies them as required.

        Be aware that right-hand side values on #typing.TypedDict classes are not allowed by Mypy.

        Also note that #typing.TypedDict cannot be mixed with #typing.Generic, so keys with a generic type in the
        typed dict are not possible (state: 2022-03-17, Python 3.10.2).

    !!! todo

        Support understanding #typing.Required and #typing.NotRequired.

    Example:

    ```py
    from databind.core.schema import convert_typed_dict_to_schema, Schema, Field
    from typing import TypedDict
    from typeapi import TypeHint
    class Movie(typing.TypedDict):
      name: str
      year: int = 0
    assert convert_typed_dict_to_schema(Movie) == Schema({
      'name': Field(TypeHint(str)),
      'year': Field(TypeHint(int), False, 0),
    }, Movie)
    ```
    """

    # Accept a TypeHint wrapper as well, but only for plain classes.
    if isinstance(typed_dict, TypeHint):
        if not isinstance(typed_dict, ClassTypeHint):
            raise TypeError(f"expected ClassTypeHint, got {typed_dict}")
        typed_dict = typed_dict.type

    assert is_typed_dict(typed_dict), typed_dict

    # Forward references in the annotations are resolved against the module that declared the TypedDict.
    eval_context = vars(sys.modules[typed_dict.__module__])

    annotations = get_annotations(t.cast(type, typed_dict))
    fields: t.Dict[str, Field] = {}
    for key in typed_dict.__required_keys__ | typed_dict.__optional_keys__:
        field_hint = TypeHint(annotations[key]).evaluate(eval_context)

        # A class-level attribute of the same name acts as the field's default value and makes it
        # not-required (see the note in the docstring).
        has_default = hasattr(typed_dict, key)
        required = _is_required(field_hint, not has_default)
        fields[key] = Field(
            datatype=field_hint,
            # With total=False, every key is optional regardless of the computed `required`.
            required=required and typed_dict.__total__,
            default=getattr(typed_dict, key) if has_default else None if not required else NotSet.Value,
            flattened=_is_flat(field_hint, False),
        )

    return Schema(fields, t.cast("Constructor", typed_dict), t.cast(type, typed_dict))

format_context_trace

format_context_trace(ctx: Context) -> str

Formats a trace for the given context that is convenient to inspect in case of errors to understand where the context is pointing to in the payload that is being converted.

Source code in databind/core/context.py
def format_context_trace(ctx: Context) -> str:
    """Render a human-readable trace for *ctx*, useful for error messages to show where in the
    payload being converted the context points to."""

    lines: t.List[str] = []
    last_filename: t.Union[str, None] = None
    for node in reversed(list(ctx.iter_hierarchy_up())):
        # Emit the filename on the first context and whenever it changes.
        filename = node.location.filename
        if filename is not None and filename != last_filename:
            lines.append(f'In "{filename}"')
            last_filename = filename

        # Translate the context key into a JSONPath-like token.
        if node.key is Context.ROOT:
            key = "$"
        elif isinstance(node.key, str):
            key = f".{node.key}"
        elif isinstance(node.key, int):
            key = f"[{node.key}]"
        elif node.key is None:
            key = "^"
        else:
            raise TypeError(f"encountered unexpected type in Context.key: {node.key.__class__.__name__!r}")

        entry = f"  {key}: {node.datatype}"
        if node.location.line or node.location.column:
            entry = f"{entry} (at {node.location.line}:{node.location.column})"

        lines.append(entry)

    return "\n".join(lines)

get_annotation_setting

get_annotation_setting(type_: TypeHint, setting_type: Type[T_Setting]) -> T_Setting | None

Returns the first setting of the given setting_type from the given type hint from inspecting the metadata of the #AnnotatedTypeHint. Returns None if no such setting exists or if type_ is not an #AnnotatedTypeHint instance.

Source code in databind/core/settings.py
def get_annotation_setting(type_: TypeHint, setting_type: t.Type[T_Setting]) -> "T_Setting | None":
    """Return the highest-priority setting of *setting_type* found in the metadata of *type_*.
    Returns `None` when *type_* is not an #AnnotatedTypeHint or carries no matching setting."""

    if not isinstance(type_, AnnotatedTypeHint):
        return None
    return get_highest_setting(item for item in type_.metadata if isinstance(item, setting_type))

get_class_setting

get_class_setting(type_: type, setting_type: Type[T_ClassDecoratorSetting]) -> T_ClassDecoratorSetting | None

Returns the first instance of the given setting_type on type_.

Source code in databind/core/settings.py
def get_class_setting(type_: type, setting_type: t.Type[T_ClassDecoratorSetting]) -> "T_ClassDecoratorSetting | None":
    """Return the highest-priority instance of *setting_type* attached to *type_*, or `None`."""

    candidates = get_class_settings(type_, setting_type)
    return get_highest_setting(candidates)

get_class_settings

get_class_settings(type_: type, setting_type: Type[T_ClassDecoratorSetting]) -> Iterable[T_ClassDecoratorSetting]

Returns all matching settings on type_.

Source code in databind/core/settings.py
def get_class_settings(
    type_: type, setting_type: t.Type[T_ClassDecoratorSetting]
) -> t.Iterable[T_ClassDecoratorSetting]:
    """Yield every setting attached directly to *type_* that is an instance of *setting_type*."""

    # Use vars() (the class' own __dict__) on purpose: settings on base classes are not inherited here.
    attached = vars(type_).get("__databind_settings__", [])
    yield from (candidate for candidate in attached if isinstance(candidate, setting_type))

get_fields_expanded

get_fields_expanded(schema: Schema, convert_to_schema: Callable[[TypeHint], Schema] = convert_to_schema) -> Dict[str, Dict[str, Field]]

Returns a dictionary that contains an entry for each flattened field in the schema, mapping to another dictionary that contains all fields expanded from the flattened field's sub-schema.

Given a schema like the following example, this function returns something akin to the below.

=== "Schema"

```
Schema1:
  a: int
  b: Schema2, flattened=True

Schema2:
  c: str
  d: Schema3, flattened=True

Schema3:
  e: int
```

=== "Result"

```py
{
  "b": {
    "c": Field(str),
    "e": Field(int)
  }
}

Parameters:

  schema: The schema to compile the expanded fields for.
  convert_to_schema: A function that accepts a #TypeHint and converts it to a schema. Defaults to the #convert_to_schema() function.

Note

The top-level dictionary returned by this function contains only those fields that are flattened and should be "composed" of other fields.

Source code in databind/core/schema.py
def get_fields_expanded(
    schema: Schema,
    convert_to_schema: t.Callable[[TypeHint], Schema] = convert_to_schema,
) -> t.Dict[str, t.Dict[str, Field]]:
    """Returns a dictionary that contains an entry for each flattened field in the schema, mapping to another
    dictionary that contains _all_ fields expanded from the flattened field's sub-schema.

    Given a schema like the following example, this function returns something akin to the below.

    === "Schema"

        ```
        Schema1:
          a: int
          b: Schema2, flattened=True

        Schema2:
          c: str
          d: Schema3, flattened=True

        Schema3:
          e: int
        ```

    === "Result"

        ```py
        {
          "b": {
            "c": Field(str),
            "e": Field(int)
          }
        }
        ```

    Arguments:
      schema: The schema to compile the expanded fields for.
      convert_to_schema: A function that accepts a #TypeHint and converts it to a schema.
        Defaults to the #convert_to_schema() function.

    !!! note

        The top-level dictionary returned by this function contains _only_ those fields that are
        flattened and should be "composed" of other fields.
    """

    result = {}
    for field_name, field in schema.fields.items():
        if field.flattened:
            # Expand the flattened field's sub-schema; nested flattened fields are expanded recursively.
            field_schema = convert_to_schema(field.datatype)
            result[field_name] = {
                **{k: v for k, v in field_schema.fields.items() if not v.flattened},
                **{k: v for sf in get_fields_expanded(field_schema).values() for k, v in sf.items()},
            }
            # An expanded sub-field must not collide with a field of the outer schema (other than
            # the flattened container field it came from).
            for sub_field_name in result[field_name]:
                if sub_field_name in schema.fields and sub_field_name != field_name:
                    raise RuntimeError(f"field {sub_field_name!r} occurs multiple times")
    return result

get_highest_setting

get_highest_setting(settings: Iterable[T_Setting]) -> T_Setting | None

Return the first, highest setting of settings.

Source code in databind/core/settings.py
def get_highest_setting(settings: t.Iterable[T_Setting]) -> "T_Setting | None":
    """Return the setting with the greatest priority; when priorities tie, the one encountered
    first wins. Returns `None` for an empty iterable."""

    best: "T_Setting | None" = None
    for candidate in settings:
        if best is None or candidate.priority > best.priority:
            best = candidate
    return best