author     Louis Sautier <sbraz@gentoo.org>    2020-10-08 18:30:30 +0200
committer  Louis Sautier <sbraz@gentoo.org>    2020-10-08 21:43:27 +0200
commit     b46fa2605f7b0ddc4427f2d601c82eb4a289faef (patch)
tree       ec64859b1baf5105bc0e835fc8b8724e0ff59b03
parent     media-plugins/live: Stabilize 2020.05.15 arm, #747205 (diff)
dev-python/pydantic: new package, v1.6.1, indirect dep. of isort
Newer versions of dev-python/isort require hypothesis-auto which in turn
requires pydantic.

Package-Manager: Portage-3.0.8, Repoman-3.0.1
Signed-off-by: Louis Sautier <sbraz@gentoo.org>
-rw-r--r--  dev-python/pydantic/Manifest                                  1
-rw-r--r--  dev-python/pydantic/files/pydantic-1.6.1-fix-tests.patch     45
-rw-r--r--  dev-python/pydantic/files/pydantic-1.6.1-py39.patch         316
-rw-r--r--  dev-python/pydantic/metadata.xml                             14
-rw-r--r--  dev-python/pydantic/pydantic-1.6.1.ebuild                    38
5 files changed, 414 insertions, 0 deletions
diff --git a/dev-python/pydantic/Manifest b/dev-python/pydantic/Manifest
new file mode 100644
index 000000000000..08803723d751
--- /dev/null
+++ b/dev-python/pydantic/Manifest
@@ -0,0 +1 @@
+DIST pydantic-1.6.1.tar.gz 244677 BLAKE2B 0a1d6986858ab753cbc4dd262bd268b2a365c241a7de106a7bf21714c6c499c7dbacaea095789d3503507965ca50106da32545be670ed0261827f1fe0316ff67 SHA512 b0442f78a661f56d29697da7e27393261420bdb0f08473d2f7872145a49a18de8c17c903d0899a44263bcd8229e8790161da032dd5a3d5f2dae347c66bd6a567
diff --git a/dev-python/pydantic/files/pydantic-1.6.1-fix-tests.patch b/dev-python/pydantic/files/pydantic-1.6.1-fix-tests.patch
new file mode 100644
index 000000000000..1d6dc5d311a6
--- /dev/null
+++ b/dev-python/pydantic/files/pydantic-1.6.1-fix-tests.patch
@@ -0,0 +1,45 @@
+commit 44eac223df11a1b711ffe0d7946019a173c2a88e
+Author: Louis Sautier <sautier.louis@gmail.com>
+Date: Thu Oct 8 14:19:58 2020 +0200
+
+ test_config_file_settings_nornir: use less common env. var names
+
+ Gentoo's Portage sets A during the test phase; switching to less common
+ environment variable names reduces the likelihood of something similar
+ happening.
+
+diff --git a/tests/test_settings.py b/tests/test_settings.py
+index d24aae3..1f4d678 100644
+--- a/tests/test_settings.py
++++ b/tests/test_settings.py
+@@ -437,21 +437,21 @@ def test_config_file_settings_nornir(env):
+ """
+
+ class Settings(BaseSettings):
+- a: str
+- b: str
+- c: str
++ param_a: str
++ param_b: str
++ param_c: str
+
+ def _build_values(self, init_kwargs, _env_file, _env_file_encoding):
+ config_settings = init_kwargs.pop('__config_settings__')
+ return {**config_settings, **init_kwargs, **self._build_environ()}
+
+- env.set('C', 'env setting c')
++ env.set('PARAM_C', 'env setting c')
+
+- config = {'a': 'config a', 'b': 'config b', 'c': 'config c'}
+- s = Settings(__config_settings__=config, b='argument b', c='argument c')
+- assert s.a == 'config a'
+- assert s.b == 'argument b'
+- assert s.c == 'env setting c'
++ config = {'param_a': 'config a', 'param_b': 'config b', 'param_c': 'config c'}
++ s = Settings(__config_settings__=config, param_b='argument b', param_c='argument c')
++ assert s.param_a == 'config a'
++ assert s.param_b == 'argument b'
++ assert s.param_c == 'env setting c'
+
+
+ test_env_file = """\
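
For context, the collision that the patch above works around can be reproduced with a minimal sketch (not part of the commit); the field and value names here are illustrative, and it assumes pydantic's BaseSettings default of case-insensitive environment variable lookup:

import os
from pydantic import BaseSettings

class Settings(BaseSettings):
    a: str = "default"          # short field name, as in the original test

os.environ["A"] = "portage"     # Portage exports A while running src_test
print(Settings().a)             # prints "portage": the env var shadows the default

Renaming the test fields to param_a/param_b/param_c makes an accidental match with a variable exported by the package manager far less likely.
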
diff --git a/dev-python/pydantic/files/pydantic-1.6.1-py39.patch b/dev-python/pydantic/files/pydantic-1.6.1-py39.patch
new file mode 100644
index 000000000000..7cd5290cd591
--- /dev/null
+++ b/dev-python/pydantic/files/pydantic-1.6.1-py39.patch
@@ -0,0 +1,316 @@
+diff --git a/changes/1832-PrettyWood.md b/changes/1832-PrettyWood.md
+new file mode 100644
+index 000000000..5038a0da0
+--- /dev/null
++++ b/changes/1832-PrettyWood.md
+@@ -0,0 +1 @@
++add basic support of python 3.9
+diff --git a/pydantic/fields.py b/pydantic/fields.py
+index 01792b628..c52b34ea4 100644
+--- a/pydantic/fields.py
++++ b/pydantic/fields.py
+@@ -32,6 +32,8 @@
+ NoArgAnyCallable,
+ NoneType,
+ display_as_type,
++ get_args,
++ get_origin,
+ is_literal_type,
+ is_new_type,
+ new_type_supertype,
+@@ -411,7 +413,7 @@ def _type_analysis(self) -> None: # noqa: C901 (ignore complexity)
+ elif is_literal_type(self.type_):
+ return
+
+- origin = getattr(self.type_, '__origin__', None)
++ origin = get_origin(self.type_)
+ if origin is None:
+ # field is not "typing" object eg. Union, Dict, List etc.
+ # allow None for virtual superclasses of NoneType, e.g. Hashable
+@@ -422,7 +424,7 @@ def _type_analysis(self) -> None: # noqa: C901 (ignore complexity)
+ return
+ if origin is Union:
+ types_ = []
+- for type_ in self.type_.__args__:
++ for type_ in get_args(self.type_):
+ if type_ is NoneType:
+ if self.required is Undefined:
+ self.required = False
+@@ -444,9 +446,9 @@ def _type_analysis(self) -> None: # noqa: C901 (ignore complexity)
+ if issubclass(origin, Tuple): # type: ignore
+ self.shape = SHAPE_TUPLE
+ self.sub_fields = []
+- for i, t in enumerate(self.type_.__args__):
++ for i, t in enumerate(get_args(self.type_)):
+ if t is Ellipsis:
+- self.type_ = self.type_.__args__[0]
++ self.type_ = get_args(self.type_)[0]
+ self.shape = SHAPE_TUPLE_ELLIPSIS
+ return
+ self.sub_fields.append(self._create_sub_type(t, f'{self.name}_{i}'))
+@@ -460,7 +462,7 @@ def _type_analysis(self) -> None: # noqa: C901 (ignore complexity)
+ {f'list_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())}
+ )
+
+- self.type_ = self.type_.__args__[0]
++ self.type_ = get_args(self.type_)[0]
+ self.shape = SHAPE_LIST
+ elif issubclass(origin, Set):
+ # Create self validators
+@@ -470,22 +472,22 @@ def _type_analysis(self) -> None: # noqa: C901 (ignore complexity)
+ {f'set_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())}
+ )
+
+- self.type_ = self.type_.__args__[0]
++ self.type_ = get_args(self.type_)[0]
+ self.shape = SHAPE_SET
+ elif issubclass(origin, FrozenSet):
+- self.type_ = self.type_.__args__[0]
++ self.type_ = get_args(self.type_)[0]
+ self.shape = SHAPE_FROZENSET
+ elif issubclass(origin, Sequence):
+- self.type_ = self.type_.__args__[0]
++ self.type_ = get_args(self.type_)[0]
+ self.shape = SHAPE_SEQUENCE
+ elif issubclass(origin, Mapping):
+- self.key_field = self._create_sub_type(self.type_.__args__[0], 'key_' + self.name, for_keys=True)
+- self.type_ = self.type_.__args__[1]
++ self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
++ self.type_ = get_args(self.type_)[1]
+ self.shape = SHAPE_MAPPING
+ # Equality check as almost everything inherits form Iterable, including str
+ # check for Iterable and CollectionsIterable, as it could receive one even when declared with the other
+ elif origin in {Iterable, CollectionsIterable}:
+- self.type_ = self.type_.__args__[0]
++ self.type_ = get_args(self.type_)[0]
+ self.shape = SHAPE_ITERABLE
+ self.sub_fields = [self._create_sub_type(self.type_, f'{self.name}_type')]
+ elif issubclass(origin, Type): # type: ignore
+@@ -494,7 +496,7 @@ def _type_analysis(self) -> None: # noqa: C901 (ignore complexity)
+ # Is a Pydantic-compatible generic that handles itself
+ # or we have arbitrary_types_allowed = True
+ self.shape = SHAPE_GENERIC
+- self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(self.type_.__args__)]
++ self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(get_args(self.type_))]
+ self.type_ = origin
+ return
+ else:
+diff --git a/pydantic/generics.py b/pydantic/generics.py
+index 64562227d..0a5e75401 100644
+--- a/pydantic/generics.py
++++ b/pydantic/generics.py
+@@ -3,6 +3,7 @@
+ from .class_validators import gather_all_validators
+ from .fields import FieldInfo, ModelField
+ from .main import BaseModel, create_model
++from .typing import get_origin
+ from .utils import lenient_issubclass
+
+ _generic_types_cache: Dict[Tuple[Type[Any], Union[Any, Tuple[Any, ...]]], Type[BaseModel]] = {}
+@@ -37,7 +38,7 @@ def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any], Tuple[T
+ check_parameters_count(cls, params)
+ typevars_map: Dict[TypeVarType, Type[Any]] = dict(zip(cls.__parameters__, params))
+ type_hints = get_type_hints(cls).items()
+- instance_type_hints = {k: v for k, v in type_hints if getattr(v, '__origin__', None) is not ClassVar}
++ instance_type_hints = {k: v for k, v in type_hints if get_origin(v) is not ClassVar}
+ concrete_type_hints: Dict[str, Type[Any]] = {
+ k: resolve_type_hint(v, typevars_map) for k, v in instance_type_hints.items()
+ }
+@@ -79,7 +80,7 @@ def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str:
+
+
+ def resolve_type_hint(type_: Any, typevars_map: Dict[Any, Any]) -> Type[Any]:
+- if hasattr(type_, '__origin__') and getattr(type_, '__parameters__', None):
++ if get_origin(type_) and getattr(type_, '__parameters__', None):
+ concrete_type_args = tuple([typevars_map[x] for x in type_.__parameters__])
+ return type_[concrete_type_args]
+ return typevars_map.get(type_, type_)
+diff --git a/pydantic/main.py b/pydantic/main.py
+index c872f1e3b..87299b645 100644
+--- a/pydantic/main.py
++++ b/pydantic/main.py
+@@ -33,7 +33,7 @@
+ from .parse import Protocol, load_file, load_str_bytes
+ from .schema import model_schema
+ from .types import PyObject, StrBytes
+-from .typing import AnyCallable, ForwardRef, is_classvar, resolve_annotations, update_field_forward_refs
++from .typing import AnyCallable, ForwardRef, get_origin, is_classvar, resolve_annotations, update_field_forward_refs
+ from .utils import (
+ ClassAttribute,
+ GetterDict,
+@@ -256,7 +256,7 @@ def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901
+ if (
+ isinstance(value, untouched_types)
+ and ann_type != PyObject
+- and not lenient_issubclass(getattr(ann_type, '__origin__', None), Type)
++ and not lenient_issubclass(get_origin(ann_type), Type)
+ ):
+ continue
+ fields[ann_name] = inferred = ModelField.infer(
+diff --git a/pydantic/schema.py b/pydantic/schema.py
+index 27c66b2bd..4f6258ab1 100644
+--- a/pydantic/schema.py
++++ b/pydantic/schema.py
+@@ -55,7 +55,7 @@
+ conset,
+ constr,
+ )
+-from .typing import ForwardRef, Literal, is_callable_type, is_literal_type, literal_values
++from .typing import ForwardRef, Literal, get_args, get_origin, is_callable_type, is_literal_type, literal_values
+ from .utils import get_model, lenient_issubclass, sequence_like
+
+ if TYPE_CHECKING:
+@@ -803,9 +803,9 @@ def go(type_: Any) -> Type[Any]:
+ or lenient_issubclass(type_, (ConstrainedList, ConstrainedSet))
+ ):
+ return type_
+- origin = getattr(type_, '__origin__', None)
++ origin = get_origin(type_)
+ if origin is not None:
+- args: Tuple[Any, ...] = type_.__args__
++ args: Tuple[Any, ...] = get_args(type_)
+ if any(isinstance(a, ForwardRef) for a in args):
+ # forward refs cause infinite recursion below
+ return type_
+diff --git a/pydantic/typing.py b/pydantic/typing.py
+index 070691eeb..729ebd71b 100644
+--- a/pydantic/typing.py
++++ b/pydantic/typing.py
+@@ -44,12 +44,19 @@ def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
+ return type_._eval_type(globalns, localns)
+
+
+-else:
++elif sys.version_info < (3, 9):
+
+ def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
+ return type_._evaluate(globalns, localns)
+
+
++else:
++
++ # TODO: remove the pragma: no cover once we can run CI on python 3.9
++ def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: # pragma: no cover
++ return type_._evaluate(globalns, localns, set())
++
++
+ if sys.version_info < (3, 7):
+ from typing import Callable as Callable
+
+@@ -70,8 +77,50 @@ def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
+ from typing_extensions import Literal
+ except ImportError:
+ Literal = None
++
++ def get_args(t: Type[Any]) -> Tuple[Any, ...]:
++ return getattr(t, '__args__', ())
++
++ def get_origin(t: Type[Any]) -> Optional[Type[Any]]:
++ return getattr(t, '__origin__', None)
++
++
+ else:
+- from typing import Literal
++ from typing import Literal, get_args as typing_get_args, get_origin as typing_get_origin
++
++ def get_origin(tp: Type[Any]) -> Type[Any]:
++ return typing_get_origin(tp) or getattr(tp, '__origin__', None)
++
++ def generic_get_args(tp: Type[Any]) -> Tuple[Any, ...]:
++ """
++ In python 3.9, `typing.Dict`, `typing.List`, ...
++ do have an empty `__args__` by default (instead of the generic ~T for example).
++ In order to still support `Dict` for example and consider it as `Dict[Any, Any]`,
++ we retrieve the `_nparams` value that tells us how many parameters it needs.
++ """
++ # TODO: remove the pragma: no cover once we can run CI on python 3.9
++ if hasattr(tp, '_nparams'): # pragma: no cover
++ return (Any,) * tp._nparams
++ return ()
++
++ def get_args(tp: Type[Any]) -> Tuple[Any, ...]:
++ """Get type arguments with all substitutions performed.
++
++ For unions, basic simplifications used by Union constructor are performed.
++ Examples::
++ get_args(Dict[str, int]) == (str, int)
++ get_args(int) == ()
++ get_args(Union[int, Union[T, int], str][int]) == (int, str)
++ get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
++ get_args(Callable[[], T][int]) == ([], int)
++ """
++ try:
++ args = typing_get_args(tp)
++ # TODO: remove the pragma: no cover once we can run CI on python 3.9
++ except IndexError: # pragma: no cover
++ args = ()
++ return args or getattr(tp, '__args__', ()) or generic_get_args(tp)
++
+
+ if TYPE_CHECKING:
+ from .fields import ModelField
+@@ -115,6 +164,8 @@ def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
+ 'CallableGenerator',
+ 'ReprArgs',
+ 'CallableGenerator',
++ 'get_args',
++ 'get_origin',
+ )
+
+
+@@ -167,16 +218,16 @@ def resolve_annotations(raw_annotations: Dict[str, Type[Any]], module_name: Opti
+
+
+ def is_callable_type(type_: Type[Any]) -> bool:
+- return type_ is Callable or getattr(type_, '__origin__', None) is Callable
++ return type_ is Callable or get_origin(type_) is Callable
+
+
+ if sys.version_info >= (3, 7):
+
+ def is_literal_type(type_: Type[Any]) -> bool:
+- return Literal is not None and getattr(type_, '__origin__', None) is Literal
++ return Literal is not None and get_origin(type_) is Literal
+
+ def literal_values(type_: Type[Any]) -> Tuple[Any, ...]:
+- return type_.__args__
++ return get_args(type_)
+
+
+ else:
+@@ -217,12 +268,15 @@ def new_type_supertype(type_: Type[Any]) -> Type[Any]:
+ return type_
+
+
+-def _check_classvar(v: Type[Any]) -> bool:
++def _check_classvar(v: Optional[Type[Any]]) -> bool:
++ if v is None:
++ return False
++
+ return v.__class__ == ClassVar.__class__ and (sys.version_info < (3, 7) or getattr(v, '_name', None) == 'ClassVar')
+
+
+ def is_classvar(ann_type: Type[Any]) -> bool:
+- return _check_classvar(ann_type) or _check_classvar(getattr(ann_type, '__origin__', None))
++ return _check_classvar(ann_type) or _check_classvar(get_origin(ann_type))
+
+
+ def update_field_forward_refs(field: 'ModelField', globalns: Any, localns: Any) -> None:
+@@ -243,13 +297,13 @@ def get_class(type_: Type[Any]) -> Union[None, bool, Type[Any]]:
+ without brackets. Otherwise returns None.
+ """
+ try:
+- origin = getattr(type_, '__origin__')
++ origin = get_origin(type_)
+ if origin is None: # Python 3.6
+ origin = type_
+ if issubclass(origin, Type): # type: ignore
+- if type_.__args__ is None or not isinstance(type_.__args__[0], type):
++ if not get_args(type_) or not isinstance(get_args(type_)[0], type):
+ return True
+- return type_.__args__[0]
+- except AttributeError:
++ return get_args(type_)[0]
++ except (AttributeError, TypeError):
+ pass
+ return None
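
The patch above replaces raw __origin__/__args__ attribute access with get_origin/get_args helpers. A minimal sketch (not part of the patch) of what those helpers return, assuming Python 3.8+ where typing ships both functions:

from typing import Dict, Optional, get_args, get_origin

print(get_origin(Dict[str, int]))   # <class 'dict'>
print(get_args(Dict[str, int]))     # (<class 'str'>, <class 'int'>)
print(get_origin(Optional[int]))    # typing.Union
print(get_args(Optional[int]))      # (<class 'int'>, <class 'NoneType'>)
# On Python 3.9 a bare Dict has an empty __args__ (no ~KT/~VT defaults), which
# is why the patch falls back to _nparams and treats it as Dict[Any, Any].
print(get_args(Dict))               # () on 3.9

The getattr-based fallbacks keep the same interface working on Python 3.6 and 3.7, where typing does not provide get_origin/get_args.
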
diff --git a/dev-python/pydantic/metadata.xml b/dev-python/pydantic/metadata.xml
new file mode 100644
index 000000000000..3f626e20bc7d
--- /dev/null
+++ b/dev-python/pydantic/metadata.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE pkgmetadata SYSTEM "http://www.gentoo.org/dtd/metadata.dtd">
+<pkgmetadata>
+ <maintainer type="project">
+ <email>python@gentoo.org</email>
+ <name>Python</name>
+ </maintainer>
+ <upstream>
+ <remote-id type="pypi">pydantic</remote-id>
+ <remote-id type="github">samuelcolvin/pydantic</remote-id>
+ <bugs-to>https://github.com/samuelcolvin/pydantic/issues</bugs-to>
+ <doc>https://pydantic-docs.helpmanual.io/</doc>
+ </upstream>
+</pkgmetadata>
diff --git a/dev-python/pydantic/pydantic-1.6.1.ebuild b/dev-python/pydantic/pydantic-1.6.1.ebuild
new file mode 100644
index 000000000000..584f26cc9ec3
--- /dev/null
+++ b/dev-python/pydantic/pydantic-1.6.1.ebuild
@@ -0,0 +1,38 @@
+# Copyright 1999-2020 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=7
+
+# At the moment, PyPy3 doesn't have a dataclasses module
+# It can probably be added when PyPy3.7 is stable
+PYTHON_COMPAT=( python3_{7..9} )
+
+inherit distutils-r1
+
+DESCRIPTION="Data parsing and validation using Python type hints"
+HOMEPAGE="https://github.com/samuelcolvin/pydantic"
+# No tests on PyPI: https://github.com/samuelcolvin/pydantic/pull/1976
+SRC_URI="https://github.com/samuelcolvin/${PN}/archive/v${PV}.tar.gz -> ${P}.tar.gz"
+
+LICENSE="MIT"
+SLOT="0"
+KEYWORDS="~amd64 ~x86"
+
+BDEPEND="
+ dev-python/cython[${PYTHON_USEDEP}]
+"
+
+PATCHES=(
+ # https://github.com/samuelcolvin/pydantic/pull/1977
+ "${FILESDIR}/${P}-fix-tests.patch"
+ # https://github.com/samuelcolvin/pydantic/pull/1844
+ "${FILESDIR}/${P}-py39.patch"
+)
+
+distutils_enable_tests pytest
+
+python_prepare_all() {
+ # So we don't need pytest-timeout
+ sed -i '/^timeout = /d' setup.cfg || die
+ distutils-r1_python_prepare_all
+}
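
For reference (not part of the ebuild), a minimal sketch of the validation behaviour the DESCRIPTION refers to, assuming pydantic 1.x; the User model is illustrative:

from pydantic import BaseModel, ValidationError

class User(BaseModel):
    id: int
    name: str = "anonymous"

print(User(id="42"))            # coerces the string: id=42 name='anonymous'
try:
    User(id="not a number")
except ValidationError as err:
    print(err)                  # reports that id is not a valid integer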