• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1"""
2The typing module: Support for gradual typing as defined by PEP 484 and subsequent PEPs.
3
4Among other things, the module includes the following:
5* Generic, Protocol, and internal machinery to support generic aliases.
6  All subscripted types like X[int], Union[int, str] are generic aliases.
7* Various "special forms" that have unique meanings in type annotations:
8  NoReturn, Never, ClassVar, Self, Concatenate, Unpack, and others.
9* Classes whose instances can be type arguments to generic classes and functions:
10  TypeVar, ParamSpec, TypeVarTuple.
11* Public helper functions: get_type_hints, overload, cast, final, and others.
12* Several protocols to support duck-typing:
13  SupportsFloat, SupportsIndex, SupportsAbs, and others.
14* Special types: NewType, NamedTuple, TypedDict.
15* Deprecated aliases for builtin types and collections.abc ABCs.
16
17Any name not present in __all__ is an implementation detail
18that may be changed without notice. Use at your own risk!
19"""
20
21from abc import abstractmethod, ABCMeta
22import collections
23from collections import defaultdict
24import collections.abc
25import copyreg
26import functools
27import operator
28import sys
29import types
30from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType, GenericAlias
31
32from _typing import (
33    _idfunc,
34    TypeVar,
35    ParamSpec,
36    TypeVarTuple,
37    ParamSpecArgs,
38    ParamSpecKwargs,
39    TypeAliasType,
40    Generic,
41    NoDefault,
42)
43
44# Please keep __all__ alphabetized within each category.
45__all__ = [
46    # Super-special typing primitives.
47    'Annotated',
48    'Any',
49    'Callable',
50    'ClassVar',
51    'Concatenate',
52    'Final',
53    'ForwardRef',
54    'Generic',
55    'Literal',
56    'Optional',
57    'ParamSpec',
58    'Protocol',
59    'Tuple',
60    'Type',
61    'TypeVar',
62    'TypeVarTuple',
63    'Union',
64
65    # ABCs (from collections.abc).
66    'AbstractSet',  # collections.abc.Set.
67    'ByteString',
68    'Container',
69    'ContextManager',
70    'Hashable',
71    'ItemsView',
72    'Iterable',
73    'Iterator',
74    'KeysView',
75    'Mapping',
76    'MappingView',
77    'MutableMapping',
78    'MutableSequence',
79    'MutableSet',
80    'Sequence',
81    'Sized',
82    'ValuesView',
83    'Awaitable',
84    'AsyncIterator',
85    'AsyncIterable',
86    'Coroutine',
87    'Collection',
88    'AsyncGenerator',
89    'AsyncContextManager',
90
91    # Structural checks, a.k.a. protocols.
92    'Reversible',
93    'SupportsAbs',
94    'SupportsBytes',
95    'SupportsComplex',
96    'SupportsFloat',
97    'SupportsIndex',
98    'SupportsInt',
99    'SupportsRound',
100
101    # Concrete collection types.
102    'ChainMap',
103    'Counter',
104    'Deque',
105    'Dict',
106    'DefaultDict',
107    'List',
108    'OrderedDict',
109    'Set',
110    'FrozenSet',
111    'NamedTuple',  # Not really a type.
112    'TypedDict',  # Not really a type.
113    'Generator',
114
115    # Other concrete types.
116    'BinaryIO',
117    'IO',
118    'Match',
119    'Pattern',
120    'TextIO',
121
122    # One-off things.
123    'AnyStr',
124    'assert_type',
125    'assert_never',
126    'cast',
127    'clear_overloads',
128    'dataclass_transform',
129    'final',
130    'get_args',
131    'get_origin',
132    'get_overloads',
133    'get_protocol_members',
134    'get_type_hints',
135    'is_protocol',
136    'is_typeddict',
137    'LiteralString',
138    'Never',
139    'NewType',
140    'no_type_check',
141    'no_type_check_decorator',
142    'NoDefault',
143    'NoReturn',
144    'NotRequired',
145    'overload',
146    'override',
147    'ParamSpecArgs',
148    'ParamSpecKwargs',
149    'ReadOnly',
150    'Required',
151    'reveal_type',
152    'runtime_checkable',
153    'Self',
154    'Text',
155    'TYPE_CHECKING',
156    'TypeAlias',
157    'TypeGuard',
158    'TypeIs',
159    'TypeAliasType',
160    'Unpack',
161]
162
163
164def _type_convert(arg, module=None, *, allow_special_forms=False):
165    """For converting None to type(None), and strings to ForwardRef."""
166    if arg is None:
167        return type(None)
168    if isinstance(arg, str):
169        return ForwardRef(arg, module=module, is_class=allow_special_forms)
170    return arg
171
172
173def _type_check(arg, msg, is_argument=True, module=None, *, allow_special_forms=False):
174    """Check that the argument is a type, and return it (internal helper).
175
176    As a special case, accept None and return type(None) instead. Also wrap strings
177    into ForwardRef instances. Consider several corner cases, for example plain
178    special forms like Union are not valid, while Union[int, str] is OK, etc.
179    The msg argument is a human-readable error message, e.g.::
180
181        "Union[arg, ...]: arg should be a type."
182
183    We append the repr() of the actual value (truncated to 100 chars).
184    """
185    invalid_generic_forms = (Generic, Protocol)
186    if not allow_special_forms:
187        invalid_generic_forms += (ClassVar,)
188        if is_argument:
189            invalid_generic_forms += (Final,)
190
191    arg = _type_convert(arg, module=module, allow_special_forms=allow_special_forms)
192    if (isinstance(arg, _GenericAlias) and
193            arg.__origin__ in invalid_generic_forms):
194        raise TypeError(f"{arg} is not valid as type argument")
195    if arg in (Any, LiteralString, NoReturn, Never, Self, TypeAlias):
196        return arg
197    if allow_special_forms and arg in (ClassVar, Final):
198        return arg
199    if isinstance(arg, _SpecialForm) or arg in (Generic, Protocol):
200        raise TypeError(f"Plain {arg} is not valid as type argument")
201    if type(arg) is tuple:
202        raise TypeError(f"{msg} Got {arg!r:.100}.")
203    return arg
204
205
206def _is_param_expr(arg):
207    return arg is ... or isinstance(arg,
208            (tuple, list, ParamSpec, _ConcatenateGenericAlias))
209
210
211def _should_unflatten_callable_args(typ, args):
212    """Internal helper for munging collections.abc.Callable's __args__.
213
214    The canonical representation for a Callable's __args__ flattens the
215    argument types, see https://github.com/python/cpython/issues/86361.
216
217    For example::
218
219        >>> import collections.abc
220        >>> P = ParamSpec('P')
221        >>> collections.abc.Callable[[int, int], str].__args__ == (int, int, str)
222        True
223        >>> collections.abc.Callable[P, str].__args__ == (P, str)
224        True
225
226    As a result, if we need to reconstruct the Callable from its __args__,
227    we need to unflatten it.
228    """
229    return (
230        typ.__origin__ is collections.abc.Callable
231        and not (len(args) == 2 and _is_param_expr(args[0]))
232    )
233
234
235def _type_repr(obj):
236    """Return the repr() of an object, special-casing types (internal helper).
237
238    If obj is a type, we return a shorter version than the default
239    type.__repr__, based on the module and qualified name, which is
240    typically enough to uniquely identify a type.  For everything
241    else, we fall back on repr(obj).
242    """
243    # When changing this function, don't forget about
244    # `_collections_abc._type_repr`, which does the same thing
245    # and must be consistent with this one.
246    if isinstance(obj, type):
247        if obj.__module__ == 'builtins':
248            return obj.__qualname__
249        return f'{obj.__module__}.{obj.__qualname__}'
250    if obj is ...:
251        return '...'
252    if isinstance(obj, types.FunctionType):
253        return obj.__name__
254    if isinstance(obj, tuple):
255        # Special case for `repr` of types with `ParamSpec`:
256        return '[' + ', '.join(_type_repr(t) for t in obj) + ']'
257    return repr(obj)
258
259
260def _collect_type_parameters(args, *, enforce_default_ordering: bool = True):
261    """Collect all type parameters in args
262    in order of first appearance (lexicographic order).
263
264    For example::
265
266        >>> P = ParamSpec('P')
267        >>> T = TypeVar('T')
268        >>> _collect_type_parameters((T, Callable[P, T]))
269        (~T, ~P)
270    """
271    # required type parameter cannot appear after parameter with default
272    default_encountered = False
273    # or after TypeVarTuple
274    type_var_tuple_encountered = False
275    parameters = []
276    for t in args:
277        if isinstance(t, type):
278            # We don't want __parameters__ descriptor of a bare Python class.
279            pass
280        elif isinstance(t, tuple):
281            # `t` might be a tuple, when `ParamSpec` is substituted with
282            # `[T, int]`, or `[int, *Ts]`, etc.
283            for x in t:
284                for collected in _collect_type_parameters([x]):
285                    if collected not in parameters:
286                        parameters.append(collected)
287        elif hasattr(t, '__typing_subst__'):
288            if t not in parameters:
289                if enforce_default_ordering:
290                    if type_var_tuple_encountered and t.has_default():
291                        raise TypeError('Type parameter with a default'
292                                        ' follows TypeVarTuple')
293
294                    if t.has_default():
295                        default_encountered = True
296                    elif default_encountered:
297                        raise TypeError(f'Type parameter {t!r} without a default'
298                                        ' follows type parameter with a default')
299
300                parameters.append(t)
301        else:
302            if _is_unpacked_typevartuple(t):
303                type_var_tuple_encountered = True
304            for x in getattr(t, '__parameters__', ()):
305                if x not in parameters:
306                    parameters.append(x)
307    return tuple(parameters)
308
309
310def _check_generic_specialization(cls, arguments):
311    """Check correct count for parameters of a generic cls (internal helper).
312
313    This gives a nice error message in case of count mismatch.
314    """
315    expected_len = len(cls.__parameters__)
316    if not expected_len:
317        raise TypeError(f"{cls} is not a generic class")
318    actual_len = len(arguments)
319    if actual_len != expected_len:
320        # deal with defaults
321        if actual_len < expected_len:
322            # If the parameter at index `actual_len` in the parameters list
323            # has a default, then all parameters after it must also have
324            # one, because we validated as much in _collect_type_parameters().
325            # That means that no error needs to be raised here, despite
326            # the number of arguments being passed not matching the number
327            # of parameters: all parameters that aren't explicitly
328            # specialized in this call are parameters with default values.
329            if cls.__parameters__[actual_len].has_default():
330                return
331
332            expected_len -= sum(p.has_default() for p in cls.__parameters__)
333            expect_val = f"at least {expected_len}"
334        else:
335            expect_val = expected_len
336
337        raise TypeError(f"Too {'many' if actual_len > expected_len else 'few'} arguments"
338                        f" for {cls}; actual {actual_len}, expected {expect_val}")
339
340
341def _unpack_args(*args):
342    newargs = []
343    for arg in args:
344        subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
345        if subargs is not None and not (subargs and subargs[-1] is ...):
346            newargs.extend(subargs)
347        else:
348            newargs.append(arg)
349    return newargs
350
351def _deduplicate(params, *, unhashable_fallback=False):
352    # Weed out strict duplicates, preserving the first of each occurrence.
353    try:
354        return dict.fromkeys(params)
355    except TypeError:
356        if not unhashable_fallback:
357            raise
358        # Happens for cases like `Annotated[dict, {'x': IntValidator()}]`
359        return _deduplicate_unhashable(params)
360
361def _deduplicate_unhashable(unhashable_params):
362    new_unhashable = []
363    for t in unhashable_params:
364        if t not in new_unhashable:
365            new_unhashable.append(t)
366    return new_unhashable
367
368def _compare_args_orderless(first_args, second_args):
369    first_unhashable = _deduplicate_unhashable(first_args)
370    second_unhashable = _deduplicate_unhashable(second_args)
371    t = list(second_unhashable)
372    try:
373        for elem in first_unhashable:
374            t.remove(elem)
375    except ValueError:
376        return False
377    return not t
378
379def _remove_dups_flatten(parameters):
380    """Internal helper for Union creation and substitution.
381
382    Flatten Unions among parameters, then remove duplicates.
383    """
384    # Flatten out Union[Union[...], ...].
385    params = []
386    for p in parameters:
387        if isinstance(p, (_UnionGenericAlias, types.UnionType)):
388            params.extend(p.__args__)
389        else:
390            params.append(p)
391
392    return tuple(_deduplicate(params, unhashable_fallback=True))
393
394
395def _flatten_literal_params(parameters):
396    """Internal helper for Literal creation: flatten Literals among parameters."""
397    params = []
398    for p in parameters:
399        if isinstance(p, _LiteralGenericAlias):
400            params.extend(p.__args__)
401        else:
402            params.append(p)
403    return tuple(params)
404
405
406_cleanups = []
407_caches = {}
408
409
410def _tp_cache(func=None, /, *, typed=False):
411    """Internal wrapper caching __getitem__ of generic types.
412
413    For non-hashable arguments, the original function is used as a fallback.
414    """
415    def decorator(func):
416        # The callback 'inner' references the newly created lru_cache
417        # indirectly by performing a lookup in the global '_caches' dictionary.
418        # This breaks a reference that can be problematic when combined with
419        # C API extensions that leak references to types. See GH-98253.
420
421        cache = functools.lru_cache(typed=typed)(func)
422        _caches[func] = cache
423        _cleanups.append(cache.cache_clear)
424        del cache
425
426        @functools.wraps(func)
427        def inner(*args, **kwds):
428            try:
429                return _caches[func](*args, **kwds)
430            except TypeError:
431                pass  # All real errors (not unhashable args) are raised below.
432            return func(*args, **kwds)
433        return inner
434
435    if func is not None:
436        return decorator(func)
437
438    return decorator
439
440
441def _deprecation_warning_for_no_type_params_passed(funcname: str) -> None:
442    import warnings
443
444    depr_message = (
445        f"Failing to pass a value to the 'type_params' parameter "
446        f"of {funcname!r} is deprecated, as it leads to incorrect behaviour "
447        f"when calling {funcname} on a stringified annotation "
448        f"that references a PEP 695 type parameter. "
449        f"It will be disallowed in Python 3.15."
450    )
451    warnings.warn(depr_message, category=DeprecationWarning, stacklevel=3)
452
453
454class _Sentinel:
455    __slots__ = ()
456    def __repr__(self):
457        return '<sentinel>'
458
459
460_sentinel = _Sentinel()
461
462
463def _eval_type(t, globalns, localns, type_params=_sentinel, *, recursive_guard=frozenset()):
464    """Evaluate all forward references in the given type t.
465
466    For use of globalns and localns see the docstring for get_type_hints().
467    recursive_guard is used to prevent infinite recursion with a recursive
468    ForwardRef.
469    """
470    if type_params is _sentinel:
471        _deprecation_warning_for_no_type_params_passed("typing._eval_type")
472        type_params = ()
473    if isinstance(t, ForwardRef):
474        return t._evaluate(globalns, localns, type_params, recursive_guard=recursive_guard)
475    if isinstance(t, (_GenericAlias, GenericAlias, types.UnionType)):
476        if isinstance(t, GenericAlias):
477            args = tuple(
478                ForwardRef(arg) if isinstance(arg, str) else arg
479                for arg in t.__args__
480            )
481            is_unpacked = t.__unpacked__
482            if _should_unflatten_callable_args(t, args):
483                t = t.__origin__[(args[:-1], args[-1])]
484            else:
485                t = t.__origin__[args]
486            if is_unpacked:
487                t = Unpack[t]
488
489        ev_args = tuple(
490            _eval_type(
491                a, globalns, localns, type_params, recursive_guard=recursive_guard
492            )
493            for a in t.__args__
494        )
495        if ev_args == t.__args__:
496            return t
497        if isinstance(t, GenericAlias):
498            return GenericAlias(t.__origin__, ev_args)
499        if isinstance(t, types.UnionType):
500            return functools.reduce(operator.or_, ev_args)
501        else:
502            return t.copy_with(ev_args)
503    return t
504
505
506class _Final:
507    """Mixin to prohibit subclassing."""
508
509    __slots__ = ('__weakref__',)
510
511    def __init_subclass__(cls, /, *args, **kwds):
512        if '_root' not in kwds:
513            raise TypeError("Cannot subclass special typing classes")
514
515
516class _NotIterable:
517    """Mixin to prevent iteration, without being compatible with Iterable.
518
519    That is, we could do::
520
521        def __iter__(self): raise TypeError()
522
523    But this would make users of this mixin duck type-compatible with
524    collections.abc.Iterable - isinstance(foo, Iterable) would be True.
525
526    Luckily, we can instead prevent iteration by setting __iter__ to None, which
527    is treated specially.
528    """
529
530    __slots__ = ()
531    __iter__ = None
532
533
534# Internal indicator of special typing constructs.
535# See __doc__ instance attribute for specific docs.
536class _SpecialForm(_Final, _NotIterable, _root=True):
537    __slots__ = ('_name', '__doc__', '_getitem')
538
539    def __init__(self, getitem):
540        self._getitem = getitem
541        self._name = getitem.__name__
542        self.__doc__ = getitem.__doc__
543
544    def __getattr__(self, item):
545        if item in {'__name__', '__qualname__'}:
546            return self._name
547
548        raise AttributeError(item)
549
550    def __mro_entries__(self, bases):
551        raise TypeError(f"Cannot subclass {self!r}")
552
553    def __repr__(self):
554        return 'typing.' + self._name
555
556    def __reduce__(self):
557        return self._name
558
559    def __call__(self, *args, **kwds):
560        raise TypeError(f"Cannot instantiate {self!r}")
561
562    def __or__(self, other):
563        return Union[self, other]
564
565    def __ror__(self, other):
566        return Union[other, self]
567
568    def __instancecheck__(self, obj):
569        raise TypeError(f"{self} cannot be used with isinstance()")
570
571    def __subclasscheck__(self, cls):
572        raise TypeError(f"{self} cannot be used with issubclass()")
573
574    @_tp_cache
575    def __getitem__(self, parameters):
576        return self._getitem(self, parameters)
577
578
579class _TypedCacheSpecialForm(_SpecialForm, _root=True):
580    def __getitem__(self, parameters):
581        if not isinstance(parameters, tuple):
582            parameters = (parameters,)
583        return self._getitem(self, *parameters)
584
585
586class _AnyMeta(type):
587    def __instancecheck__(self, obj):
588        if self is Any:
589            raise TypeError("typing.Any cannot be used with isinstance()")
590        return super().__instancecheck__(obj)
591
592    def __repr__(self):
593        if self is Any:
594            return "typing.Any"
595        return super().__repr__()  # respect to subclasses
596
597
598class Any(metaclass=_AnyMeta):
599    """Special type indicating an unconstrained type.
600
601    - Any is compatible with every type.
602    - Any assumed to have all methods.
603    - All values assumed to be instances of Any.
604
605    Note that all the above statements are true from the point of view of
606    static type checkers. At runtime, Any should not be used with instance
607    checks.
608    """
609
610    def __new__(cls, *args, **kwargs):
611        if cls is Any:
612            raise TypeError("Any cannot be instantiated")
613        return super().__new__(cls)
614
615
616@_SpecialForm
617def NoReturn(self, parameters):
618    """Special type indicating functions that never return.
619
620    Example::
621
622        from typing import NoReturn
623
624        def stop() -> NoReturn:
625            raise Exception('no way')
626
627    NoReturn can also be used as a bottom type, a type that
628    has no values. Starting in Python 3.11, the Never type should
629    be used for this concept instead. Type checkers should treat the two
630    equivalently.
631    """
632    raise TypeError(f"{self} is not subscriptable")
633
634# This is semantically identical to NoReturn, but it is implemented
635# separately so that type checkers can distinguish between the two
636# if they want.
637@_SpecialForm
638def Never(self, parameters):
639    """The bottom type, a type that has no members.
640
641    This can be used to define a function that should never be
642    called, or a function that never returns::
643
644        from typing import Never
645
646        def never_call_me(arg: Never) -> None:
647            pass
648
649        def int_or_str(arg: int | str) -> None:
650            never_call_me(arg)  # type checker error
651            match arg:
652                case int():
653                    print("It's an int")
654                case str():
655                    print("It's a str")
656                case _:
657                    never_call_me(arg)  # OK, arg is of type Never
658    """
659    raise TypeError(f"{self} is not subscriptable")
660
661
662@_SpecialForm
663def Self(self, parameters):
664    """Used to spell the type of "self" in classes.
665
666    Example::
667
668        from typing import Self
669
670        class Foo:
671            def return_self(self) -> Self:
672                ...
673                return self
674
675    This is especially useful for:
676        - classmethods that are used as alternative constructors
677        - annotating an `__enter__` method which returns self
678    """
679    raise TypeError(f"{self} is not subscriptable")
680
681
682@_SpecialForm
683def LiteralString(self, parameters):
684    """Represents an arbitrary literal string.
685
686    Example::
687
688        from typing import LiteralString
689
690        def run_query(sql: LiteralString) -> None:
691            ...
692
693        def caller(arbitrary_string: str, literal_string: LiteralString) -> None:
694            run_query("SELECT * FROM students")  # OK
695            run_query(literal_string)  # OK
696            run_query("SELECT * FROM " + literal_string)  # OK
697            run_query(arbitrary_string)  # type checker error
698            run_query(  # type checker error
699                f"SELECT * FROM students WHERE name = {arbitrary_string}"
700            )
701
702    Only string literals and other LiteralStrings are compatible
703    with LiteralString. This provides a tool to help prevent
704    security issues such as SQL injection.
705    """
706    raise TypeError(f"{self} is not subscriptable")
707
708
709@_SpecialForm
710def ClassVar(self, parameters):
711    """Special type construct to mark class variables.
712
713    An annotation wrapped in ClassVar indicates that a given
714    attribute is intended to be used as a class variable and
715    should not be set on instances of that class.
716
717    Usage::
718
719        class Starship:
720            stats: ClassVar[dict[str, int]] = {} # class variable
721            damage: int = 10                     # instance variable
722
723    ClassVar accepts only types and cannot be further subscribed.
724
725    Note that ClassVar is not a class itself, and should not
726    be used with isinstance() or issubclass().
727    """
728    item = _type_check(parameters, f'{self} accepts only single type.', allow_special_forms=True)
729    return _GenericAlias(self, (item,))
730
731@_SpecialForm
732def Final(self, parameters):
733    """Special typing construct to indicate final names to type checkers.
734
735    A final name cannot be re-assigned or overridden in a subclass.
736
737    For example::
738
739        MAX_SIZE: Final = 9000
740        MAX_SIZE += 1  # Error reported by type checker
741
742        class Connection:
743            TIMEOUT: Final[int] = 10
744
745        class FastConnector(Connection):
746            TIMEOUT = 1  # Error reported by type checker
747
748    There is no runtime checking of these properties.
749    """
750    item = _type_check(parameters, f'{self} accepts only single type.', allow_special_forms=True)
751    return _GenericAlias(self, (item,))
752
753@_SpecialForm
754def Union(self, parameters):
755    """Union type; Union[X, Y] means either X or Y.
756
757    On Python 3.10 and higher, the | operator
758    can also be used to denote unions;
759    X | Y means the same thing to the type checker as Union[X, Y].
760
761    To define a union, use e.g. Union[int, str]. Details:
762    - The arguments must be types and there must be at least one.
763    - None as an argument is a special case and is replaced by
764      type(None).
765    - Unions of unions are flattened, e.g.::
766
767        assert Union[Union[int, str], float] == Union[int, str, float]
768
769    - Unions of a single argument vanish, e.g.::
770
771        assert Union[int] == int  # The constructor actually returns int
772
773    - Redundant arguments are skipped, e.g.::
774
775        assert Union[int, str, int] == Union[int, str]
776
777    - When comparing unions, the argument order is ignored, e.g.::
778
779        assert Union[int, str] == Union[str, int]
780
781    - You cannot subclass or instantiate a union.
782    - You can use Optional[X] as a shorthand for Union[X, None].
783    """
784    if parameters == ():
785        raise TypeError("Cannot take a Union of no types.")
786    if not isinstance(parameters, tuple):
787        parameters = (parameters,)
788    msg = "Union[arg, ...]: each arg must be a type."
789    parameters = tuple(_type_check(p, msg) for p in parameters)
790    parameters = _remove_dups_flatten(parameters)
791    if len(parameters) == 1:
792        return parameters[0]
793    if len(parameters) == 2 and type(None) in parameters:
794        return _UnionGenericAlias(self, parameters, name="Optional")
795    return _UnionGenericAlias(self, parameters)
796
797def _make_union(left, right):
798    """Used from the C implementation of TypeVar.
799
800    TypeVar.__or__ calls this instead of returning types.UnionType
801    because we want to allow unions between TypeVars and strings
802    (forward references).
803    """
804    return Union[left, right]
805
806@_SpecialForm
807def Optional(self, parameters):
808    """Optional[X] is equivalent to Union[X, None]."""
809    arg = _type_check(parameters, f"{self} requires a single type.")
810    return Union[arg, type(None)]
811
812@_TypedCacheSpecialForm
813@_tp_cache(typed=True)
814def Literal(self, *parameters):
815    """Special typing form to define literal types (a.k.a. value types).
816
817    This form can be used to indicate to type checkers that the corresponding
818    variable or function parameter has a value equivalent to the provided
819    literal (or one of several literals)::
820
821        def validate_simple(data: Any) -> Literal[True]:  # always returns True
822            ...
823
824        MODE = Literal['r', 'rb', 'w', 'wb']
825        def open_helper(file: str, mode: MODE) -> str:
826            ...
827
828        open_helper('/some/path', 'r')  # Passes type check
829        open_helper('/other/path', 'typo')  # Error in type checker
830
831    Literal[...] cannot be subclassed. At runtime, an arbitrary value
832    is allowed as type argument to Literal[...], but type checkers may
833    impose restrictions.
834    """
835    # There is no '_type_check' call because arguments to Literal[...] are
836    # values, not types.
837    parameters = _flatten_literal_params(parameters)
838
839    try:
840        parameters = tuple(p for p, _ in _deduplicate(list(_value_and_type_iter(parameters))))
841    except TypeError:  # unhashable parameters
842        pass
843
844    return _LiteralGenericAlias(self, parameters)
845
846
847@_SpecialForm
848def TypeAlias(self, parameters):
849    """Special form for marking type aliases.
850
851    Use TypeAlias to indicate that an assignment should
852    be recognized as a proper type alias definition by type
853    checkers.
854
855    For example::
856
857        Predicate: TypeAlias = Callable[..., bool]
858
859    It's invalid when used anywhere except as in the example above.
860    """
861    raise TypeError(f"{self} is not subscriptable")
862
863
864@_SpecialForm
865def Concatenate(self, parameters):
866    """Special form for annotating higher-order functions.
867
868    ``Concatenate`` can be used in conjunction with ``ParamSpec`` and
869    ``Callable`` to represent a higher-order function which adds, removes or
870    transforms the parameters of a callable.
871
872    For example::
873
874        Callable[Concatenate[int, P], int]
875
876    See PEP 612 for detailed information.
877    """
878    if parameters == ():
879        raise TypeError("Cannot take a Concatenate of no types.")
880    if not isinstance(parameters, tuple):
881        parameters = (parameters,)
882    if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)):
883        raise TypeError("The last parameter to Concatenate should be a "
884                        "ParamSpec variable or ellipsis.")
885    msg = "Concatenate[arg, ...]: each arg must be a type."
886    parameters = (*(_type_check(p, msg) for p in parameters[:-1]), parameters[-1])
887    return _ConcatenateGenericAlias(self, parameters)
888
889
890@_SpecialForm
891def TypeGuard(self, parameters):
892    """Special typing construct for marking user-defined type predicate functions.
893
894    ``TypeGuard`` can be used to annotate the return type of a user-defined
895    type predicate function.  ``TypeGuard`` only accepts a single type argument.
896    At runtime, functions marked this way should return a boolean.
897
898    ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
899    type checkers to determine a more precise type of an expression within a
900    program's code flow.  Usually type narrowing is done by analyzing
901    conditional code flow and applying the narrowing to a block of code.  The
902    conditional expression here is sometimes referred to as a "type predicate".
903
904    Sometimes it would be convenient to use a user-defined boolean function
905    as a type predicate.  Such a function should use ``TypeGuard[...]`` or
906    ``TypeIs[...]`` as its return type to alert static type checkers to
907    this intention. ``TypeGuard`` should be used over ``TypeIs`` when narrowing
908    from an incompatible type (e.g., ``list[object]`` to ``list[int]``) or when
909    the function does not return ``True`` for all instances of the narrowed type.
910
911    Using  ``-> TypeGuard[NarrowedType]`` tells the static type checker that
912    for a given function:
913
914    1. The return value is a boolean.
915    2. If the return value is ``True``, the type of its argument
916       is ``NarrowedType``.
917
918    For example::
919
920         def is_str_list(val: list[object]) -> TypeGuard[list[str]]:
921             '''Determines whether all objects in the list are strings'''
922             return all(isinstance(x, str) for x in val)
923
924         def func1(val: list[object]):
925             if is_str_list(val):
926                 # Type of ``val`` is narrowed to ``list[str]``.
927                 print(" ".join(val))
928             else:
929                 # Type of ``val`` remains as ``list[object]``.
930                 print("Not a list of strings!")
931
932    Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
933    form of ``TypeA`` (it can even be a wider form) and this may lead to
934    type-unsafe results.  The main reason is to allow for things like
935    narrowing ``list[object]`` to ``list[str]`` even though the latter is not
936    a subtype of the former, since ``list`` is invariant.  The responsibility of
937    writing type-safe type predicates is left to the user.
938
939    ``TypeGuard`` also works with type variables.  For more information, see
940    PEP 647 (User-Defined Type Guards).
941    """
942    item = _type_check(parameters, f'{self} accepts only single type.')
943    return _GenericAlias(self, (item,))
944
945
946@_SpecialForm
947def TypeIs(self, parameters):
948    """Special typing construct for marking user-defined type predicate functions.
949
950    ``TypeIs`` can be used to annotate the return type of a user-defined
951    type predicate function.  ``TypeIs`` only accepts a single type argument.
952    At runtime, functions marked this way should return a boolean and accept
953    at least one argument.
954
955    ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
956    type checkers to determine a more precise type of an expression within a
957    program's code flow.  Usually type narrowing is done by analyzing
958    conditional code flow and applying the narrowing to a block of code.  The
959    conditional expression here is sometimes referred to as a "type predicate".
960
961    Sometimes it would be convenient to use a user-defined boolean function
962    as a type predicate.  Such a function should use ``TypeIs[...]`` or
963    ``TypeGuard[...]`` as its return type to alert static type checkers to
964    this intention.  ``TypeIs`` usually has more intuitive behavior than
965    ``TypeGuard``, but it cannot be used when the input and output types
966    are incompatible (e.g., ``list[object]`` to ``list[int]``) or when the
967    function does not return ``True`` for all instances of the narrowed type.
968
969    Using  ``-> TypeIs[NarrowedType]`` tells the static type checker that for
970    a given function:
971
972    1. The return value is a boolean.
973    2. If the return value is ``True``, the type of its argument
974       is the intersection of the argument's original type and
975       ``NarrowedType``.
976    3. If the return value is ``False``, the type of its argument
977       is narrowed to exclude ``NarrowedType``.
978
979    For example::
980
981        from typing import assert_type, final, TypeIs
982
983        class Parent: pass
984        class Child(Parent): pass
985        @final
986        class Unrelated: pass
987
988        def is_parent(val: object) -> TypeIs[Parent]:
989            return isinstance(val, Parent)
990
991        def run(arg: Child | Unrelated):
992            if is_parent(arg):
993                # Type of ``arg`` is narrowed to the intersection
994                # of ``Parent`` and ``Child``, which is equivalent to
995                # ``Child``.
996                assert_type(arg, Child)
997            else:
998                # Type of ``arg`` is narrowed to exclude ``Parent``,
999                # so only ``Unrelated`` is left.
1000                assert_type(arg, Unrelated)
1001
1002    The type inside ``TypeIs`` must be consistent with the type of the
1003    function's argument; if it is not, static type checkers will raise
1004    an error.  An incorrectly written ``TypeIs`` function can lead to
1005    unsound behavior in the type system; it is the user's responsibility
1006    to write such functions in a type-safe manner.
1007
1008    ``TypeIs`` also works with type variables.  For more information, see
1009    PEP 742 (Narrowing types with ``TypeIs``).
1010    """
1011    item = _type_check(parameters, f'{self} accepts only single type.')
1012    return _GenericAlias(self, (item,))
1013
1014
1015class ForwardRef(_Final, _root=True):
1016    """Internal wrapper to hold a forward reference."""
1017
1018    __slots__ = ('__forward_arg__', '__forward_code__',
1019                 '__forward_evaluated__', '__forward_value__',
1020                 '__forward_is_argument__', '__forward_is_class__',
1021                 '__forward_module__')
1022
1023    def __init__(self, arg, is_argument=True, module=None, *, is_class=False):
1024        if not isinstance(arg, str):
1025            raise TypeError(f"Forward reference must be a string -- got {arg!r}")
1026
1027        # If we do `def f(*args: *Ts)`, then we'll have `arg = '*Ts'`.
1028        # Unfortunately, this isn't a valid expression on its own, so we
1029        # do the unpacking manually.
1030        if arg.startswith('*'):
1031            arg_to_compile = f'({arg},)[0]'  # E.g. (*Ts,)[0] or (*tuple[int, int],)[0]
1032        else:
1033            arg_to_compile = arg
1034        try:
1035            code = compile(arg_to_compile, '<string>', 'eval')
1036        except SyntaxError:
1037            raise SyntaxError(f"Forward reference must be an expression -- got {arg!r}")
1038
1039        self.__forward_arg__ = arg
1040        self.__forward_code__ = code
1041        self.__forward_evaluated__ = False
1042        self.__forward_value__ = None
1043        self.__forward_is_argument__ = is_argument
1044        self.__forward_is_class__ = is_class
1045        self.__forward_module__ = module
1046
1047    def _evaluate(self, globalns, localns, type_params=_sentinel, *, recursive_guard):
1048        if type_params is _sentinel:
1049            _deprecation_warning_for_no_type_params_passed("typing.ForwardRef._evaluate")
1050            type_params = ()
1051        if self.__forward_arg__ in recursive_guard:
1052            return self
1053        if not self.__forward_evaluated__ or localns is not globalns:
1054            if globalns is None and localns is None:
1055                globalns = localns = {}
1056            elif globalns is None:
1057                globalns = localns
1058            elif localns is None:
1059                localns = globalns
1060            if self.__forward_module__ is not None:
1061                globalns = getattr(
1062                    sys.modules.get(self.__forward_module__, None), '__dict__', globalns
1063                )
1064
1065            # type parameters require some special handling,
1066            # as they exist in their own scope
1067            # but `eval()` does not have a dedicated parameter for that scope.
1068            # For classes, names in type parameter scopes should override
1069            # names in the global scope (which here are called `localns`!),
1070            # but should in turn be overridden by names in the class scope
1071            # (which here are called `globalns`!)
1072            if type_params:
1073                globalns, localns = dict(globalns), dict(localns)
1074                for param in type_params:
1075                    param_name = param.__name__
1076                    if not self.__forward_is_class__ or param_name not in globalns:
1077                        globalns[param_name] = param
1078                        localns.pop(param_name, None)
1079
1080            type_ = _type_check(
1081                eval(self.__forward_code__, globalns, localns),
1082                "Forward references must evaluate to types.",
1083                is_argument=self.__forward_is_argument__,
1084                allow_special_forms=self.__forward_is_class__,
1085            )
1086            self.__forward_value__ = _eval_type(
1087                type_,
1088                globalns,
1089                localns,
1090                type_params,
1091                recursive_guard=(recursive_guard | {self.__forward_arg__}),
1092            )
1093            self.__forward_evaluated__ = True
1094        return self.__forward_value__
1095
1096    def __eq__(self, other):
1097        if not isinstance(other, ForwardRef):
1098            return NotImplemented
1099        if self.__forward_evaluated__ and other.__forward_evaluated__:
1100            return (self.__forward_arg__ == other.__forward_arg__ and
1101                    self.__forward_value__ == other.__forward_value__)
1102        return (self.__forward_arg__ == other.__forward_arg__ and
1103                self.__forward_module__ == other.__forward_module__)
1104
1105    def __hash__(self):
1106        return hash((self.__forward_arg__, self.__forward_module__))
1107
1108    def __or__(self, other):
1109        return Union[self, other]
1110
1111    def __ror__(self, other):
1112        return Union[other, self]
1113
1114    def __repr__(self):
1115        if self.__forward_module__ is None:
1116            module_repr = ''
1117        else:
1118            module_repr = f', module={self.__forward_module__!r}'
1119        return f'ForwardRef({self.__forward_arg__!r}{module_repr})'
1120
1121
1122def _is_unpacked_typevartuple(x: Any) -> bool:
1123    return ((not isinstance(x, type)) and
1124            getattr(x, '__typing_is_unpacked_typevartuple__', False))
1125
1126
1127def _is_typevar_like(x: Any) -> bool:
1128    return isinstance(x, (TypeVar, ParamSpec)) or _is_unpacked_typevartuple(x)
1129
1130
1131def _typevar_subst(self, arg):
1132    msg = "Parameters to generic types must be types."
1133    arg = _type_check(arg, msg, is_argument=True)
1134    if ((isinstance(arg, _GenericAlias) and arg.__origin__ is Unpack) or
1135        (isinstance(arg, GenericAlias) and getattr(arg, '__unpacked__', False))):
1136        raise TypeError(f"{arg} is not valid as type argument")
1137    return arg
1138
1139
1140def _typevartuple_prepare_subst(self, alias, args):
1141    params = alias.__parameters__
1142    typevartuple_index = params.index(self)
1143    for param in params[typevartuple_index + 1:]:
1144        if isinstance(param, TypeVarTuple):
1145            raise TypeError(f"More than one TypeVarTuple parameter in {alias}")
1146
1147    alen = len(args)
1148    plen = len(params)
1149    left = typevartuple_index
1150    right = plen - typevartuple_index - 1
1151    var_tuple_index = None
1152    fillarg = None
1153    for k, arg in enumerate(args):
1154        if not isinstance(arg, type):
1155            subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
1156            if subargs and len(subargs) == 2 and subargs[-1] is ...:
1157                if var_tuple_index is not None:
1158                    raise TypeError("More than one unpacked arbitrary-length tuple argument")
1159                var_tuple_index = k
1160                fillarg = subargs[0]
1161    if var_tuple_index is not None:
1162        left = min(left, var_tuple_index)
1163        right = min(right, alen - var_tuple_index - 1)
1164    elif left + right > alen:
1165        raise TypeError(f"Too few arguments for {alias};"
1166                        f" actual {alen}, expected at least {plen-1}")
1167    if left == alen - right and self.has_default():
1168        replacement = _unpack_args(self.__default__)
1169    else:
1170        replacement = args[left: alen - right]
1171
1172    return (
1173        *args[:left],
1174        *([fillarg]*(typevartuple_index - left)),
1175        replacement,
1176        *([fillarg]*(plen - right - left - typevartuple_index - 1)),
1177        *args[alen - right:],
1178    )
1179
1180
1181def _paramspec_subst(self, arg):
1182    if isinstance(arg, (list, tuple)):
1183        arg = tuple(_type_check(a, "Expected a type.") for a in arg)
1184    elif not _is_param_expr(arg):
1185        raise TypeError(f"Expected a list of types, an ellipsis, "
1186                        f"ParamSpec, or Concatenate. Got {arg}")
1187    return arg
1188
1189
1190def _paramspec_prepare_subst(self, alias, args):
1191    params = alias.__parameters__
1192    i = params.index(self)
1193    if i == len(args) and self.has_default():
1194        args = [*args, self.__default__]
1195    if i >= len(args):
1196        raise TypeError(f"Too few arguments for {alias}")
1197    # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
1198    if len(params) == 1 and not _is_param_expr(args[0]):
1199        assert i == 0
1200        args = (args,)
1201    # Convert lists to tuples to help other libraries cache the results.
1202    elif isinstance(args[i], list):
1203        args = (*args[:i], tuple(args[i]), *args[i+1:])
1204    return args
1205
1206
1207@_tp_cache
1208def _generic_class_getitem(cls, args):
1209    """Parameterizes a generic class.
1210
1211    At least, parameterizing a generic class is the *main* thing this method
1212    does. For example, for some generic class `Foo`, this is called when we
1213    do `Foo[int]` - there, with `cls=Foo` and `args=int`.
1214
1215    However, note that this method is also called when defining generic
1216    classes in the first place with `class Foo(Generic[T]): ...`.
1217    """
1218    if not isinstance(args, tuple):
1219        args = (args,)
1220
1221    args = tuple(_type_convert(p) for p in args)
1222    is_generic_or_protocol = cls in (Generic, Protocol)
1223
1224    if is_generic_or_protocol:
1225        # Generic and Protocol can only be subscripted with unique type variables.
1226        if not args:
1227            raise TypeError(
1228                f"Parameter list to {cls.__qualname__}[...] cannot be empty"
1229            )
1230        if not all(_is_typevar_like(p) for p in args):
1231            raise TypeError(
1232                f"Parameters to {cls.__name__}[...] must all be type variables "
1233                f"or parameter specification variables.")
1234        if len(set(args)) != len(args):
1235            raise TypeError(
1236                f"Parameters to {cls.__name__}[...] must all be unique")
1237    else:
1238        # Subscripting a regular Generic subclass.
1239        for param in cls.__parameters__:
1240            prepare = getattr(param, '__typing_prepare_subst__', None)
1241            if prepare is not None:
1242                args = prepare(cls, args)
1243        _check_generic_specialization(cls, args)
1244
1245        new_args = []
1246        for param, new_arg in zip(cls.__parameters__, args):
1247            if isinstance(param, TypeVarTuple):
1248                new_args.extend(new_arg)
1249            else:
1250                new_args.append(new_arg)
1251        args = tuple(new_args)
1252
1253    return _GenericAlias(cls, args)
1254
1255
1256def _generic_init_subclass(cls, *args, **kwargs):
1257    super(Generic, cls).__init_subclass__(*args, **kwargs)
1258    tvars = []
1259    if '__orig_bases__' in cls.__dict__:
1260        error = Generic in cls.__orig_bases__
1261    else:
1262        error = (Generic in cls.__bases__ and
1263                    cls.__name__ != 'Protocol' and
1264                    type(cls) != _TypedDictMeta)
1265    if error:
1266        raise TypeError("Cannot inherit from plain Generic")
1267    if '__orig_bases__' in cls.__dict__:
1268        tvars = _collect_type_parameters(cls.__orig_bases__)
1269        # Look for Generic[T1, ..., Tn].
1270        # If found, tvars must be a subset of it.
1271        # If not found, tvars is it.
1272        # Also check for and reject plain Generic,
1273        # and reject multiple Generic[...].
1274        gvars = None
1275        for base in cls.__orig_bases__:
1276            if (isinstance(base, _GenericAlias) and
1277                    base.__origin__ is Generic):
1278                if gvars is not None:
1279                    raise TypeError(
1280                        "Cannot inherit from Generic[...] multiple times.")
1281                gvars = base.__parameters__
1282        if gvars is not None:
1283            tvarset = set(tvars)
1284            gvarset = set(gvars)
1285            if not tvarset <= gvarset:
1286                s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
1287                s_args = ', '.join(str(g) for g in gvars)
1288                raise TypeError(f"Some type variables ({s_vars}) are"
1289                                f" not listed in Generic[{s_args}]")
1290            tvars = gvars
1291    cls.__parameters__ = tuple(tvars)
1292
1293
1294def _is_dunder(attr):
1295    return attr.startswith('__') and attr.endswith('__')
1296
1297class _BaseGenericAlias(_Final, _root=True):
1298    """The central part of the internal API.
1299
1300    This represents a generic version of type 'origin' with type arguments 'params'.
1301    There are two kind of these aliases: user defined and special. The special ones
1302    are wrappers around builtin collections and ABCs in collections.abc. These must
1303    have 'name' always set. If 'inst' is False, then the alias can't be instantiated;
1304    this is used by e.g. typing.List and typing.Dict.
1305    """
1306
1307    def __init__(self, origin, *, inst=True, name=None):
1308        self._inst = inst
1309        self._name = name
1310        self.__origin__ = origin
1311        self.__slots__ = None  # This is not documented.
1312
1313    def __call__(self, *args, **kwargs):
1314        if not self._inst:
1315            raise TypeError(f"Type {self._name} cannot be instantiated; "
1316                            f"use {self.__origin__.__name__}() instead")
1317        result = self.__origin__(*args, **kwargs)
1318        try:
1319            result.__orig_class__ = self
1320        # Some objects raise TypeError (or something even more exotic)
1321        # if you try to set attributes on them; we guard against that here
1322        except Exception:
1323            pass
1324        return result
1325
1326    def __mro_entries__(self, bases):
1327        res = []
1328        if self.__origin__ not in bases:
1329            res.append(self.__origin__)
1330
1331        # Check if any base that occurs after us in `bases` is either itself a
1332        # subclass of Generic, or something which will add a subclass of Generic
1333        # to `__bases__` via its `__mro_entries__`. If not, add Generic
1334        # ourselves. The goal is to ensure that Generic (or a subclass) will
1335        # appear exactly once in the final bases tuple. If we let it appear
1336        # multiple times, we risk "can't form a consistent MRO" errors.
1337        i = bases.index(self)
1338        for b in bases[i+1:]:
1339            if isinstance(b, _BaseGenericAlias):
1340                break
1341            if not isinstance(b, type):
1342                meth = getattr(b, "__mro_entries__", None)
1343                new_bases = meth(bases) if meth else None
1344                if (
1345                    isinstance(new_bases, tuple) and
1346                    any(
1347                        isinstance(b2, type) and issubclass(b2, Generic)
1348                        for b2 in new_bases
1349                    )
1350                ):
1351                    break
1352            elif issubclass(b, Generic):
1353                break
1354        else:
1355            res.append(Generic)
1356        return tuple(res)
1357
1358    def __getattr__(self, attr):
1359        if attr in {'__name__', '__qualname__'}:
1360            return self._name or self.__origin__.__name__
1361
1362        # We are careful for copy and pickle.
1363        # Also for simplicity we don't relay any dunder names
1364        if '__origin__' in self.__dict__ and not _is_dunder(attr):
1365            return getattr(self.__origin__, attr)
1366        raise AttributeError(attr)
1367
1368    def __setattr__(self, attr, val):
1369        if _is_dunder(attr) or attr in {'_name', '_inst', '_nparams', '_defaults'}:
1370            super().__setattr__(attr, val)
1371        else:
1372            setattr(self.__origin__, attr, val)
1373
1374    def __instancecheck__(self, obj):
1375        return self.__subclasscheck__(type(obj))
1376
1377    def __subclasscheck__(self, cls):
1378        raise TypeError("Subscripted generics cannot be used with"
1379                        " class and instance checks")
1380
1381    def __dir__(self):
1382        return list(set(super().__dir__()
1383                + [attr for attr in dir(self.__origin__) if not _is_dunder(attr)]))
1384
1385
1386# Special typing constructs Union, Optional, Generic, Callable and Tuple
1387# use three special attributes for internal bookkeeping of generic types:
1388# * __parameters__ is a tuple of unique free type parameters of a generic
1389#   type, for example, Dict[T, T].__parameters__ == (T,);
1390# * __origin__ keeps a reference to a type that was subscripted,
1391#   e.g., Union[T, int].__origin__ == Union, or the non-generic version of
1392#   the type.
1393# * __args__ is a tuple of all arguments used in subscripting,
1394#   e.g., Dict[T, int].__args__ == (T, int).
1395
1396
1397class _GenericAlias(_BaseGenericAlias, _root=True):
1398    # The type of parameterized generics.
1399    #
1400    # That is, for example, `type(List[int])` is `_GenericAlias`.
1401    #
1402    # Objects which are instances of this class include:
1403    # * Parameterized container types, e.g. `Tuple[int]`, `List[int]`.
1404    #  * Note that native container types, e.g. `tuple`, `list`, use
1405    #    `types.GenericAlias` instead.
1406    # * Parameterized classes:
1407    #     class C[T]: pass
1408    #     # C[int] is a _GenericAlias
1409    # * `Callable` aliases, generic `Callable` aliases, and
1410    #   parameterized `Callable` aliases:
1411    #     T = TypeVar('T')
1412    #     # _CallableGenericAlias inherits from _GenericAlias.
1413    #     A = Callable[[], None]  # _CallableGenericAlias
1414    #     B = Callable[[T], None]  # _CallableGenericAlias
1415    #     C = B[int]  # _CallableGenericAlias
1416    # * Parameterized `Final`, `ClassVar`, `TypeGuard`, and `TypeIs`:
1417    #     # All _GenericAlias
1418    #     Final[int]
1419    #     ClassVar[float]
1420    #     TypeGuard[bool]
1421    #     TypeIs[range]
1422
1423    def __init__(self, origin, args, *, inst=True, name=None):
1424        super().__init__(origin, inst=inst, name=name)
1425        if not isinstance(args, tuple):
1426            args = (args,)
1427        self.__args__ = tuple(... if a is _TypingEllipsis else
1428                              a for a in args)
1429        enforce_default_ordering = origin in (Generic, Protocol)
1430        self.__parameters__ = _collect_type_parameters(
1431            args,
1432            enforce_default_ordering=enforce_default_ordering,
1433        )
1434        if not name:
1435            self.__module__ = origin.__module__
1436
1437    def __eq__(self, other):
1438        if not isinstance(other, _GenericAlias):
1439            return NotImplemented
1440        return (self.__origin__ == other.__origin__
1441                and self.__args__ == other.__args__)
1442
1443    def __hash__(self):
1444        return hash((self.__origin__, self.__args__))
1445
1446    def __or__(self, right):
1447        return Union[self, right]
1448
1449    def __ror__(self, left):
1450        return Union[left, self]
1451
1452    @_tp_cache
1453    def __getitem__(self, args):
1454        # Parameterizes an already-parameterized object.
1455        #
1456        # For example, we arrive here doing something like:
1457        #   T1 = TypeVar('T1')
1458        #   T2 = TypeVar('T2')
1459        #   T3 = TypeVar('T3')
1460        #   class A(Generic[T1]): pass
1461        #   B = A[T2]  # B is a _GenericAlias
1462        #   C = B[T3]  # Invokes _GenericAlias.__getitem__
1463        #
1464        # We also arrive here when parameterizing a generic `Callable` alias:
1465        #   T = TypeVar('T')
1466        #   C = Callable[[T], None]
1467        #   C[int]  # Invokes _GenericAlias.__getitem__
1468
1469        if self.__origin__ in (Generic, Protocol):
1470            # Can't subscript Generic[...] or Protocol[...].
1471            raise TypeError(f"Cannot subscript already-subscripted {self}")
1472        if not self.__parameters__:
1473            raise TypeError(f"{self} is not a generic class")
1474
1475        # Preprocess `args`.
1476        if not isinstance(args, tuple):
1477            args = (args,)
1478        args = _unpack_args(*(_type_convert(p) for p in args))
1479        new_args = self._determine_new_args(args)
1480        r = self.copy_with(new_args)
1481        return r
1482
1483    def _determine_new_args(self, args):
1484        # Determines new __args__ for __getitem__.
1485        #
1486        # For example, suppose we had:
1487        #   T1 = TypeVar('T1')
1488        #   T2 = TypeVar('T2')
1489        #   class A(Generic[T1, T2]): pass
1490        #   T3 = TypeVar('T3')
1491        #   B = A[int, T3]
1492        #   C = B[str]
1493        # `B.__args__` is `(int, T3)`, so `C.__args__` should be `(int, str)`.
1494        # Unfortunately, this is harder than it looks, because if `T3` is
1495        # anything more exotic than a plain `TypeVar`, we need to consider
1496        # edge cases.
1497
1498        params = self.__parameters__
1499        # In the example above, this would be {T3: str}
1500        for param in params:
1501            prepare = getattr(param, '__typing_prepare_subst__', None)
1502            if prepare is not None:
1503                args = prepare(self, args)
1504        alen = len(args)
1505        plen = len(params)
1506        if alen != plen:
1507            raise TypeError(f"Too {'many' if alen > plen else 'few'} arguments for {self};"
1508                            f" actual {alen}, expected {plen}")
1509        new_arg_by_param = dict(zip(params, args))
1510        return tuple(self._make_substitution(self.__args__, new_arg_by_param))
1511
1512    def _make_substitution(self, args, new_arg_by_param):
1513        """Create a list of new type arguments."""
1514        new_args = []
1515        for old_arg in args:
1516            if isinstance(old_arg, type):
1517                new_args.append(old_arg)
1518                continue
1519
1520            substfunc = getattr(old_arg, '__typing_subst__', None)
1521            if substfunc:
1522                new_arg = substfunc(new_arg_by_param[old_arg])
1523            else:
1524                subparams = getattr(old_arg, '__parameters__', ())
1525                if not subparams:
1526                    new_arg = old_arg
1527                else:
1528                    subargs = []
1529                    for x in subparams:
1530                        if isinstance(x, TypeVarTuple):
1531                            subargs.extend(new_arg_by_param[x])
1532                        else:
1533                            subargs.append(new_arg_by_param[x])
1534                    new_arg = old_arg[tuple(subargs)]
1535
1536            if self.__origin__ == collections.abc.Callable and isinstance(new_arg, tuple):
1537                # Consider the following `Callable`.
1538                #   C = Callable[[int], str]
1539                # Here, `C.__args__` should be (int, str) - NOT ([int], str).
1540                # That means that if we had something like...
1541                #   P = ParamSpec('P')
1542                #   T = TypeVar('T')
1543                #   C = Callable[P, T]
1544                #   D = C[[int, str], float]
1545                # ...we need to be careful; `new_args` should end up as
1546                # `(int, str, float)` rather than `([int, str], float)`.
1547                new_args.extend(new_arg)
1548            elif _is_unpacked_typevartuple(old_arg):
1549                # Consider the following `_GenericAlias`, `B`:
1550                #   class A(Generic[*Ts]): ...
1551                #   B = A[T, *Ts]
1552                # If we then do:
1553                #   B[float, int, str]
1554                # The `new_arg` corresponding to `T` will be `float`, and the
1555                # `new_arg` corresponding to `*Ts` will be `(int, str)`. We
1556                # should join all these types together in a flat list
1557                # `(float, int, str)` - so again, we should `extend`.
1558                new_args.extend(new_arg)
1559            elif isinstance(old_arg, tuple):
1560                # Corner case:
1561                #    P = ParamSpec('P')
1562                #    T = TypeVar('T')
1563                #    class Base(Generic[P]): ...
1564                # Can be substituted like this:
1565                #    X = Base[[int, T]]
1566                # In this case, `old_arg` will be a tuple:
1567                new_args.append(
1568                    tuple(self._make_substitution(old_arg, new_arg_by_param)),
1569                )
1570            else:
1571                new_args.append(new_arg)
1572        return new_args
1573
1574    def copy_with(self, args):
1575        return self.__class__(self.__origin__, args, name=self._name, inst=self._inst)
1576
1577    def __repr__(self):
1578        if self._name:
1579            name = 'typing.' + self._name
1580        else:
1581            name = _type_repr(self.__origin__)
1582        if self.__args__:
1583            args = ", ".join([_type_repr(a) for a in self.__args__])
1584        else:
1585            # To ensure the repr is eval-able.
1586            args = "()"
1587        return f'{name}[{args}]'
1588
1589    def __reduce__(self):
1590        if self._name:
1591            origin = globals()[self._name]
1592        else:
1593            origin = self.__origin__
1594        args = tuple(self.__args__)
1595        if len(args) == 1 and not isinstance(args[0], tuple):
1596            args, = args
1597        return operator.getitem, (origin, args)
1598
1599    def __mro_entries__(self, bases):
1600        if isinstance(self.__origin__, _SpecialForm):
1601            raise TypeError(f"Cannot subclass {self!r}")
1602
1603        if self._name:  # generic version of an ABC or built-in class
1604            return super().__mro_entries__(bases)
1605        if self.__origin__ is Generic:
1606            if Protocol in bases:
1607                return ()
1608            i = bases.index(self)
1609            for b in bases[i+1:]:
1610                if isinstance(b, _BaseGenericAlias) and b is not self:
1611                    return ()
1612        return (self.__origin__,)
1613
1614    def __iter__(self):
1615        yield Unpack[self]
1616
1617
1618# _nparams is the number of accepted parameters, e.g. 0 for Hashable,
1619# 1 for List and 2 for Dict.  It may be -1 if variable number of
1620# parameters are accepted (needs custom __getitem__).
1621
1622class _SpecialGenericAlias(_NotIterable, _BaseGenericAlias, _root=True):
1623    def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
1624        if name is None:
1625            name = origin.__name__
1626        super().__init__(origin, inst=inst, name=name)
1627        self._nparams = nparams
1628        self._defaults = defaults
1629        if origin.__module__ == 'builtins':
1630            self.__doc__ = f'A generic version of {origin.__qualname__}.'
1631        else:
1632            self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.'
1633
1634    @_tp_cache
1635    def __getitem__(self, params):
1636        if not isinstance(params, tuple):
1637            params = (params,)
1638        msg = "Parameters to generic types must be types."
1639        params = tuple(_type_check(p, msg) for p in params)
1640        if (self._defaults
1641            and len(params) < self._nparams
1642            and len(params) + len(self._defaults) >= self._nparams
1643        ):
1644            params = (*params, *self._defaults[len(params) - self._nparams:])
1645        actual_len = len(params)
1646
1647        if actual_len != self._nparams:
1648            if self._defaults:
1649                expected = f"at least {self._nparams - len(self._defaults)}"
1650            else:
1651                expected = str(self._nparams)
1652            if not self._nparams:
1653                raise TypeError(f"{self} is not a generic class")
1654            raise TypeError(f"Too {'many' if actual_len > self._nparams else 'few'} arguments for {self};"
1655                            f" actual {actual_len}, expected {expected}")
1656        return self.copy_with(params)
1657
1658    def copy_with(self, params):
1659        return _GenericAlias(self.__origin__, params,
1660                             name=self._name, inst=self._inst)
1661
1662    def __repr__(self):
1663        return 'typing.' + self._name
1664
1665    def __subclasscheck__(self, cls):
1666        if isinstance(cls, _SpecialGenericAlias):
1667            return issubclass(cls.__origin__, self.__origin__)
1668        if not isinstance(cls, _GenericAlias):
1669            return issubclass(cls, self.__origin__)
1670        return super().__subclasscheck__(cls)
1671
1672    def __reduce__(self):
1673        return self._name
1674
1675    def __or__(self, right):
1676        return Union[self, right]
1677
1678    def __ror__(self, left):
1679        return Union[left, self]
1680
1681
1682class _DeprecatedGenericAlias(_SpecialGenericAlias, _root=True):
1683    def __init__(
1684        self, origin, nparams, *, removal_version, inst=True, name=None
1685    ):
1686        super().__init__(origin, nparams, inst=inst, name=name)
1687        self._removal_version = removal_version
1688
1689    def __instancecheck__(self, inst):
1690        import warnings
1691        warnings._deprecated(
1692            f"{self.__module__}.{self._name}", remove=self._removal_version
1693        )
1694        return super().__instancecheck__(inst)
1695
1696
1697class _CallableGenericAlias(_NotIterable, _GenericAlias, _root=True):
1698    def __repr__(self):
1699        assert self._name == 'Callable'
1700        args = self.__args__
1701        if len(args) == 2 and _is_param_expr(args[0]):
1702            return super().__repr__()
1703        return (f'typing.Callable'
1704                f'[[{", ".join([_type_repr(a) for a in args[:-1]])}], '
1705                f'{_type_repr(args[-1])}]')
1706
1707    def __reduce__(self):
1708        args = self.__args__
1709        if not (len(args) == 2 and _is_param_expr(args[0])):
1710            args = list(args[:-1]), args[-1]
1711        return operator.getitem, (Callable, args)
1712
1713
1714class _CallableType(_SpecialGenericAlias, _root=True):
1715    def copy_with(self, params):
1716        return _CallableGenericAlias(self.__origin__, params,
1717                                     name=self._name, inst=self._inst)
1718
1719    def __getitem__(self, params):
1720        if not isinstance(params, tuple) or len(params) != 2:
1721            raise TypeError("Callable must be used as "
1722                            "Callable[[arg, ...], result].")
1723        args, result = params
1724        # This relaxes what args can be on purpose to allow things like
1725        # PEP 612 ParamSpec.  Responsibility for whether a user is using
1726        # Callable[...] properly is deferred to static type checkers.
1727        if isinstance(args, list):
1728            params = (tuple(args), result)
1729        else:
1730            params = (args, result)
1731        return self.__getitem_inner__(params)
1732
1733    @_tp_cache
1734    def __getitem_inner__(self, params):
1735        args, result = params
1736        msg = "Callable[args, result]: result must be a type."
1737        result = _type_check(result, msg)
1738        if args is Ellipsis:
1739            return self.copy_with((_TypingEllipsis, result))
1740        if not isinstance(args, tuple):
1741            args = (args,)
1742        args = tuple(_type_convert(arg) for arg in args)
1743        params = args + (result,)
1744        return self.copy_with(params)
1745
1746
1747class _TupleType(_SpecialGenericAlias, _root=True):
1748    @_tp_cache
1749    def __getitem__(self, params):
1750        if not isinstance(params, tuple):
1751            params = (params,)
1752        if len(params) >= 2 and params[-1] is ...:
1753            msg = "Tuple[t, ...]: t must be a type."
1754            params = tuple(_type_check(p, msg) for p in params[:-1])
1755            return self.copy_with((*params, _TypingEllipsis))
1756        msg = "Tuple[t0, t1, ...]: each t must be a type."
1757        params = tuple(_type_check(p, msg) for p in params)
1758        return self.copy_with(params)
1759
1760
1761class _UnionGenericAlias(_NotIterable, _GenericAlias, _root=True):
1762    def copy_with(self, params):
1763        return Union[params]
1764
1765    def __eq__(self, other):
1766        if not isinstance(other, (_UnionGenericAlias, types.UnionType)):
1767            return NotImplemented
1768        try:  # fast path
1769            return set(self.__args__) == set(other.__args__)
1770        except TypeError:  # not hashable, slow path
1771            return _compare_args_orderless(self.__args__, other.__args__)
1772
1773    def __hash__(self):
1774        return hash(frozenset(self.__args__))
1775
1776    def __repr__(self):
1777        args = self.__args__
1778        if len(args) == 2:
1779            if args[0] is type(None):
1780                return f'typing.Optional[{_type_repr(args[1])}]'
1781            elif args[1] is type(None):
1782                return f'typing.Optional[{_type_repr(args[0])}]'
1783        return super().__repr__()
1784
1785    def __instancecheck__(self, obj):
1786        return self.__subclasscheck__(type(obj))
1787
1788    def __subclasscheck__(self, cls):
1789        for arg in self.__args__:
1790            if issubclass(cls, arg):
1791                return True
1792
1793    def __reduce__(self):
1794        func, (origin, args) = super().__reduce__()
1795        return func, (Union, args)
1796
1797
1798def _value_and_type_iter(parameters):
1799    return ((p, type(p)) for p in parameters)
1800
1801
1802class _LiteralGenericAlias(_GenericAlias, _root=True):
1803    def __eq__(self, other):
1804        if not isinstance(other, _LiteralGenericAlias):
1805            return NotImplemented
1806
1807        return set(_value_and_type_iter(self.__args__)) == set(_value_and_type_iter(other.__args__))
1808
1809    def __hash__(self):
1810        return hash(frozenset(_value_and_type_iter(self.__args__)))
1811
1812
1813class _ConcatenateGenericAlias(_GenericAlias, _root=True):
1814    def copy_with(self, params):
1815        if isinstance(params[-1], (list, tuple)):
1816            return (*params[:-1], *params[-1])
1817        if isinstance(params[-1], _ConcatenateGenericAlias):
1818            params = (*params[:-1], *params[-1].__args__)
1819        return super().copy_with(params)
1820
1821
1822@_SpecialForm
1823def Unpack(self, parameters):
1824    """Type unpack operator.
1825
1826    The type unpack operator takes the child types from some container type,
1827    such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'.
1828
1829    For example::
1830
1831        # For some generic class `Foo`:
1832        Foo[Unpack[tuple[int, str]]]  # Equivalent to Foo[int, str]
1833
1834        Ts = TypeVarTuple('Ts')
1835        # Specifies that `Bar` is generic in an arbitrary number of types.
1836        # (Think of `Ts` as a tuple of an arbitrary number of individual
1837        #  `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
1838        #  `Generic[]`.)
1839        class Bar(Generic[Unpack[Ts]]): ...
1840        Bar[int]  # Valid
1841        Bar[int, str]  # Also valid
1842
1843    From Python 3.11, this can also be done using the `*` operator::
1844
1845        Foo[*tuple[int, str]]
1846        class Bar(Generic[*Ts]): ...
1847
1848    And from Python 3.12, it can be done using built-in syntax for generics::
1849
1850        Foo[*tuple[int, str]]
1851        class Bar[*Ts]: ...
1852
1853    The operator can also be used along with a `TypedDict` to annotate
1854    `**kwargs` in a function signature::
1855
1856        class Movie(TypedDict):
1857            name: str
1858            year: int
1859
1860        # This function expects two keyword arguments - *name* of type `str` and
1861        # *year* of type `int`.
1862        def foo(**kwargs: Unpack[Movie]): ...
1863
1864    Note that there is only some runtime checking of this operator. Not
1865    everything the runtime allows may be accepted by static type checkers.
1866
1867    For more information, see PEPs 646 and 692.
1868    """
1869    item = _type_check(parameters, f'{self} accepts only single type.')
1870    return _UnpackGenericAlias(origin=self, args=(item,))
1871
1872
1873class _UnpackGenericAlias(_GenericAlias, _root=True):
1874    def __repr__(self):
1875        # `Unpack` only takes one argument, so __args__ should contain only
1876        # a single item.
1877        return f'typing.Unpack[{_type_repr(self.__args__[0])}]'
1878
1879    def __getitem__(self, args):
1880        if self.__typing_is_unpacked_typevartuple__:
1881            return args
1882        return super().__getitem__(args)
1883
1884    @property
1885    def __typing_unpacked_tuple_args__(self):
1886        assert self.__origin__ is Unpack
1887        assert len(self.__args__) == 1
1888        arg, = self.__args__
1889        if isinstance(arg, (_GenericAlias, types.GenericAlias)):
1890            if arg.__origin__ is not tuple:
1891                raise TypeError("Unpack[...] must be used with a tuple type")
1892            return arg.__args__
1893        return None
1894
1895    @property
1896    def __typing_is_unpacked_typevartuple__(self):
1897        assert self.__origin__ is Unpack
1898        assert len(self.__args__) == 1
1899        return isinstance(self.__args__[0], TypeVarTuple)
1900
1901
1902class _TypingEllipsis:
1903    """Internal placeholder for ... (ellipsis)."""
1904
1905
1906_TYPING_INTERNALS = frozenset({
1907    '__parameters__', '__orig_bases__',  '__orig_class__',
1908    '_is_protocol', '_is_runtime_protocol', '__protocol_attrs__',
1909    '__non_callable_proto_members__', '__type_params__',
1910})
1911
1912_SPECIAL_NAMES = frozenset({
1913    '__abstractmethods__', '__annotations__', '__dict__', '__doc__',
1914    '__init__', '__module__', '__new__', '__slots__',
1915    '__subclasshook__', '__weakref__', '__class_getitem__',
1916    '__match_args__', '__static_attributes__', '__firstlineno__',
1917})
1918
1919# These special attributes will be not collected as protocol members.
1920EXCLUDED_ATTRIBUTES = _TYPING_INTERNALS | _SPECIAL_NAMES | {'_MutableMapping__marker'}
1921
1922
1923def _get_protocol_attrs(cls):
1924    """Collect protocol members from a protocol class objects.
1925
1926    This includes names actually defined in the class dictionary, as well
1927    as names that appear in annotations. Special names (above) are skipped.
1928    """
1929    attrs = set()
1930    for base in cls.__mro__[:-1]:  # without object
1931        if base.__name__ in {'Protocol', 'Generic'}:
1932            continue
1933        annotations = getattr(base, '__annotations__', {})
1934        for attr in (*base.__dict__, *annotations):
1935            if not attr.startswith('_abc_') and attr not in EXCLUDED_ATTRIBUTES:
1936                attrs.add(attr)
1937    return attrs
1938
1939
1940def _no_init_or_replace_init(self, *args, **kwargs):
1941    cls = type(self)
1942
1943    if cls._is_protocol:
1944        raise TypeError('Protocols cannot be instantiated')
1945
1946    # Already using a custom `__init__`. No need to calculate correct
1947    # `__init__` to call. This can lead to RecursionError. See bpo-45121.
1948    if cls.__init__ is not _no_init_or_replace_init:
1949        return
1950
1951    # Initially, `__init__` of a protocol subclass is set to `_no_init_or_replace_init`.
1952    # The first instantiation of the subclass will call `_no_init_or_replace_init` which
1953    # searches for a proper new `__init__` in the MRO. The new `__init__`
1954    # replaces the subclass' old `__init__` (ie `_no_init_or_replace_init`). Subsequent
1955    # instantiation of the protocol subclass will thus use the new
1956    # `__init__` and no longer call `_no_init_or_replace_init`.
1957    for base in cls.__mro__:
1958        init = base.__dict__.get('__init__', _no_init_or_replace_init)
1959        if init is not _no_init_or_replace_init:
1960            cls.__init__ = init
1961            break
1962    else:
1963        # should not happen
1964        cls.__init__ = object.__init__
1965
1966    cls.__init__(self, *args, **kwargs)
1967
1968
1969def _caller(depth=1, default='__main__'):
1970    try:
1971        return sys._getframemodulename(depth + 1) or default
1972    except AttributeError:  # For platforms without _getframemodulename()
1973        pass
1974    try:
1975        return sys._getframe(depth + 1).f_globals.get('__name__', default)
1976    except (AttributeError, ValueError):  # For platforms without _getframe()
1977        pass
1978    return None
1979
1980def _allow_reckless_class_checks(depth=2):
1981    """Allow instance and class checks for special stdlib modules.
1982
1983    The abc and functools modules indiscriminately call isinstance() and
1984    issubclass() on the whole MRO of a user class, which may contain protocols.
1985    """
1986    return _caller(depth) in {'abc', 'functools', None}
1987
1988
1989_PROTO_ALLOWLIST = {
1990    'collections.abc': [
1991        'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
1992        'AsyncIterator', 'Hashable', 'Sized', 'Container', 'Collection',
1993        'Reversible', 'Buffer',
1994    ],
1995    'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
1996}
1997
1998
1999@functools.cache
2000def _lazy_load_getattr_static():
2001    # Import getattr_static lazily so as not to slow down the import of typing.py
2002    # Cache the result so we don't slow down _ProtocolMeta.__instancecheck__ unnecessarily
2003    from inspect import getattr_static
2004    return getattr_static
2005
2006
2007_cleanups.append(_lazy_load_getattr_static.cache_clear)
2008
2009def _pickle_psargs(psargs):
2010    return ParamSpecArgs, (psargs.__origin__,)
2011
2012copyreg.pickle(ParamSpecArgs, _pickle_psargs)
2013
2014def _pickle_pskwargs(pskwargs):
2015    return ParamSpecKwargs, (pskwargs.__origin__,)
2016
2017copyreg.pickle(ParamSpecKwargs, _pickle_pskwargs)
2018
2019del _pickle_psargs, _pickle_pskwargs
2020
2021
2022# Preload these once, as globals, as a micro-optimisation.
2023# This makes a significant difference to the time it takes
2024# to do `isinstance()`/`issubclass()` checks
2025# against runtime-checkable protocols with only one callable member.
2026_abc_instancecheck = ABCMeta.__instancecheck__
2027_abc_subclasscheck = ABCMeta.__subclasscheck__
2028
2029
2030def _type_check_issubclass_arg_1(arg):
2031    """Raise TypeError if `arg` is not an instance of `type`
2032    in `issubclass(arg, <protocol>)`.
2033
2034    In most cases, this is verified by type.__subclasscheck__.
2035    Checking it again unnecessarily would slow down issubclass() checks,
2036    so, we don't perform this check unless we absolutely have to.
2037
2038    For various error paths, however,
2039    we want to ensure that *this* error message is shown to the user
2040    where relevant, rather than a typing.py-specific error message.
2041    """
2042    if not isinstance(arg, type):
2043        # Same error message as for issubclass(1, int).
2044        raise TypeError('issubclass() arg 1 must be a class')
2045
2046
2047class _ProtocolMeta(ABCMeta):
2048    # This metaclass is somewhat unfortunate,
2049    # but is necessary for several reasons...
2050    def __new__(mcls, name, bases, namespace, /, **kwargs):
2051        if name == "Protocol" and bases == (Generic,):
2052            pass
2053        elif Protocol in bases:
2054            for base in bases:
2055                if not (
2056                    base in {object, Generic}
2057                    or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
2058                    or (
2059                        issubclass(base, Generic)
2060                        and getattr(base, "_is_protocol", False)
2061                    )
2062                ):
2063                    raise TypeError(
2064                        f"Protocols can only inherit from other protocols, "
2065                        f"got {base!r}"
2066                    )
2067        return super().__new__(mcls, name, bases, namespace, **kwargs)
2068
2069    def __init__(cls, *args, **kwargs):
2070        super().__init__(*args, **kwargs)
2071        if getattr(cls, "_is_protocol", False):
2072            cls.__protocol_attrs__ = _get_protocol_attrs(cls)
2073
2074    def __subclasscheck__(cls, other):
2075        if cls is Protocol:
2076            return type.__subclasscheck__(cls, other)
2077        if (
2078            getattr(cls, '_is_protocol', False)
2079            and not _allow_reckless_class_checks()
2080        ):
2081            if not getattr(cls, '_is_runtime_protocol', False):
2082                _type_check_issubclass_arg_1(other)
2083                raise TypeError(
2084                    "Instance and class checks can only be used with "
2085                    "@runtime_checkable protocols"
2086                )
2087            if (
2088                # this attribute is set by @runtime_checkable:
2089                cls.__non_callable_proto_members__
2090                and cls.__dict__.get("__subclasshook__") is _proto_hook
2091            ):
2092                _type_check_issubclass_arg_1(other)
2093                non_method_attrs = sorted(cls.__non_callable_proto_members__)
2094                raise TypeError(
2095                    "Protocols with non-method members don't support issubclass()."
2096                    f" Non-method members: {str(non_method_attrs)[1:-1]}."
2097                )
2098        return _abc_subclasscheck(cls, other)
2099
2100    def __instancecheck__(cls, instance):
2101        # We need this method for situations where attributes are
2102        # assigned in __init__.
2103        if cls is Protocol:
2104            return type.__instancecheck__(cls, instance)
2105        if not getattr(cls, "_is_protocol", False):
2106            # i.e., it's a concrete subclass of a protocol
2107            return _abc_instancecheck(cls, instance)
2108
2109        if (
2110            not getattr(cls, '_is_runtime_protocol', False) and
2111            not _allow_reckless_class_checks()
2112        ):
2113            raise TypeError("Instance and class checks can only be used with"
2114                            " @runtime_checkable protocols")
2115
2116        if _abc_instancecheck(cls, instance):
2117            return True
2118
2119        getattr_static = _lazy_load_getattr_static()
2120        for attr in cls.__protocol_attrs__:
2121            try:
2122                val = getattr_static(instance, attr)
2123            except AttributeError:
2124                break
2125            # this attribute is set by @runtime_checkable:
2126            if val is None and attr not in cls.__non_callable_proto_members__:
2127                break
2128        else:
2129            return True
2130
2131        return False
2132
2133
2134@classmethod
2135def _proto_hook(cls, other):
2136    if not cls.__dict__.get('_is_protocol', False):
2137        return NotImplemented
2138
2139    for attr in cls.__protocol_attrs__:
2140        for base in other.__mro__:
2141            # Check if the members appears in the class dictionary...
2142            if attr in base.__dict__:
2143                if base.__dict__[attr] is None:
2144                    return NotImplemented
2145                break
2146
2147            # ...or in annotations, if it is a sub-protocol.
2148            annotations = getattr(base, '__annotations__', {})
2149            if (isinstance(annotations, collections.abc.Mapping) and
2150                    attr in annotations and
2151                    issubclass(other, Generic) and getattr(other, '_is_protocol', False)):
2152                break
2153        else:
2154            return NotImplemented
2155    return True
2156
2157
2158class Protocol(Generic, metaclass=_ProtocolMeta):
2159    """Base class for protocol classes.
2160
2161    Protocol classes are defined as::
2162
2163        class Proto(Protocol):
2164            def meth(self) -> int:
2165                ...
2166
2167    Such classes are primarily used with static type checkers that recognize
2168    structural subtyping (static duck-typing).
2169
2170    For example::
2171
2172        class C:
2173            def meth(self) -> int:
2174                return 0
2175
2176        def func(x: Proto) -> int:
2177            return x.meth()
2178
2179        func(C())  # Passes static type check
2180
2181    See PEP 544 for details. Protocol classes decorated with
2182    @typing.runtime_checkable act as simple-minded runtime protocols that check
2183    only the presence of given attributes, ignoring their type signatures.
2184    Protocol classes can be generic, they are defined as::
2185
2186        class GenProto[T](Protocol):
2187            def meth(self) -> T:
2188                ...
2189    """
2190
2191    __slots__ = ()
2192    _is_protocol = True
2193    _is_runtime_protocol = False
2194
2195    def __init_subclass__(cls, *args, **kwargs):
2196        super().__init_subclass__(*args, **kwargs)
2197
2198        # Determine if this is a protocol or a concrete subclass.
2199        if not cls.__dict__.get('_is_protocol', False):
2200            cls._is_protocol = any(b is Protocol for b in cls.__bases__)
2201
2202        # Set (or override) the protocol subclass hook.
2203        if '__subclasshook__' not in cls.__dict__:
2204            cls.__subclasshook__ = _proto_hook
2205
2206        # Prohibit instantiation for protocol classes
2207        if cls._is_protocol and cls.__init__ is Protocol.__init__:
2208            cls.__init__ = _no_init_or_replace_init
2209
2210
2211class _AnnotatedAlias(_NotIterable, _GenericAlias, _root=True):
2212    """Runtime representation of an annotated type.
2213
2214    At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
2215    with extra annotations. The alias behaves like a normal typing alias.
2216    Instantiating is the same as instantiating the underlying type; binding
2217    it to types is also the same.
2218
2219    The metadata itself is stored in a '__metadata__' attribute as a tuple.
2220    """
2221
2222    def __init__(self, origin, metadata):
2223        if isinstance(origin, _AnnotatedAlias):
2224            metadata = origin.__metadata__ + metadata
2225            origin = origin.__origin__
2226        super().__init__(origin, origin, name='Annotated')
2227        self.__metadata__ = metadata
2228
2229    def copy_with(self, params):
2230        assert len(params) == 1
2231        new_type = params[0]
2232        return _AnnotatedAlias(new_type, self.__metadata__)
2233
2234    def __repr__(self):
2235        return "typing.Annotated[{}, {}]".format(
2236            _type_repr(self.__origin__),
2237            ", ".join(repr(a) for a in self.__metadata__)
2238        )
2239
2240    def __reduce__(self):
2241        return operator.getitem, (
2242            Annotated, (self.__origin__,) + self.__metadata__
2243        )
2244
2245    def __eq__(self, other):
2246        if not isinstance(other, _AnnotatedAlias):
2247            return NotImplemented
2248        return (self.__origin__ == other.__origin__
2249                and self.__metadata__ == other.__metadata__)
2250
2251    def __hash__(self):
2252        return hash((self.__origin__, self.__metadata__))
2253
2254    def __getattr__(self, attr):
2255        if attr in {'__name__', '__qualname__'}:
2256            return 'Annotated'
2257        return super().__getattr__(attr)
2258
2259    def __mro_entries__(self, bases):
2260        return (self.__origin__,)
2261
2262
2263@_TypedCacheSpecialForm
2264@_tp_cache(typed=True)
2265def Annotated(self, *params):
2266    """Add context-specific metadata to a type.
2267
2268    Example: Annotated[int, runtime_check.Unsigned] indicates to the
2269    hypothetical runtime_check module that this type is an unsigned int.
2270    Every other consumer of this type can ignore this metadata and treat
2271    this type as int.
2272
2273    The first argument to Annotated must be a valid type.
2274
2275    Details:
2276
2277    - It's an error to call `Annotated` with less than two arguments.
2278    - Access the metadata via the ``__metadata__`` attribute::
2279
2280        assert Annotated[int, '$'].__metadata__ == ('$',)
2281
2282    - Nested Annotated types are flattened::
2283
2284        assert Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
2285
2286    - Instantiating an annotated type is equivalent to instantiating the
2287    underlying type::
2288
2289        assert Annotated[C, Ann1](5) == C(5)
2290
2291    - Annotated can be used as a generic type alias::
2292
2293        type Optimized[T] = Annotated[T, runtime.Optimize()]
2294        # type checker will treat Optimized[int]
2295        # as equivalent to Annotated[int, runtime.Optimize()]
2296
2297        type OptimizedList[T] = Annotated[list[T], runtime.Optimize()]
2298        # type checker will treat OptimizedList[int]
2299        # as equivalent to Annotated[list[int], runtime.Optimize()]
2300
2301    - Annotated cannot be used with an unpacked TypeVarTuple::
2302
2303        type Variadic[*Ts] = Annotated[*Ts, Ann1]  # NOT valid
2304
2305      This would be equivalent to::
2306
2307        Annotated[T1, T2, T3, ..., Ann1]
2308
2309      where T1, T2 etc. are TypeVars, which would be invalid, because
2310      only one type should be passed to Annotated.
2311    """
2312    if len(params) < 2:
2313        raise TypeError("Annotated[...] should be used "
2314                        "with at least two arguments (a type and an "
2315                        "annotation).")
2316    if _is_unpacked_typevartuple(params[0]):
2317        raise TypeError("Annotated[...] should not be used with an "
2318                        "unpacked TypeVarTuple")
2319    msg = "Annotated[t, ...]: t must be a type."
2320    origin = _type_check(params[0], msg, allow_special_forms=True)
2321    metadata = tuple(params[1:])
2322    return _AnnotatedAlias(origin, metadata)
2323
2324
2325def runtime_checkable(cls):
2326    """Mark a protocol class as a runtime protocol.
2327
2328    Such protocol can be used with isinstance() and issubclass().
2329    Raise TypeError if applied to a non-protocol class.
2330    This allows a simple-minded structural check very similar to
2331    one trick ponies in collections.abc such as Iterable.
2332
2333    For example::
2334
2335        @runtime_checkable
2336        class Closable(Protocol):
2337            def close(self): ...
2338
2339        assert isinstance(open('/some/file'), Closable)
2340
2341    Warning: this will check only the presence of the required methods,
2342    not their type signatures!
2343    """
2344    if not issubclass(cls, Generic) or not getattr(cls, '_is_protocol', False):
2345        raise TypeError('@runtime_checkable can be only applied to protocol classes,'
2346                        ' got %r' % cls)
2347    cls._is_runtime_protocol = True
2348    # PEP 544 prohibits using issubclass()
2349    # with protocols that have non-method members.
2350    # See gh-113320 for why we compute this attribute here,
2351    # rather than in `_ProtocolMeta.__init__`
2352    cls.__non_callable_proto_members__ = set()
2353    for attr in cls.__protocol_attrs__:
2354        try:
2355            is_callable = callable(getattr(cls, attr, None))
2356        except Exception as e:
2357            raise TypeError(
2358                f"Failed to determine whether protocol member {attr!r} "
2359                "is a method member"
2360            ) from e
2361        else:
2362            if not is_callable:
2363                cls.__non_callable_proto_members__.add(attr)
2364    return cls
2365
2366
2367def cast(typ, val):
2368    """Cast a value to a type.
2369
2370    This returns the value unchanged.  To the type checker this
2371    signals that the return value has the designated type, but at
2372    runtime we intentionally don't check anything (we want this
2373    to be as fast as possible).
2374    """
2375    return val
2376
2377
2378def assert_type(val, typ, /):
2379    """Ask a static type checker to confirm that the value is of the given type.
2380
2381    At runtime this does nothing: it returns the first argument unchanged with no
2382    checks or side effects, no matter the actual type of the argument.
2383
2384    When a static type checker encounters a call to assert_type(), it
2385    emits an error if the value is not of the specified type::
2386
2387        def greet(name: str) -> None:
2388            assert_type(name, str)  # OK
2389            assert_type(name, int)  # type checker error
2390    """
2391    return val
2392
2393
2394_allowed_types = (types.FunctionType, types.BuiltinFunctionType,
2395                  types.MethodType, types.ModuleType,
2396                  WrapperDescriptorType, MethodWrapperType, MethodDescriptorType)
2397
2398
2399def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
2400    """Return type hints for an object.
2401
2402    This is often the same as obj.__annotations__, but it handles
2403    forward references encoded as string literals and recursively replaces all
2404    'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
2405
2406    The argument may be a module, class, method, or function. The annotations
2407    are returned as a dictionary. For classes, annotations include also
2408    inherited members.
2409
2410    TypeError is raised if the argument is not of a type that can contain
2411    annotations, and an empty dictionary is returned if no annotations are
2412    present.
2413
2414    BEWARE -- the behavior of globalns and localns is counterintuitive
2415    (unless you are familiar with how eval() and exec() work).  The
2416    search order is locals first, then globals.
2417
2418    - If no dict arguments are passed, an attempt is made to use the
2419      globals from obj (or the respective module's globals for classes),
2420      and these are also used as the locals.  If the object does not appear
2421      to have globals, an empty dictionary is used.  For classes, the search
2422      order is globals first then locals.
2423
2424    - If one dict argument is passed, it is used for both globals and
2425      locals.
2426
2427    - If two dict arguments are passed, they specify globals and
2428      locals, respectively.
2429    """
2430    if getattr(obj, '__no_type_check__', None):
2431        return {}
2432    # Classes require a special treatment.
2433    if isinstance(obj, type):
2434        hints = {}
2435        for base in reversed(obj.__mro__):
2436            if globalns is None:
2437                base_globals = getattr(sys.modules.get(base.__module__, None), '__dict__', {})
2438            else:
2439                base_globals = globalns
2440            ann = base.__dict__.get('__annotations__', {})
2441            if isinstance(ann, types.GetSetDescriptorType):
2442                ann = {}
2443            base_locals = dict(vars(base)) if localns is None else localns
2444            if localns is None and globalns is None:
2445                # This is surprising, but required.  Before Python 3.10,
2446                # get_type_hints only evaluated the globalns of
2447                # a class.  To maintain backwards compatibility, we reverse
2448                # the globalns and localns order so that eval() looks into
2449                # *base_globals* first rather than *base_locals*.
2450                # This only affects ForwardRefs.
2451                base_globals, base_locals = base_locals, base_globals
2452            for name, value in ann.items():
2453                if value is None:
2454                    value = type(None)
2455                if isinstance(value, str):
2456                    value = ForwardRef(value, is_argument=False, is_class=True)
2457                value = _eval_type(value, base_globals, base_locals, base.__type_params__)
2458                hints[name] = value
2459        return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
2460
2461    if globalns is None:
2462        if isinstance(obj, types.ModuleType):
2463            globalns = obj.__dict__
2464        else:
2465            nsobj = obj
2466            # Find globalns for the unwrapped object.
2467            while hasattr(nsobj, '__wrapped__'):
2468                nsobj = nsobj.__wrapped__
2469            globalns = getattr(nsobj, '__globals__', {})
2470        if localns is None:
2471            localns = globalns
2472    elif localns is None:
2473        localns = globalns
2474    hints = getattr(obj, '__annotations__', None)
2475    if hints is None:
2476        # Return empty annotations for something that _could_ have them.
2477        if isinstance(obj, _allowed_types):
2478            return {}
2479        else:
2480            raise TypeError('{!r} is not a module, class, method, '
2481                            'or function.'.format(obj))
2482    hints = dict(hints)
2483    type_params = getattr(obj, "__type_params__", ())
2484    for name, value in hints.items():
2485        if value is None:
2486            value = type(None)
2487        if isinstance(value, str):
2488            # class-level forward refs were handled above, this must be either
2489            # a module-level annotation or a function argument annotation
2490            value = ForwardRef(
2491                value,
2492                is_argument=not isinstance(obj, types.ModuleType),
2493                is_class=False,
2494            )
2495        hints[name] = _eval_type(value, globalns, localns, type_params)
2496    return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
2497
2498
2499def _strip_annotations(t):
2500    """Strip the annotations from a given type."""
2501    if isinstance(t, _AnnotatedAlias):
2502        return _strip_annotations(t.__origin__)
2503    if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
2504        return _strip_annotations(t.__args__[0])
2505    if isinstance(t, _GenericAlias):
2506        stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
2507        if stripped_args == t.__args__:
2508            return t
2509        return t.copy_with(stripped_args)
2510    if isinstance(t, GenericAlias):
2511        stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
2512        if stripped_args == t.__args__:
2513            return t
2514        return GenericAlias(t.__origin__, stripped_args)
2515    if isinstance(t, types.UnionType):
2516        stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
2517        if stripped_args == t.__args__:
2518            return t
2519        return functools.reduce(operator.or_, stripped_args)
2520
2521    return t
2522
2523
2524def get_origin(tp):
2525    """Get the unsubscripted version of a type.
2526
2527    This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar,
2528    Annotated, and others. Return None for unsupported types.
2529
2530    Examples::
2531
2532        >>> P = ParamSpec('P')
2533        >>> assert get_origin(Literal[42]) is Literal
2534        >>> assert get_origin(int) is None
2535        >>> assert get_origin(ClassVar[int]) is ClassVar
2536        >>> assert get_origin(Generic) is Generic
2537        >>> assert get_origin(Generic[T]) is Generic
2538        >>> assert get_origin(Union[T, int]) is Union
2539        >>> assert get_origin(List[Tuple[T, T]][int]) is list
2540        >>> assert get_origin(P.args) is P
2541    """
2542    if isinstance(tp, _AnnotatedAlias):
2543        return Annotated
2544    if isinstance(tp, (_BaseGenericAlias, GenericAlias,
2545                       ParamSpecArgs, ParamSpecKwargs)):
2546        return tp.__origin__
2547    if tp is Generic:
2548        return Generic
2549    if isinstance(tp, types.UnionType):
2550        return types.UnionType
2551    return None
2552
2553
2554def get_args(tp):
2555    """Get type arguments with all substitutions performed.
2556
2557    For unions, basic simplifications used by Union constructor are performed.
2558
2559    Examples::
2560
2561        >>> T = TypeVar('T')
2562        >>> assert get_args(Dict[str, int]) == (str, int)
2563        >>> assert get_args(int) == ()
2564        >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str)
2565        >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
2566        >>> assert get_args(Callable[[], T][int]) == ([], int)
2567    """
2568    if isinstance(tp, _AnnotatedAlias):
2569        return (tp.__origin__,) + tp.__metadata__
2570    if isinstance(tp, (_GenericAlias, GenericAlias)):
2571        res = tp.__args__
2572        if _should_unflatten_callable_args(tp, res):
2573            res = (list(res[:-1]), res[-1])
2574        return res
2575    if isinstance(tp, types.UnionType):
2576        return tp.__args__
2577    return ()
2578
2579
2580def is_typeddict(tp):
2581    """Check if an annotation is a TypedDict class.
2582
2583    For example::
2584
2585        >>> from typing import TypedDict
2586        >>> class Film(TypedDict):
2587        ...     title: str
2588        ...     year: int
2589        ...
2590        >>> is_typeddict(Film)
2591        True
2592        >>> is_typeddict(dict)
2593        False
2594    """
2595    return isinstance(tp, _TypedDictMeta)
2596
2597
2598_ASSERT_NEVER_REPR_MAX_LENGTH = 100
2599
2600
2601def assert_never(arg: Never, /) -> Never:
2602    """Statically assert that a line of code is unreachable.
2603
2604    Example::
2605
2606        def int_or_str(arg: int | str) -> None:
2607            match arg:
2608                case int():
2609                    print("It's an int")
2610                case str():
2611                    print("It's a str")
2612                case _:
2613                    assert_never(arg)
2614
2615    If a type checker finds that a call to assert_never() is
2616    reachable, it will emit an error.
2617
2618    At runtime, this throws an exception when called.
2619    """
2620    value = repr(arg)
2621    if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
2622        value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
2623    raise AssertionError(f"Expected code to be unreachable, but got: {value}")
2624
2625
2626def no_type_check(arg):
2627    """Decorator to indicate that annotations are not type hints.
2628
2629    The argument must be a class or function; if it is a class, it
2630    applies recursively to all methods and classes defined in that class
2631    (but not to methods defined in its superclasses or subclasses).
2632
2633    This mutates the function(s) or class(es) in place.
2634    """
2635    if isinstance(arg, type):
2636        for key in dir(arg):
2637            obj = getattr(arg, key)
2638            if (
2639                not hasattr(obj, '__qualname__')
2640                or obj.__qualname__ != f'{arg.__qualname__}.{obj.__name__}'
2641                or getattr(obj, '__module__', None) != arg.__module__
2642            ):
2643                # We only modify objects that are defined in this type directly.
2644                # If classes / methods are nested in multiple layers,
2645                # we will modify them when processing their direct holders.
2646                continue
2647            # Instance, class, and static methods:
2648            if isinstance(obj, types.FunctionType):
2649                obj.__no_type_check__ = True
2650            if isinstance(obj, types.MethodType):
2651                obj.__func__.__no_type_check__ = True
2652            # Nested types:
2653            if isinstance(obj, type):
2654                no_type_check(obj)
2655    try:
2656        arg.__no_type_check__ = True
2657    except TypeError:  # built-in classes
2658        pass
2659    return arg
2660
2661
2662def no_type_check_decorator(decorator):
2663    """Decorator to give another decorator the @no_type_check effect.
2664
2665    This wraps the decorator with something that wraps the decorated
2666    function in @no_type_check.
2667    """
2668    import warnings
2669    warnings._deprecated("typing.no_type_check_decorator", remove=(3, 15))
2670    @functools.wraps(decorator)
2671    def wrapped_decorator(*args, **kwds):
2672        func = decorator(*args, **kwds)
2673        func = no_type_check(func)
2674        return func
2675
2676    return wrapped_decorator
2677
2678
2679def _overload_dummy(*args, **kwds):
2680    """Helper for @overload to raise when called."""
2681    raise NotImplementedError(
2682        "You should not call an overloaded function. "
2683        "A series of @overload-decorated functions "
2684        "outside a stub module should always be followed "
2685        "by an implementation that is not @overload-ed.")
2686
2687
2688# {module: {qualname: {firstlineno: func}}}
2689_overload_registry = defaultdict(functools.partial(defaultdict, dict))
2690
2691
2692def overload(func):
2693    """Decorator for overloaded functions/methods.
2694
2695    In a stub file, place two or more stub definitions for the same
2696    function in a row, each decorated with @overload.
2697
2698    For example::
2699
2700        @overload
2701        def utf8(value: None) -> None: ...
2702        @overload
2703        def utf8(value: bytes) -> bytes: ...
2704        @overload
2705        def utf8(value: str) -> bytes: ...
2706
2707    In a non-stub file (i.e. a regular .py file), do the same but
2708    follow it with an implementation.  The implementation should *not*
2709    be decorated with @overload::
2710
2711        @overload
2712        def utf8(value: None) -> None: ...
2713        @overload
2714        def utf8(value: bytes) -> bytes: ...
2715        @overload
2716        def utf8(value: str) -> bytes: ...
2717        def utf8(value):
2718            ...  # implementation goes here
2719
2720    The overloads for a function can be retrieved at runtime using the
2721    get_overloads() function.
2722    """
2723    # classmethod and staticmethod
2724    f = getattr(func, "__func__", func)
2725    try:
2726        _overload_registry[f.__module__][f.__qualname__][f.__code__.co_firstlineno] = func
2727    except AttributeError:
2728        # Not a normal function; ignore.
2729        pass
2730    return _overload_dummy
2731
2732
2733def get_overloads(func):
2734    """Return all defined overloads for *func* as a sequence."""
2735    # classmethod and staticmethod
2736    f = getattr(func, "__func__", func)
2737    if f.__module__ not in _overload_registry:
2738        return []
2739    mod_dict = _overload_registry[f.__module__]
2740    if f.__qualname__ not in mod_dict:
2741        return []
2742    return list(mod_dict[f.__qualname__].values())
2743
2744
2745def clear_overloads():
2746    """Clear all overloads in the registry."""
2747    _overload_registry.clear()
2748
2749
2750def final(f):
2751    """Decorator to indicate final methods and final classes.
2752
2753    Use this decorator to indicate to type checkers that the decorated
2754    method cannot be overridden, and decorated class cannot be subclassed.
2755
2756    For example::
2757
2758        class Base:
2759            @final
2760            def done(self) -> None:
2761                ...
2762        class Sub(Base):
2763            def done(self) -> None:  # Error reported by type checker
2764                ...
2765
2766        @final
2767        class Leaf:
2768            ...
2769        class Other(Leaf):  # Error reported by type checker
2770            ...
2771
2772    There is no runtime checking of these properties. The decorator
2773    attempts to set the ``__final__`` attribute to ``True`` on the decorated
2774    object to allow runtime introspection.
2775    """
2776    try:
2777        f.__final__ = True
2778    except (AttributeError, TypeError):
2779        # Skip the attribute silently if it is not writable.
2780        # AttributeError happens if the object has __slots__ or a
2781        # read-only property, TypeError if it's a builtin class.
2782        pass
2783    return f
2784
2785
2786# Some unconstrained type variables.  These were initially used by the container types.
2787# They were never meant for export and are now unused, but we keep them around to
2788# avoid breaking compatibility with users who import them.
2789T = TypeVar('T')  # Any type.
2790KT = TypeVar('KT')  # Key type.
2791VT = TypeVar('VT')  # Value type.
2792T_co = TypeVar('T_co', covariant=True)  # Any type covariant containers.
2793V_co = TypeVar('V_co', covariant=True)  # Any type covariant containers.
2794VT_co = TypeVar('VT_co', covariant=True)  # Value type covariant containers.
2795T_contra = TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
2796# Internal type variable used for Type[].
2797CT_co = TypeVar('CT_co', covariant=True, bound=type)
2798
2799
2800# A useful type variable with constraints.  This represents string types.
2801# (This one *is* for export!)
2802AnyStr = TypeVar('AnyStr', bytes, str)
2803
2804
2805# Various ABCs mimicking those in collections.abc.
2806_alias = _SpecialGenericAlias
2807
2808Hashable = _alias(collections.abc.Hashable, 0)  # Not generic.
2809Awaitable = _alias(collections.abc.Awaitable, 1)
2810Coroutine = _alias(collections.abc.Coroutine, 3)
2811AsyncIterable = _alias(collections.abc.AsyncIterable, 1)
2812AsyncIterator = _alias(collections.abc.AsyncIterator, 1)
2813Iterable = _alias(collections.abc.Iterable, 1)
2814Iterator = _alias(collections.abc.Iterator, 1)
2815Reversible = _alias(collections.abc.Reversible, 1)
2816Sized = _alias(collections.abc.Sized, 0)  # Not generic.
2817Container = _alias(collections.abc.Container, 1)
2818Collection = _alias(collections.abc.Collection, 1)
2819Callable = _CallableType(collections.abc.Callable, 2)
2820Callable.__doc__ = \
2821    """Deprecated alias to collections.abc.Callable.
2822
2823    Callable[[int], str] signifies a function that takes a single
2824    parameter of type int and returns a str.
2825
2826    The subscription syntax must always be used with exactly two
2827    values: the argument list and the return type.
2828    The argument list must be a list of types, a ParamSpec,
2829    Concatenate or ellipsis. The return type must be a single type.
2830
2831    There is no syntax to indicate optional or keyword arguments;
2832    such function types are rarely used as callback types.
2833    """
2834AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet')
2835MutableSet = _alias(collections.abc.MutableSet, 1)
2836# NOTE: Mapping is only covariant in the value type.
2837Mapping = _alias(collections.abc.Mapping, 2)
2838MutableMapping = _alias(collections.abc.MutableMapping, 2)
2839Sequence = _alias(collections.abc.Sequence, 1)
2840MutableSequence = _alias(collections.abc.MutableSequence, 1)
2841ByteString = _DeprecatedGenericAlias(
2842    collections.abc.ByteString, 0, removal_version=(3, 14)  # Not generic.
2843)
2844# Tuple accepts variable number of parameters.
2845Tuple = _TupleType(tuple, -1, inst=False, name='Tuple')
2846Tuple.__doc__ = \
2847    """Deprecated alias to builtins.tuple.
2848
2849    Tuple[X, Y] is the cross-product type of X and Y.
2850
2851    Example: Tuple[T1, T2] is a tuple of two elements corresponding
2852    to type variables T1 and T2.  Tuple[int, float, str] is a tuple
2853    of an int, a float and a string.
2854
2855    To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
2856    """
2857List = _alias(list, 1, inst=False, name='List')
2858Deque = _alias(collections.deque, 1, name='Deque')
2859Set = _alias(set, 1, inst=False, name='Set')
2860FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet')
2861MappingView = _alias(collections.abc.MappingView, 1)
2862KeysView = _alias(collections.abc.KeysView, 1)
2863ItemsView = _alias(collections.abc.ItemsView, 2)
2864ValuesView = _alias(collections.abc.ValuesView, 1)
2865Dict = _alias(dict, 2, inst=False, name='Dict')
2866DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict')
2867OrderedDict = _alias(collections.OrderedDict, 2)
2868Counter = _alias(collections.Counter, 1)
2869ChainMap = _alias(collections.ChainMap, 2)
2870Generator = _alias(collections.abc.Generator, 3, defaults=(types.NoneType, types.NoneType))
2871AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2, defaults=(types.NoneType,))
2872Type = _alias(type, 1, inst=False, name='Type')
2873Type.__doc__ = \
2874    """Deprecated alias to builtins.type.
2875
2876    builtins.type or typing.Type can be used to annotate class objects.
2877    For example, suppose we have the following classes::
2878
2879        class User: ...  # Abstract base for User classes
2880        class BasicUser(User): ...
2881        class ProUser(User): ...
2882        class TeamUser(User): ...
2883
2884    And a function that takes a class argument that's a subclass of
2885    User and returns an instance of the corresponding class::
2886
2887        def new_user[U](user_class: Type[U]) -> U:
2888            user = user_class()
2889            # (Here we could write the user object to a database)
2890            return user
2891
2892        joe = new_user(BasicUser)
2893
2894    At this point the type checker knows that joe has type BasicUser.
2895    """
2896
2897
2898@runtime_checkable
2899class SupportsInt(Protocol):
2900    """An ABC with one abstract method __int__."""
2901
2902    __slots__ = ()
2903
2904    @abstractmethod
2905    def __int__(self) -> int:
2906        pass
2907
2908
2909@runtime_checkable
2910class SupportsFloat(Protocol):
2911    """An ABC with one abstract method __float__."""
2912
2913    __slots__ = ()
2914
2915    @abstractmethod
2916    def __float__(self) -> float:
2917        pass
2918
2919
2920@runtime_checkable
2921class SupportsComplex(Protocol):
2922    """An ABC with one abstract method __complex__."""
2923
2924    __slots__ = ()
2925
2926    @abstractmethod
2927    def __complex__(self) -> complex:
2928        pass
2929
2930
2931@runtime_checkable
2932class SupportsBytes(Protocol):
2933    """An ABC with one abstract method __bytes__."""
2934
2935    __slots__ = ()
2936
2937    @abstractmethod
2938    def __bytes__(self) -> bytes:
2939        pass
2940
2941
2942@runtime_checkable
2943class SupportsIndex(Protocol):
2944    """An ABC with one abstract method __index__."""
2945
2946    __slots__ = ()
2947
2948    @abstractmethod
2949    def __index__(self) -> int:
2950        pass
2951
2952
2953@runtime_checkable
2954class SupportsAbs[T](Protocol):
2955    """An ABC with one abstract method __abs__ that is covariant in its return type."""
2956
2957    __slots__ = ()
2958
2959    @abstractmethod
2960    def __abs__(self) -> T:
2961        pass
2962
2963
2964@runtime_checkable
2965class SupportsRound[T](Protocol):
2966    """An ABC with one abstract method __round__ that is covariant in its return type."""
2967
2968    __slots__ = ()
2969
2970    @abstractmethod
2971    def __round__(self, ndigits: int = 0) -> T:
2972        pass
2973
2974
2975def _make_nmtuple(name, types, module, defaults = ()):
2976    fields = [n for n, t in types]
2977    types = {n: _type_check(t, f"field {n} annotation must be a type")
2978             for n, t in types}
2979    nm_tpl = collections.namedtuple(name, fields,
2980                                    defaults=defaults, module=module)
2981    nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = types
2982    return nm_tpl
2983
2984
2985# attributes prohibited to set in NamedTuple class syntax
2986_prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__',
2987                         '_fields', '_field_defaults',
2988                         '_make', '_replace', '_asdict', '_source'})
2989
2990_special = frozenset({'__module__', '__name__', '__annotations__'})
2991
2992
2993class NamedTupleMeta(type):
2994    def __new__(cls, typename, bases, ns):
2995        assert _NamedTuple in bases
2996        for base in bases:
2997            if base is not _NamedTuple and base is not Generic:
2998                raise TypeError(
2999                    'can only inherit from a NamedTuple type and Generic')
3000        bases = tuple(tuple if base is _NamedTuple else base for base in bases)
3001        types = ns.get('__annotations__', {})
3002        default_names = []
3003        for field_name in types:
3004            if field_name in ns:
3005                default_names.append(field_name)
3006            elif default_names:
3007                raise TypeError(f"Non-default namedtuple field {field_name} "
3008                                f"cannot follow default field"
3009                                f"{'s' if len(default_names) > 1 else ''} "
3010                                f"{', '.join(default_names)}")
3011        nm_tpl = _make_nmtuple(typename, types.items(),
3012                               defaults=[ns[n] for n in default_names],
3013                               module=ns['__module__'])
3014        nm_tpl.__bases__ = bases
3015        if Generic in bases:
3016            class_getitem = _generic_class_getitem
3017            nm_tpl.__class_getitem__ = classmethod(class_getitem)
3018        # update from user namespace without overriding special namedtuple attributes
3019        for key, val in ns.items():
3020            if key in _prohibited:
3021                raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
3022            elif key not in _special:
3023                if key not in nm_tpl._fields:
3024                    setattr(nm_tpl, key, val)
3025                try:
3026                    set_name = type(val).__set_name__
3027                except AttributeError:
3028                    pass
3029                else:
3030                    try:
3031                        set_name(val, nm_tpl, key)
3032                    except BaseException as e:
3033                        e.add_note(
3034                            f"Error calling __set_name__ on {type(val).__name__!r} "
3035                            f"instance {key!r} in {typename!r}"
3036                        )
3037                        raise
3038
3039        if Generic in bases:
3040            nm_tpl.__init_subclass__()
3041        return nm_tpl
3042
3043
3044def NamedTuple(typename, fields=_sentinel, /, **kwargs):
3045    """Typed version of namedtuple.
3046
3047    Usage::
3048
3049        class Employee(NamedTuple):
3050            name: str
3051            id: int
3052
3053    This is equivalent to::
3054
3055        Employee = collections.namedtuple('Employee', ['name', 'id'])
3056
3057    The resulting class has an extra __annotations__ attribute, giving a
3058    dict that maps field names to types.  (The field names are also in
3059    the _fields attribute, which is part of the namedtuple API.)
3060    An alternative equivalent functional syntax is also accepted::
3061
3062        Employee = NamedTuple('Employee', [('name', str), ('id', int)])
3063    """
3064    if fields is _sentinel:
3065        if kwargs:
3066            deprecated_thing = "Creating NamedTuple classes using keyword arguments"
3067            deprecation_msg = (
3068                "{name} is deprecated and will be disallowed in Python {remove}. "
3069                "Use the class-based or functional syntax instead."
3070            )
3071        else:
3072            deprecated_thing = "Failing to pass a value for the 'fields' parameter"
3073            example = f"`{typename} = NamedTuple({typename!r}, [])`"
3074            deprecation_msg = (
3075                "{name} is deprecated and will be disallowed in Python {remove}. "
3076                "To create a NamedTuple class with 0 fields "
3077                "using the functional syntax, "
3078                "pass an empty list, e.g. "
3079            ) + example + "."
3080    elif fields is None:
3081        if kwargs:
3082            raise TypeError(
3083                "Cannot pass `None` as the 'fields' parameter "
3084                "and also specify fields using keyword arguments"
3085            )
3086        else:
3087            deprecated_thing = "Passing `None` as the 'fields' parameter"
3088            example = f"`{typename} = NamedTuple({typename!r}, [])`"
3089            deprecation_msg = (
3090                "{name} is deprecated and will be disallowed in Python {remove}. "
3091                "To create a NamedTuple class with 0 fields "
3092                "using the functional syntax, "
3093                "pass an empty list, e.g. "
3094            ) + example + "."
3095    elif kwargs:
3096        raise TypeError("Either list of fields or keywords"
3097                        " can be provided to NamedTuple, not both")
3098    if fields is _sentinel or fields is None:
3099        import warnings
3100        warnings._deprecated(deprecated_thing, message=deprecation_msg, remove=(3, 15))
3101        fields = kwargs.items()
3102    nt = _make_nmtuple(typename, fields, module=_caller())
3103    nt.__orig_bases__ = (NamedTuple,)
3104    return nt
3105
3106_NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {})
3107
3108def _namedtuple_mro_entries(bases):
3109    assert NamedTuple in bases
3110    return (_NamedTuple,)
3111
3112NamedTuple.__mro_entries__ = _namedtuple_mro_entries
3113
3114
3115def _get_typeddict_qualifiers(annotation_type):
3116    while True:
3117        annotation_origin = get_origin(annotation_type)
3118        if annotation_origin is Annotated:
3119            annotation_args = get_args(annotation_type)
3120            if annotation_args:
3121                annotation_type = annotation_args[0]
3122            else:
3123                break
3124        elif annotation_origin is Required:
3125            yield Required
3126            (annotation_type,) = get_args(annotation_type)
3127        elif annotation_origin is NotRequired:
3128            yield NotRequired
3129            (annotation_type,) = get_args(annotation_type)
3130        elif annotation_origin is ReadOnly:
3131            yield ReadOnly
3132            (annotation_type,) = get_args(annotation_type)
3133        else:
3134            break
3135
3136
3137class _TypedDictMeta(type):
3138    def __new__(cls, name, bases, ns, total=True):
3139        """Create a new typed dict class object.
3140
3141        This method is called when TypedDict is subclassed,
3142        or when TypedDict is instantiated. This way
3143        TypedDict supports all three syntax forms described in its docstring.
3144        Subclasses and instances of TypedDict return actual dictionaries.
3145        """
3146        for base in bases:
3147            if type(base) is not _TypedDictMeta and base is not Generic:
3148                raise TypeError('cannot inherit from both a TypedDict type '
3149                                'and a non-TypedDict base class')
3150
3151        if any(issubclass(b, Generic) for b in bases):
3152            generic_base = (Generic,)
3153        else:
3154            generic_base = ()
3155
3156        tp_dict = type.__new__(_TypedDictMeta, name, (*generic_base, dict), ns)
3157
3158        if not hasattr(tp_dict, '__orig_bases__'):
3159            tp_dict.__orig_bases__ = bases
3160
3161        annotations = {}
3162        own_annotations = ns.get('__annotations__', {})
3163        msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
3164        own_annotations = {
3165            n: _type_check(tp, msg, module=tp_dict.__module__)
3166            for n, tp in own_annotations.items()
3167        }
3168        required_keys = set()
3169        optional_keys = set()
3170        readonly_keys = set()
3171        mutable_keys = set()
3172
3173        for base in bases:
3174            annotations.update(base.__dict__.get('__annotations__', {}))
3175
3176            base_required = base.__dict__.get('__required_keys__', set())
3177            required_keys |= base_required
3178            optional_keys -= base_required
3179
3180            base_optional = base.__dict__.get('__optional_keys__', set())
3181            required_keys -= base_optional
3182            optional_keys |= base_optional
3183
3184            readonly_keys.update(base.__dict__.get('__readonly_keys__', ()))
3185            mutable_keys.update(base.__dict__.get('__mutable_keys__', ()))
3186
3187        annotations.update(own_annotations)
3188        for annotation_key, annotation_type in own_annotations.items():
3189            qualifiers = set(_get_typeddict_qualifiers(annotation_type))
3190            if Required in qualifiers:
3191                is_required = True
3192            elif NotRequired in qualifiers:
3193                is_required = False
3194            else:
3195                is_required = total
3196
3197            if is_required:
3198                required_keys.add(annotation_key)
3199                optional_keys.discard(annotation_key)
3200            else:
3201                optional_keys.add(annotation_key)
3202                required_keys.discard(annotation_key)
3203
3204            if ReadOnly in qualifiers:
3205                if annotation_key in mutable_keys:
3206                    raise TypeError(
3207                        f"Cannot override mutable key {annotation_key!r}"
3208                        " with read-only key"
3209                    )
3210                readonly_keys.add(annotation_key)
3211            else:
3212                mutable_keys.add(annotation_key)
3213                readonly_keys.discard(annotation_key)
3214
3215        assert required_keys.isdisjoint(optional_keys), (
3216            f"Required keys overlap with optional keys in {name}:"
3217            f" {required_keys=}, {optional_keys=}"
3218        )
3219        tp_dict.__annotations__ = annotations
3220        tp_dict.__required_keys__ = frozenset(required_keys)
3221        tp_dict.__optional_keys__ = frozenset(optional_keys)
3222        tp_dict.__readonly_keys__ = frozenset(readonly_keys)
3223        tp_dict.__mutable_keys__ = frozenset(mutable_keys)
3224        tp_dict.__total__ = total
3225        return tp_dict
3226
3227    __call__ = dict  # static method
3228
3229    def __subclasscheck__(cls, other):
3230        # Typed dicts are only for static structural subtyping.
3231        raise TypeError('TypedDict does not support instance and class checks')
3232
3233    __instancecheck__ = __subclasscheck__
3234
3235
3236def TypedDict(typename, fields=_sentinel, /, *, total=True):
3237    """A simple typed namespace. At runtime it is equivalent to a plain dict.
3238
3239    TypedDict creates a dictionary type such that a type checker will expect all
3240    instances to have a certain set of keys, where each key is
3241    associated with a value of a consistent type. This expectation
3242    is not checked at runtime.
3243
3244    Usage::
3245
3246        >>> class Point2D(TypedDict):
3247        ...     x: int
3248        ...     y: int
3249        ...     label: str
3250        ...
3251        >>> a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
3252        >>> b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
3253        >>> Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
3254        True
3255
3256    The type info can be accessed via the Point2D.__annotations__ dict, and
3257    the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
3258    TypedDict supports an additional equivalent form::
3259
3260        Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
3261
3262    By default, all keys must be present in a TypedDict. It is possible
3263    to override this by specifying totality::
3264
3265        class Point2D(TypedDict, total=False):
3266            x: int
3267            y: int
3268
3269    This means that a Point2D TypedDict can have any of the keys omitted. A type
3270    checker is only expected to support a literal False or True as the value of
3271    the total argument. True is the default, and makes all items defined in the
3272    class body be required.
3273
3274    The Required and NotRequired special forms can also be used to mark
3275    individual keys as being required or not required::
3276
3277        class Point2D(TypedDict):
3278            x: int               # the "x" key must always be present (Required is the default)
3279            y: NotRequired[int]  # the "y" key can be omitted
3280
3281    See PEP 655 for more details on Required and NotRequired.
3282
3283    The ReadOnly special form can be used
3284    to mark individual keys as immutable for type checkers::
3285
3286        class DatabaseUser(TypedDict):
3287            id: ReadOnly[int]  # the "id" key must not be modified
3288            username: str      # the "username" key can be changed
3289
3290    """
3291    if fields is _sentinel or fields is None:
3292        import warnings
3293
3294        if fields is _sentinel:
3295            deprecated_thing = "Failing to pass a value for the 'fields' parameter"
3296        else:
3297            deprecated_thing = "Passing `None` as the 'fields' parameter"
3298
3299        example = f"`{typename} = TypedDict({typename!r}, {{{{}}}})`"
3300        deprecation_msg = (
3301            "{name} is deprecated and will be disallowed in Python {remove}. "
3302            "To create a TypedDict class with 0 fields "
3303            "using the functional syntax, "
3304            "pass an empty dictionary, e.g. "
3305        ) + example + "."
3306        warnings._deprecated(deprecated_thing, message=deprecation_msg, remove=(3, 15))
3307        fields = {}
3308
3309    ns = {'__annotations__': dict(fields)}
3310    module = _caller()
3311    if module is not None:
3312        # Setting correct module is necessary to make typed dict classes pickleable.
3313        ns['__module__'] = module
3314
3315    td = _TypedDictMeta(typename, (), ns, total=total)
3316    td.__orig_bases__ = (TypedDict,)
3317    return td
3318
3319_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
3320TypedDict.__mro_entries__ = lambda bases: (_TypedDict,)
3321
3322
3323@_SpecialForm
3324def Required(self, parameters):
3325    """Special typing construct to mark a TypedDict key as required.
3326
3327    This is mainly useful for total=False TypedDicts.
3328
3329    For example::
3330
3331        class Movie(TypedDict, total=False):
3332            title: Required[str]
3333            year: int
3334
3335        m = Movie(
3336            title='The Matrix',  # typechecker error if key is omitted
3337            year=1999,
3338        )
3339
3340    There is no runtime checking that a required key is actually provided
3341    when instantiating a related TypedDict.
3342    """
3343    item = _type_check(parameters, f'{self._name} accepts only a single type.')
3344    return _GenericAlias(self, (item,))
3345
3346
3347@_SpecialForm
3348def NotRequired(self, parameters):
3349    """Special typing construct to mark a TypedDict key as potentially missing.
3350
3351    For example::
3352
3353        class Movie(TypedDict):
3354            title: str
3355            year: NotRequired[int]
3356
3357        m = Movie(
3358            title='The Matrix',  # typechecker error if key is omitted
3359            year=1999,
3360        )
3361    """
3362    item = _type_check(parameters, f'{self._name} accepts only a single type.')
3363    return _GenericAlias(self, (item,))
3364
3365
3366@_SpecialForm
3367def ReadOnly(self, parameters):
3368    """A special typing construct to mark an item of a TypedDict as read-only.
3369
3370    For example::
3371
3372        class Movie(TypedDict):
3373            title: ReadOnly[str]
3374            year: int
3375
3376        def mutate_movie(m: Movie) -> None:
3377            m["year"] = 1992  # allowed
3378            m["title"] = "The Matrix"  # typechecker error
3379
3380    There is no runtime checking for this property.
3381    """
3382    item = _type_check(parameters, f'{self._name} accepts only a single type.')
3383    return _GenericAlias(self, (item,))
3384
3385
3386class NewType:
3387    """NewType creates simple unique types with almost zero runtime overhead.
3388
3389    NewType(name, tp) is considered a subtype of tp
3390    by static type checkers. At runtime, NewType(name, tp) returns
3391    a dummy callable that simply returns its argument.
3392
3393    Usage::
3394
3395        UserId = NewType('UserId', int)
3396
3397        def name_by_id(user_id: UserId) -> str:
3398            ...
3399
3400        UserId('user')          # Fails type check
3401
3402        name_by_id(42)          # Fails type check
3403        name_by_id(UserId(42))  # OK
3404
3405        num = UserId(5) + 1     # type: int
3406    """
3407
3408    __call__ = _idfunc
3409
3410    def __init__(self, name, tp):
3411        self.__qualname__ = name
3412        if '.' in name:
3413            name = name.rpartition('.')[-1]
3414        self.__name__ = name
3415        self.__supertype__ = tp
3416        def_mod = _caller()
3417        if def_mod != 'typing':
3418            self.__module__ = def_mod
3419
3420    def __mro_entries__(self, bases):
3421        # We defined __mro_entries__ to get a better error message
3422        # if a user attempts to subclass a NewType instance. bpo-46170
3423        superclass_name = self.__name__
3424
3425        class Dummy:
3426            def __init_subclass__(cls):
3427                subclass_name = cls.__name__
3428                raise TypeError(
3429                    f"Cannot subclass an instance of NewType. Perhaps you were looking for: "
3430                    f"`{subclass_name} = NewType({subclass_name!r}, {superclass_name})`"
3431                )
3432
3433        return (Dummy,)
3434
3435    def __repr__(self):
3436        return f'{self.__module__}.{self.__qualname__}'
3437
3438    def __reduce__(self):
3439        return self.__qualname__
3440
3441    def __or__(self, other):
3442        return Union[self, other]
3443
3444    def __ror__(self, other):
3445        return Union[other, self]
3446
3447
3448# Python-version-specific alias (Python 2: unicode; Python 3: str)
3449Text = str
3450
3451
3452# Constant that's True when type checking, but False here.
3453TYPE_CHECKING = False
3454
3455
3456class IO(Generic[AnyStr]):
3457    """Generic base class for TextIO and BinaryIO.
3458
3459    This is an abstract, generic version of the return of open().
3460
3461    NOTE: This does not distinguish between the different possible
3462    classes (text vs. binary, read vs. write vs. read/write,
3463    append-only, unbuffered).  The TextIO and BinaryIO subclasses
3464    below capture the distinctions between text vs. binary, which is
3465    pervasive in the interface; however we currently do not offer a
3466    way to track the other distinctions in the type system.
3467    """
3468
3469    __slots__ = ()
3470
3471    @property
3472    @abstractmethod
3473    def mode(self) -> str:
3474        pass
3475
3476    @property
3477    @abstractmethod
3478    def name(self) -> str:
3479        pass
3480
3481    @abstractmethod
3482    def close(self) -> None:
3483        pass
3484
3485    @property
3486    @abstractmethod
3487    def closed(self) -> bool:
3488        pass
3489
3490    @abstractmethod
3491    def fileno(self) -> int:
3492        pass
3493
3494    @abstractmethod
3495    def flush(self) -> None:
3496        pass
3497
3498    @abstractmethod
3499    def isatty(self) -> bool:
3500        pass
3501
3502    @abstractmethod
3503    def read(self, n: int = -1) -> AnyStr:
3504        pass
3505
3506    @abstractmethod
3507    def readable(self) -> bool:
3508        pass
3509
3510    @abstractmethod
3511    def readline(self, limit: int = -1) -> AnyStr:
3512        pass
3513
3514    @abstractmethod
3515    def readlines(self, hint: int = -1) -> List[AnyStr]:
3516        pass
3517
3518    @abstractmethod
3519    def seek(self, offset: int, whence: int = 0) -> int:
3520        pass
3521
3522    @abstractmethod
3523    def seekable(self) -> bool:
3524        pass
3525
3526    @abstractmethod
3527    def tell(self) -> int:
3528        pass
3529
3530    @abstractmethod
3531    def truncate(self, size: int = None) -> int:
3532        pass
3533
3534    @abstractmethod
3535    def writable(self) -> bool:
3536        pass
3537
3538    @abstractmethod
3539    def write(self, s: AnyStr) -> int:
3540        pass
3541
3542    @abstractmethod
3543    def writelines(self, lines: List[AnyStr]) -> None:
3544        pass
3545
3546    @abstractmethod
3547    def __enter__(self) -> 'IO[AnyStr]':
3548        pass
3549
3550    @abstractmethod
3551    def __exit__(self, type, value, traceback) -> None:
3552        pass
3553
3554
3555class BinaryIO(IO[bytes]):
3556    """Typed version of the return of open() in binary mode."""
3557
3558    __slots__ = ()
3559
3560    @abstractmethod
3561    def write(self, s: Union[bytes, bytearray]) -> int:
3562        pass
3563
3564    @abstractmethod
3565    def __enter__(self) -> 'BinaryIO':
3566        pass
3567
3568
3569class TextIO(IO[str]):
3570    """Typed version of the return of open() in text mode."""
3571
3572    __slots__ = ()
3573
3574    @property
3575    @abstractmethod
3576    def buffer(self) -> BinaryIO:
3577        pass
3578
3579    @property
3580    @abstractmethod
3581    def encoding(self) -> str:
3582        pass
3583
3584    @property
3585    @abstractmethod
3586    def errors(self) -> Optional[str]:
3587        pass
3588
3589    @property
3590    @abstractmethod
3591    def line_buffering(self) -> bool:
3592        pass
3593
3594    @property
3595    @abstractmethod
3596    def newlines(self) -> Any:
3597        pass
3598
3599    @abstractmethod
3600    def __enter__(self) -> 'TextIO':
3601        pass
3602
3603
3604def reveal_type[T](obj: T, /) -> T:
3605    """Ask a static type checker to reveal the inferred type of an expression.
3606
3607    When a static type checker encounters a call to ``reveal_type()``,
3608    it will emit the inferred type of the argument::
3609
3610        x: int = 1
3611        reveal_type(x)
3612
3613    Running a static type checker (e.g., mypy) on this example
3614    will produce output similar to 'Revealed type is "builtins.int"'.
3615
3616    At runtime, the function prints the runtime type of the
3617    argument and returns the argument unchanged.
3618    """
3619    print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
3620    return obj
3621
3622
3623class _IdentityCallable(Protocol):
3624    def __call__[T](self, arg: T, /) -> T:
3625        ...
3626
3627
3628def dataclass_transform(
3629    *,
3630    eq_default: bool = True,
3631    order_default: bool = False,
3632    kw_only_default: bool = False,
3633    frozen_default: bool = False,
3634    field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (),
3635    **kwargs: Any,
3636) -> _IdentityCallable:
3637    """Decorator to mark an object as providing dataclass-like behaviour.
3638
3639    The decorator can be applied to a function, class, or metaclass.
3640
3641    Example usage with a decorator function::
3642
3643        @dataclass_transform()
3644        def create_model[T](cls: type[T]) -> type[T]:
3645            ...
3646            return cls
3647
3648        @create_model
3649        class CustomerModel:
3650            id: int
3651            name: str
3652
3653    On a base class::
3654
3655        @dataclass_transform()
3656        class ModelBase: ...
3657
3658        class CustomerModel(ModelBase):
3659            id: int
3660            name: str
3661
3662    On a metaclass::
3663
3664        @dataclass_transform()
3665        class ModelMeta(type): ...
3666
3667        class ModelBase(metaclass=ModelMeta): ...
3668
3669        class CustomerModel(ModelBase):
3670            id: int
3671            name: str
3672
3673    The ``CustomerModel`` classes defined above will
3674    be treated by type checkers similarly to classes created with
3675    ``@dataclasses.dataclass``.
3676    For example, type checkers will assume these classes have
3677    ``__init__`` methods that accept ``id`` and ``name``.
3678
3679    The arguments to this decorator can be used to customize this behavior:
3680    - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
3681        ``True`` or ``False`` if it is omitted by the caller.
3682    - ``order_default`` indicates whether the ``order`` parameter is
3683        assumed to be True or False if it is omitted by the caller.
3684    - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
3685        assumed to be True or False if it is omitted by the caller.
3686    - ``frozen_default`` indicates whether the ``frozen`` parameter is
3687        assumed to be True or False if it is omitted by the caller.
3688    - ``field_specifiers`` specifies a static list of supported classes
3689        or functions that describe fields, similar to ``dataclasses.field()``.
3690    - Arbitrary other keyword arguments are accepted in order to allow for
3691        possible future extensions.
3692
3693    At runtime, this decorator records its arguments in the
3694    ``__dataclass_transform__`` attribute on the decorated object.
3695    It has no other runtime effect.
3696
3697    See PEP 681 for more details.
3698    """
3699    def decorator(cls_or_fn):
3700        cls_or_fn.__dataclass_transform__ = {
3701            "eq_default": eq_default,
3702            "order_default": order_default,
3703            "kw_only_default": kw_only_default,
3704            "frozen_default": frozen_default,
3705            "field_specifiers": field_specifiers,
3706            "kwargs": kwargs,
3707        }
3708        return cls_or_fn
3709    return decorator
3710
3711
3712type _Func = Callable[..., Any]
3713
3714
3715def override[F: _Func](method: F, /) -> F:
3716    """Indicate that a method is intended to override a method in a base class.
3717
3718    Usage::
3719
3720        class Base:
3721            def method(self) -> None:
3722                pass
3723
3724        class Child(Base):
3725            @override
3726            def method(self) -> None:
3727                super().method()
3728
3729    When this decorator is applied to a method, the type checker will
3730    validate that it overrides a method or attribute with the same name on a
3731    base class.  This helps prevent bugs that may occur when a base class is
3732    changed without an equivalent change to a child class.
3733
3734    There is no runtime checking of this property. The decorator attempts to
3735    set the ``__override__`` attribute to ``True`` on the decorated object to
3736    allow runtime introspection.
3737
3738    See PEP 698 for details.
3739    """
3740    try:
3741        method.__override__ = True
3742    except (AttributeError, TypeError):
3743        # Skip the attribute silently if it is not writable.
3744        # AttributeError happens if the object has __slots__ or a
3745        # read-only property, TypeError if it's a builtin class.
3746        pass
3747    return method
3748
3749
3750def is_protocol(tp: type, /) -> bool:
3751    """Return True if the given type is a Protocol.
3752
3753    Example::
3754
3755        >>> from typing import Protocol, is_protocol
3756        >>> class P(Protocol):
3757        ...     def a(self) -> str: ...
3758        ...     b: int
3759        >>> is_protocol(P)
3760        True
3761        >>> is_protocol(int)
3762        False
3763    """
3764    return (
3765        isinstance(tp, type)
3766        and getattr(tp, '_is_protocol', False)
3767        and tp != Protocol
3768    )
3769
3770
3771def get_protocol_members(tp: type, /) -> frozenset[str]:
3772    """Return the set of members defined in a Protocol.
3773
3774    Example::
3775
3776        >>> from typing import Protocol, get_protocol_members
3777        >>> class P(Protocol):
3778        ...     def a(self) -> str: ...
3779        ...     b: int
3780        >>> get_protocol_members(P) == frozenset({'a', 'b'})
3781        True
3782
3783    Raise a TypeError for arguments that are not Protocols.
3784    """
3785    if not is_protocol(tp):
3786        raise TypeError(f'{tp!r} is not a Protocol')
3787    return frozenset(tp.__protocol_attrs__)
3788
3789
3790def __getattr__(attr):
3791    """Improve the import time of the typing module.
3792
3793    Soft-deprecated objects which are costly to create
3794    are only created on-demand here.
3795    """
3796    if attr in {"Pattern", "Match"}:
3797        import re
3798        obj = _alias(getattr(re, attr), 1)
3799    elif attr in {"ContextManager", "AsyncContextManager"}:
3800        import contextlib
3801        obj = _alias(getattr(contextlib, f"Abstract{attr}"), 2, name=attr, defaults=(bool | None,))
3802    elif attr == "_collect_parameters":
3803        import warnings
3804
3805        depr_message = (
3806            "The private _collect_parameters function is deprecated and will be"
3807            " removed in a future version of Python. Any use of private functions"
3808            " is discouraged and may break in the future."
3809        )
3810        warnings.warn(depr_message, category=DeprecationWarning, stacklevel=2)
3811        obj = _collect_type_parameters
3812    else:
3813        raise AttributeError(f"module {__name__!r} has no attribute {attr!r}")
3814    globals()[attr] = obj
3815    return obj
3816