"""Semantic analysis of types"""

from __future__ import annotations

import itertools
from collections.abc import Iterable, Iterator, Sequence
from contextlib import contextmanager
from typing import Callable, Final, Protocol, TypeVar

from mypy import errorcodes as codes, message_registry, nodes
from mypy.errorcodes import ErrorCode
from mypy.expandtype import expand_type
from mypy.message_registry import (
    INVALID_PARAM_SPEC_LOCATION,
    INVALID_PARAM_SPEC_LOCATION_NOTE,
    TYPEDDICT_OVERRIDE_MERGE,
)
from mypy.messages import (
    MessageBuilder,
    format_type,
    format_type_bare,
    quote_type_string,
    wrong_type_arg_count,
)
from mypy.nodes import (
    ARG_NAMED,
    ARG_NAMED_OPT,
    ARG_OPT,
    ARG_POS,
    ARG_STAR,
    ARG_STAR2,
    MISSING_FALLBACK,
    SYMBOL_FUNCBASE_TYPES,
    ArgKind,
    Context,
    Decorator,
    ImportFrom,
    MypyFile,
    ParamSpecExpr,
    PlaceholderNode,
    SymbolTableNode,
    TypeAlias,
    TypeInfo,
    TypeVarExpr,
    TypeVarLikeExpr,
    TypeVarTupleExpr,
    Var,
    check_arg_kinds,
    check_arg_names,
    get_nongen_builtins,
)
from mypy.options import INLINE_TYPEDDICT, Options
from mypy.plugin import AnalyzeTypeContext, Plugin, TypeAnalyzerPluginInterface
from mypy.semanal_shared import (
    SemanticAnalyzerCoreInterface,
    SemanticAnalyzerInterface,
    paramspec_args,
    paramspec_kwargs,
)
from mypy.state import state
from mypy.tvar_scope import TypeVarLikeScope
from mypy.types import (
    ANNOTATED_TYPE_NAMES,
    ANY_STRATEGY,
    CONCATENATE_TYPE_NAMES,
    FINAL_TYPE_NAMES,
    LITERAL_TYPE_NAMES,
    NEVER_NAMES,
    TYPE_ALIAS_NAMES,
    UNPACK_TYPE_NAMES,
    AnyType,
    BoolTypeQuery,
    CallableArgument,
    CallableType,
    DeletedType,
    EllipsisType,
    ErasedType,
    Instance,
    LiteralType,
    NoneType,
    Overloaded,
    Parameters,
    ParamSpecFlavor,
    ParamSpecType,
    PartialType,
    PlaceholderType,
    ProperType,
    RawExpressionType,
    ReadOnlyType,
    RequiredType,
    SyntheticTypeVisitor,
    TrivialSyntheticTypeTranslator,
    TupleType,
    Type,
    TypeAliasType,
    TypedDictType,
    TypeList,
    TypeOfAny,
    TypeQuery,
    TypeType,
    TypeVarId,
    TypeVarLikeType,
    TypeVarTupleType,
    TypeVarType,
    UnboundType,
    UninhabitedType,
    UnionType,
    UnpackType,
    callable_with_ellipsis,
    find_unpack_in_list,
    flatten_nested_tuples,
    get_proper_type,
    has_type_vars,
)
from mypy.types_utils import get_bad_type_type_item
from mypy.typevars import fill_typevars

T = TypeVar("T")

type_constructors: Final = {
    "typing.Callable",
    "typing.Optional",
    "typing.Tuple",
    "typing.Type",
    "typing.Union",
    *LITERAL_TYPE_NAMES,
    *ANNOTATED_TYPE_NAMES,
}

ARG_KINDS_BY_CONSTRUCTOR: Final = {
    "mypy_extensions.Arg": ARG_POS,
    "mypy_extensions.DefaultArg": ARG_OPT,
    "mypy_extensions.NamedArg": ARG_NAMED,
    "mypy_extensions.DefaultNamedArg": ARG_NAMED_OPT,
    "mypy_extensions.VarArg": ARG_STAR,
    "mypy_extensions.KwArg": ARG_STAR2,
}
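# For example, these constructors appear in callable types written with the
# mypy_extensions helpers, roughly: Callable[[Arg(int, "x"), VarArg(str)], None].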

GENERIC_STUB_NOT_AT_RUNTIME_TYPES: Final = {
    "queue.Queue",
    "builtins._PathLike",
    "asyncio.futures.Future",
}
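# These are generic in typeshed stubs but reject subscription at runtime on older
# Python versions (e.g. queue.Queue[int] raises TypeError before Python 3.9).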

SELF_TYPE_NAMES: Final = {"typing.Self", "typing_extensions.Self"}


def analyze_type_alias(
    type: Type,
    api: SemanticAnalyzerCoreInterface,
    tvar_scope: TypeVarLikeScope,
    plugin: Plugin,
    options: Options,
    cur_mod_node: MypyFile,
    is_typeshed_stub: bool,
    allow_placeholder: bool = False,
    in_dynamic_func: bool = False,
    global_scope: bool = True,
    allowed_alias_tvars: list[TypeVarLikeType] | None = None,
    alias_type_params_names: list[str] | None = None,
    python_3_12_type_alias: bool = False,
) -> tuple[Type, set[str]]:
    """Analyze r.h.s. of a (potential) type alias definition.

    If `type` is valid as a type alias rvalue, return the resulting type and a set of
    full names of type aliases it depends on (directly or indirectly).
    `type` must have been semantically analyzed.
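
    For example (a sketch): analyzing the r.h.s. of "Pairs = list[IntPair]", where
    "IntPair" is itself a type alias, returns the analyzed list type together with
    a set containing the full name of "IntPair".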
    """
    analyzer = TypeAnalyser(
        api,
        tvar_scope,
        plugin,
        options,
        cur_mod_node,
        is_typeshed_stub,
        defining_alias=True,
        allow_placeholder=allow_placeholder,
        prohibit_self_type="type alias target",
        allowed_alias_tvars=allowed_alias_tvars,
        alias_type_params_names=alias_type_params_names,
        python_3_12_type_alias=python_3_12_type_alias,
    )
    analyzer.in_dynamic_func = in_dynamic_func
    analyzer.global_scope = global_scope
    res = analyzer.anal_type(type, nested=False)
    return res, analyzer.aliases_used


def no_subscript_builtin_alias(name: str, propose_alt: bool = True) -> str:
    class_name = name.split(".")[-1]
    msg = f'"{class_name}" is not subscriptable'
    # This should never be called if the python_version is 3.9 or newer
    nongen_builtins = get_nongen_builtins((3, 8))
    replacement = nongen_builtins[name]
    if replacement and propose_alt:
        msg += f', use "{replacement}" instead'
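    # Example result (a sketch, assuming "builtins.list" maps to "typing.List"):
    #   '"list" is not subscriptable, use "typing.List" instead'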
    return msg


class TypeAnalyser(SyntheticTypeVisitor[Type], TypeAnalyzerPluginInterface):
    """Semantic analyzer for types.

    Converts unbound types into bound types. This is a no-op for already
    bound types.

    If an incomplete reference is encountered, this does a defer. The
    caller never needs to defer.
    """

    # Is this called from an untyped function definition?
    in_dynamic_func: bool = False
    # Is this called from global scope?
    global_scope: bool = True

    def __init__(
        self,
        api: SemanticAnalyzerCoreInterface,
        tvar_scope: TypeVarLikeScope,
        plugin: Plugin,
        options: Options,
        cur_mod_node: MypyFile,
        is_typeshed_stub: bool,
        *,
        defining_alias: bool = False,
        python_3_12_type_alias: bool = False,
        allow_tuple_literal: bool = False,
        allow_unbound_tvars: bool = False,
        allow_placeholder: bool = False,
        allow_typed_dict_special_forms: bool = False,
        allow_final: bool = True,
        allow_param_spec_literals: bool = False,
        allow_unpack: bool = False,
        report_invalid_types: bool = True,
        prohibit_self_type: str | None = None,
        prohibit_special_class_field_types: str | None = None,
        allowed_alias_tvars: list[TypeVarLikeType] | None = None,
        allow_type_any: bool = False,
        alias_type_params_names: list[str] | None = None,
    ) -> None:
        self.api = api
        self.fail_func = api.fail
        self.note_func = api.note
        self.tvar_scope = tvar_scope
        # Are we analysing a type alias definition rvalue?
        self.defining_alias = defining_alias
        self.python_3_12_type_alias = python_3_12_type_alias
        self.allow_tuple_literal = allow_tuple_literal
        # Positive if we are analyzing arguments of another (outer) type
        self.nesting_level = 0
        # Should we allow new type syntax when targeting older Python versions
        # like 'list[int]' or 'X | Y' (allowed in stubs and with `__future__` import)?
        self.always_allow_new_syntax = self.api.is_stub_file or self.api.is_future_flag_set(
            "annotations"
        )
        # Should we accept unbound type variables? This is currently used for class bases,
        # and alias right hand sides (before they are analyzed as type aliases).
        self.allow_unbound_tvars = allow_unbound_tvars
        if allowed_alias_tvars is None:
            allowed_alias_tvars = []
        self.allowed_alias_tvars = allowed_alias_tvars
        self.alias_type_params_names = alias_type_params_names
        # If False, record an incomplete ref when we generate a PlaceholderType.
        self.allow_placeholder = allow_placeholder
        # Are we in a context where Required[] is allowed?
        self.allow_typed_dict_special_forms = allow_typed_dict_special_forms
        # Is Final[...] allowed in this context? (When analyzing ClassVar arguments,
        # it is only allowed on Python 3.13+.)
        self.allow_final = allow_final
        # Are we in a context where ParamSpec literals are allowed?
        self.allow_param_spec_literals = allow_param_spec_literals
        # Are we in context where literal "..." specifically is allowed?
        self.allow_ellipsis = False
        # Should we report an error whenever we encounter a RawExpressionType outside
        # of a Literal context: e.g. whenever we encounter an invalid type? Normally,
        # we want to report an error, but the caller may want to do more specialized
        # error handling.
        self.report_invalid_types = report_invalid_types
        self.plugin = plugin
        self.options = options
        self.cur_mod_node = cur_mod_node
        self.is_typeshed_stub = is_typeshed_stub
        # Names of type aliases encountered while analysing a type will be collected here.
        self.aliases_used: set[str] = set()
        self.prohibit_self_type = prohibit_self_type
        # Set when we analyze TypedDicts or NamedTuples, since they are special:
        self.prohibit_special_class_field_types = prohibit_special_class_field_types
        # Allow variables typed as Type[Any] and type (useful for base classes).
        self.allow_type_any = allow_type_any
        self.allow_type_var_tuple = False
        self.allow_unpack = allow_unpack

    def lookup_qualified(
        self, name: str, ctx: Context, suppress_errors: bool = False
    ) -> SymbolTableNode | None:
        return self.api.lookup_qualified(name, ctx, suppress_errors)

    def lookup_fully_qualified(self, fullname: str) -> SymbolTableNode:
        return self.api.lookup_fully_qualified(fullname)

    def visit_unbound_type(self, t: UnboundType, defining_literal: bool = False) -> Type:
        typ = self.visit_unbound_type_nonoptional(t, defining_literal)
        if t.optional:
            # We don't need to worry about double-wrapping Optionals or
            # wrapping Anys: Union simplification will take care of that.
            return make_optional_type(typ)
        return typ

    def not_declared_in_type_params(self, tvar_name: str) -> bool:
        return (
            self.alias_type_params_names is not None
            and tvar_name not in self.alias_type_params_names
        )

    def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) -> Type:
        sym = self.lookup_qualified(t.name, t)
        param_spec_name = None
        if t.name.endswith((".args", ".kwargs")):
            param_spec_name = t.name.rsplit(".", 1)[0]
            maybe_param_spec = self.lookup_qualified(param_spec_name, t)
            if maybe_param_spec and isinstance(maybe_param_spec.node, ParamSpecExpr):
                sym = maybe_param_spec
            else:
                param_spec_name = None

        if sym is not None:
            node = sym.node
            if isinstance(node, PlaceholderNode):
                if node.becomes_typeinfo:
                    # Reference to placeholder type.
                    if self.api.final_iteration:
                        self.cannot_resolve_type(t)
                        return AnyType(TypeOfAny.from_error)
                    elif self.allow_placeholder:
                        self.api.defer()
                    else:
                        self.api.record_incomplete_ref()
                    # Always allow ParamSpec for placeholders; if they are actually not
                    # valid, they will be reported later, after we resolve placeholders.
                    return PlaceholderType(
                        node.fullname,
                        self.anal_array(
                            t.args,
                            allow_param_spec=True,
                            allow_param_spec_literals=True,
                            allow_unpack=True,
                        ),
                        t.line,
                    )
                else:
                    if self.api.final_iteration:
                        self.cannot_resolve_type(t)
                        return AnyType(TypeOfAny.from_error)
                    else:
                        # Reference to an unknown placeholder node.
                        self.api.record_incomplete_ref()
                        return AnyType(TypeOfAny.special_form)
            if node is None:
                self.fail(f"Internal error (node is None, kind={sym.kind})", t)
                return AnyType(TypeOfAny.special_form)
            fullname = node.fullname
            hook = self.plugin.get_type_analyze_hook(fullname)
            if hook is not None:
                return hook(AnalyzeTypeContext(t, t, self))
            if (
                fullname in get_nongen_builtins(self.options.python_version)
                and t.args
                and not self.always_allow_new_syntax
            ):
                self.fail(
                    no_subscript_builtin_alias(fullname, propose_alt=not self.defining_alias), t
                )
            tvar_def = self.tvar_scope.get_binding(sym)
            if isinstance(sym.node, ParamSpecExpr):
                if tvar_def is None:
                    if self.allow_unbound_tvars:
                        return t
                    name = param_spec_name or t.name
                    if self.defining_alias and self.not_declared_in_type_params(t.name):
                        msg = f'ParamSpec "{name}" is not included in type_params'
                    else:
                        msg = f'ParamSpec "{name}" is unbound'
                    self.fail(msg, t, code=codes.VALID_TYPE)
                    return AnyType(TypeOfAny.from_error)
                assert isinstance(tvar_def, ParamSpecType)
                if len(t.args) > 0:
                    self.fail(
                        f'ParamSpec "{t.name}" used with arguments', t, code=codes.VALID_TYPE
                    )
                if param_spec_name is not None and not self.allow_param_spec_literals:
                    self.fail(
                        "ParamSpec components are not allowed here", t, code=codes.VALID_TYPE
                    )
                    return AnyType(TypeOfAny.from_error)
                # Change the line number
                return ParamSpecType(
                    tvar_def.name,
                    tvar_def.fullname,
                    tvar_def.id,
                    tvar_def.flavor,
                    tvar_def.upper_bound,
                    tvar_def.default,
                    line=t.line,
                    column=t.column,
                )
            if (
                isinstance(sym.node, TypeVarExpr)
                and self.defining_alias
                and not defining_literal
                and (tvar_def is None or tvar_def not in self.allowed_alias_tvars)
            ):
                if self.not_declared_in_type_params(t.name):
                    if self.python_3_12_type_alias:
                        msg = message_registry.TYPE_PARAMETERS_SHOULD_BE_DECLARED.format(
                            f'"{t.name}"'
                        )
                    else:
                        msg = f'Type variable "{t.name}" is not included in type_params'
                else:
                    msg = f'Can\'t use bound type variable "{t.name}" to define generic alias'
                self.fail(msg, t, code=codes.VALID_TYPE)
                return AnyType(TypeOfAny.from_error)
            if isinstance(sym.node, TypeVarExpr) and tvar_def is not None:
                assert isinstance(tvar_def, TypeVarType)
                if len(t.args) > 0:
                    self.fail(
                        f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE
                    )
                # Change the line number
                return tvar_def.copy_modified(line=t.line, column=t.column)
            if isinstance(sym.node, TypeVarTupleExpr) and (
                tvar_def is not None
                and self.defining_alias
                and tvar_def not in self.allowed_alias_tvars
            ):
                if self.not_declared_in_type_params(t.name):
                    msg = f'Type variable "{t.name}" is not included in type_params'
                else:
                    msg = f'Can\'t use bound type variable "{t.name}" to define generic alias'
                self.fail(msg, t, code=codes.VALID_TYPE)
                return AnyType(TypeOfAny.from_error)
            if isinstance(sym.node, TypeVarTupleExpr):
                if tvar_def is None:
                    if self.allow_unbound_tvars:
                        return t
                    if self.defining_alias and self.not_declared_in_type_params(t.name):
                        if self.python_3_12_type_alias:
                            msg = message_registry.TYPE_PARAMETERS_SHOULD_BE_DECLARED.format(
                                f'"{t.name}"'
                            )
                        else:
                            msg = f'TypeVarTuple "{t.name}" is not included in type_params'
                    else:
                        msg = f'TypeVarTuple "{t.name}" is unbound'
                    self.fail(msg, t, code=codes.VALID_TYPE)
                    return AnyType(TypeOfAny.from_error)
                assert isinstance(tvar_def, TypeVarTupleType)
                if not self.allow_type_var_tuple:
                    self.fail(
                        f'TypeVarTuple "{t.name}" is only valid with an unpack',
                        t,
                        code=codes.VALID_TYPE,
                    )
                    return AnyType(TypeOfAny.from_error)
                if len(t.args) > 0:
                    self.fail(
                        f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE
                    )

                # Change the line number
                return TypeVarTupleType(
                    tvar_def.name,
                    tvar_def.fullname,
                    tvar_def.id,
                    tvar_def.upper_bound,
                    sym.node.tuple_fallback,
                    tvar_def.default,
                    line=t.line,
                    column=t.column,
                )
            special = self.try_analyze_special_unbound_type(t, fullname)
            if special is not None:
                return special
            if isinstance(node, TypeAlias):
                self.aliases_used.add(fullname)
                an_args = self.anal_array(
                    t.args,
                    allow_param_spec=True,
                    allow_param_spec_literals=node.has_param_spec_type,
                    allow_unpack=True,  # Fixed length unpacks can be used for non-variadic aliases.
                )
                if node.has_param_spec_type and len(node.alias_tvars) == 1:
                    an_args = self.pack_paramspec_args(an_args)

                disallow_any = self.options.disallow_any_generics and not self.is_typeshed_stub
                res = instantiate_type_alias(
                    node,
                    an_args,
                    self.fail,
                    node.no_args,
                    t,
                    self.options,
                    unexpanded_type=t,
                    disallow_any=disallow_any,
                    empty_tuple_index=t.empty_tuple_index,
                )
                # The only case where instantiate_type_alias() can return an incorrect
                # instance is when it is a top-level instance, so there is no need to recurse.
                if (
                    isinstance(res, ProperType)
                    and isinstance(res, Instance)
                    and not (self.defining_alias and self.nesting_level == 0)
                    and not validate_instance(res, self.fail, t.empty_tuple_index)
                ):
                    fix_instance(
                        res,
                        self.fail,
                        self.note,
                        disallow_any=disallow_any,
                        options=self.options,
                        use_generic_error=True,
                        unexpanded_type=t,
                    )
                if node.eager:
                    res = get_proper_type(res)
                return res
            elif isinstance(node, TypeInfo):
                return self.analyze_type_with_type_info(node, t.args, t, t.empty_tuple_index)
            elif node.fullname in TYPE_ALIAS_NAMES:
                return AnyType(TypeOfAny.special_form)
            # Concatenate is an operator, no need for a proper type
            elif node.fullname in CONCATENATE_TYPE_NAMES:
                # We check the return type further up the stack for valid use locations
                return self.apply_concatenate_operator(t)
            else:
                return self.analyze_unbound_type_without_type_info(t, sym, defining_literal)
        else:  # sym is None
            return AnyType(TypeOfAny.special_form)

    def pack_paramspec_args(self, an_args: Sequence[Type]) -> list[Type]:
        # "Aesthetic" ParamSpec literals for single ParamSpec: C[int, str] -> C[[int, str]].
        # These do not support mypy_extensions VarArgs, etc. as they were already analyzed
        # TODO: should these be re-analyzed to get rid of this inconsistency?
        count = len(an_args)
        if count == 0:
            return []
        if count == 1 and isinstance(get_proper_type(an_args[0]), AnyType):
            # A single Any is interpreted as ..., rather than as a single argument with
            # Any type. I didn't find this in the PEP, but it sounds reasonable.
            return list(an_args)
        if any(isinstance(a, (Parameters, ParamSpecType)) for a in an_args):
            if len(an_args) > 1:
                first_wrong = next(
                    arg for arg in an_args if isinstance(arg, (Parameters, ParamSpecType))
                )
                self.fail(
                    "Nested parameter specifications are not allowed",
                    first_wrong,
                    code=codes.VALID_TYPE,
                )
                return [AnyType(TypeOfAny.from_error)]
            return list(an_args)
        first = an_args[0]
        return [
            Parameters(
                an_args, [ARG_POS] * count, [None] * count, line=first.line, column=first.column
            )
        ]

    def cannot_resolve_type(self, t: UnboundType) -> None:
        # TODO: Move error message generation to messages.py. We'd first
        #       need access to MessageBuilder here. Also move the similar
        #       message generation logic in semanal.py.
        self.api.fail(f'Cannot resolve name "{t.name}" (possible cyclic definition)', t)
        if self.api.is_func_scope():
            self.note("Recursive types are not allowed at function scope", t)

    def apply_concatenate_operator(self, t: UnboundType) -> Type:
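        # For example (a sketch): Concatenate[int, P], with P a bound ParamSpec,
        # comes back as P with an "int" positional parameter prepended to its prefix.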
        if len(t.args) == 0:
            self.api.fail("Concatenate needs type arguments", t, code=codes.VALID_TYPE)
            return AnyType(TypeOfAny.from_error)

        # Last argument has to be ParamSpec or Ellipsis.
        ps = self.anal_type(t.args[-1], allow_param_spec=True, allow_ellipsis=True)
        if not isinstance(ps, (ParamSpecType, Parameters)):
            if isinstance(ps, UnboundType) and self.allow_unbound_tvars:
                sym = self.lookup_qualified(ps.name, t)
                if sym is not None and isinstance(sym.node, ParamSpecExpr):
                    return ps
            self.api.fail(
                "The last parameter to Concatenate needs to be a ParamSpec",
                t,
                code=codes.VALID_TYPE,
            )
            return AnyType(TypeOfAny.from_error)
        elif isinstance(ps, ParamSpecType) and ps.prefix.arg_types:
            self.api.fail("Nested Concatenates are invalid", t, code=codes.VALID_TYPE)

        args = self.anal_array(t.args[:-1])
        pre = ps.prefix if isinstance(ps, ParamSpecType) else ps

        # mypy can't infer this :(
        names: list[str | None] = [None] * len(args)

        pre = Parameters(
            args + pre.arg_types,
            [ARG_POS] * len(args) + pre.arg_kinds,
            names + pre.arg_names,
            line=t.line,
            column=t.column,
        )
        return ps.copy_modified(prefix=pre) if isinstance(ps, ParamSpecType) else pre

    def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Type | None:
        """Bind special type that is recognized through magic name such as 'typing.Any'.

        Return the bound type if successful, and return None if the type is a normal type.
        """
        if fullname == "builtins.None":
            return NoneType()
        elif fullname == "typing.Any":
            return AnyType(TypeOfAny.explicit, line=t.line, column=t.column)
        elif fullname in FINAL_TYPE_NAMES:
            if self.prohibit_special_class_field_types:
                self.fail(
                    f"Final[...] can't be used inside a {self.prohibit_special_class_field_types}",
                    t,
                    code=codes.VALID_TYPE,
                )
            else:
                if not self.allow_final:
                    self.fail(
                        "Final can be only used as an outermost qualifier in a variable annotation",
                        t,
                        code=codes.VALID_TYPE,
                    )
            return AnyType(TypeOfAny.from_error)
        elif fullname == "typing.Tuple" or (
            fullname == "builtins.tuple"
            and (self.always_allow_new_syntax or self.options.python_version >= (3, 9))
        ):
            # Tuple is special because it is involved in the builtins import cycle
            # and may not be ready when used.
            sym = self.api.lookup_fully_qualified_or_none("builtins.tuple")
            if not sym or isinstance(sym.node, PlaceholderNode):
                if self.api.is_incomplete_namespace("builtins"):
                    self.api.record_incomplete_ref()
                else:
                    self.fail('Name "tuple" is not defined', t)
                return AnyType(TypeOfAny.special_form)
            if len(t.args) == 0 and not t.empty_tuple_index:
                # Bare 'Tuple' is the same as 'tuple'
                any_type = self.get_omitted_any(t)
                return self.named_type("builtins.tuple", [any_type], line=t.line, column=t.column)
            if len(t.args) == 2 and isinstance(t.args[1], EllipsisType):
                # Tuple[T, ...] (uniform, variable-length tuple)
                instance = self.named_type("builtins.tuple", [self.anal_type(t.args[0])])
                instance.line = t.line
                return instance
            return self.tuple_type(
                self.anal_array(t.args, allow_unpack=True), line=t.line, column=t.column
            )
        elif fullname == "typing.Union":
            items = self.anal_array(t.args)
            return UnionType.make_union(items)
        elif fullname == "typing.Optional":
            if len(t.args) != 1:
                self.fail(
                    "Optional[...] must have exactly one type argument", t, code=codes.VALID_TYPE
                )
                return AnyType(TypeOfAny.from_error)
            item = self.anal_type(t.args[0])
            return make_optional_type(item)
        elif fullname == "typing.Callable":
            return self.analyze_callable_type(t)
        elif fullname == "typing.Type" or (
            fullname == "builtins.type"
            and (self.always_allow_new_syntax or self.options.python_version >= (3, 9))
        ):
            if len(t.args) == 0:
                if fullname == "typing.Type":
                    any_type = self.get_omitted_any(t)
                    return TypeType(any_type, line=t.line, column=t.column)
                else:
                    # To prevent assignment of 'builtins.type' inferred as 'builtins.object'
                    # See https://github.com/python/mypy/issues/9476 for more information
                    return None
            type_str = "Type[...]" if fullname == "typing.Type" else "type[...]"
            if len(t.args) != 1:
                self.fail(
                    f"{type_str} must have exactly one type argument", t, code=codes.VALID_TYPE
                )
            item = self.anal_type(t.args[0])
            bad_item_name = get_bad_type_type_item(item)
            if bad_item_name:
                self.fail(f'{type_str} can\'t contain "{bad_item_name}"', t, code=codes.VALID_TYPE)
                item = AnyType(TypeOfAny.from_error)
            return TypeType.make_normalized(item, line=t.line, column=t.column)
        elif fullname == "typing.ClassVar":
            if self.nesting_level > 0:
                self.fail(
                    "Invalid type: ClassVar nested inside other type", t, code=codes.VALID_TYPE
                )
            if self.prohibit_special_class_field_types:
                self.fail(
                    f"ClassVar[...] can't be used inside a {self.prohibit_special_class_field_types}",
                    t,
                    code=codes.VALID_TYPE,
                )
            if len(t.args) == 0:
                return AnyType(TypeOfAny.from_omitted_generics, line=t.line, column=t.column)
            if len(t.args) != 1:
                self.fail(
                    "ClassVar[...] must have at most one type argument", t, code=codes.VALID_TYPE
                )
                return AnyType(TypeOfAny.from_error)
            return self.anal_type(t.args[0], allow_final=self.options.python_version >= (3, 13))
        elif fullname in NEVER_NAMES:
            return UninhabitedType()
        elif fullname in LITERAL_TYPE_NAMES:
            return self.analyze_literal_type(t)
        elif fullname in ANNOTATED_TYPE_NAMES:
            if len(t.args) < 2:
                self.fail(
                    "Annotated[...] must have exactly one type argument"
                    " and at least one annotation",
                    t,
                    code=codes.VALID_TYPE,
                )
                return AnyType(TypeOfAny.from_error)
            return self.anal_type(
                t.args[0], allow_typed_dict_special_forms=self.allow_typed_dict_special_forms
            )
        elif fullname in ("typing_extensions.Required", "typing.Required"):
            if not self.allow_typed_dict_special_forms:
                self.fail(
                    "Required[] can be only used in a TypedDict definition",
                    t,
                    code=codes.VALID_TYPE,
                )
                return AnyType(TypeOfAny.from_error)
            if len(t.args) != 1:
                self.fail(
                    "Required[] must have exactly one type argument", t, code=codes.VALID_TYPE
                )
                return AnyType(TypeOfAny.from_error)
            return RequiredType(
                self.anal_type(t.args[0], allow_typed_dict_special_forms=True), required=True
            )
        elif fullname in ("typing_extensions.NotRequired", "typing.NotRequired"):
            if not self.allow_typed_dict_special_forms:
                self.fail(
                    "NotRequired[] can be only used in a TypedDict definition",
                    t,
                    code=codes.VALID_TYPE,
                )
                return AnyType(TypeOfAny.from_error)
            if len(t.args) != 1:
                self.fail(
                    "NotRequired[] must have exactly one type argument", t, code=codes.VALID_TYPE
                )
                return AnyType(TypeOfAny.from_error)
            return RequiredType(
                self.anal_type(t.args[0], allow_typed_dict_special_forms=True), required=False
            )
        elif fullname in ("typing_extensions.ReadOnly", "typing.ReadOnly"):
            if not self.allow_typed_dict_special_forms:
                self.fail(
                    "ReadOnly[] can be only used in a TypedDict definition",
                    t,
                    code=codes.VALID_TYPE,
                )
                return AnyType(TypeOfAny.from_error)
            if len(t.args) != 1:
                self.fail(
                    '"ReadOnly[]" must have exactly one type argument', t, code=codes.VALID_TYPE
                )
                return AnyType(TypeOfAny.from_error)
            return ReadOnlyType(self.anal_type(t.args[0], allow_typed_dict_special_forms=True))
        elif (
            self.anal_type_guard_arg(t, fullname) is not None
            or self.anal_type_is_arg(t, fullname) is not None
        ):
            # In most contexts, TypeGuard[...] acts as an alias for bool (ignoring its args)
            return self.named_type("builtins.bool")
        elif fullname in UNPACK_TYPE_NAMES:
            if len(t.args) != 1:
                self.fail("Unpack[...] requires exactly one type argument", t)
                return AnyType(TypeOfAny.from_error)
            if not self.allow_unpack:
                self.fail(message_registry.INVALID_UNPACK_POSITION, t, code=codes.VALID_TYPE)
                return AnyType(TypeOfAny.from_error)
            self.allow_type_var_tuple = True
            result = UnpackType(self.anal_type(t.args[0]), line=t.line, column=t.column)
            self.allow_type_var_tuple = False
            return result
        elif fullname in SELF_TYPE_NAMES:
            if t.args:
                self.fail("Self type cannot have type arguments", t)
            if self.prohibit_self_type is not None:
                self.fail(f"Self type cannot be used in {self.prohibit_self_type}", t)
                return AnyType(TypeOfAny.from_error)
            if self.api.type is None:
                self.fail("Self type is only allowed in annotations within class definition", t)
                return AnyType(TypeOfAny.from_error)
            if self.api.type.has_base("builtins.type"):
                self.fail("Self type cannot be used in a metaclass", t)
            if self.api.type.self_type is not None:
                if self.api.type.is_final:
                    return fill_typevars(self.api.type)
                return self.api.type.self_type.copy_modified(line=t.line, column=t.column)
            # TODO: verify this is unreachable and replace with an assert?
            self.fail("Unexpected Self type", t)
            return AnyType(TypeOfAny.from_error)
        return None

    def get_omitted_any(self, typ: Type, fullname: str | None = None) -> AnyType:
        disallow_any = not self.is_typeshed_stub and self.options.disallow_any_generics
        return get_omitted_any(disallow_any, self.fail, self.note, typ, self.options, fullname)

    def check_and_warn_deprecated(self, info: TypeInfo, ctx: Context) -> None:
        """Similar logic to `TypeChecker.check_deprecated` and `TypeChecker.warn_deprecated."""

        if (
            (deprecated := info.deprecated)
            and not self.is_typeshed_stub
            and not (self.api.type and (self.api.type.fullname == info.fullname))
            and not any(
                info.fullname == p or info.fullname.startswith(f"{p}.")
                for p in self.options.deprecated_calls_exclude
            )
        ):
            for imp in self.cur_mod_node.imports:
                if isinstance(imp, ImportFrom) and any(info.name == n[0] for n in imp.names):
                    break
            else:
                warn = self.note if self.options.report_deprecated_as_note else self.fail
                warn(deprecated, ctx, code=codes.DEPRECATED)

    def analyze_type_with_type_info(
        self, info: TypeInfo, args: Sequence[Type], ctx: Context, empty_tuple_index: bool
    ) -> Type:
        """Bind unbound type when were able to find target TypeInfo.

        This handles simple cases like 'int', 'modname.UserClass[str]', etc.
        """

        self.check_and_warn_deprecated(info, ctx)

        if len(args) > 0 and info.fullname == "builtins.tuple":
            fallback = Instance(info, [AnyType(TypeOfAny.special_form)], ctx.line)
            return TupleType(self.anal_array(args, allow_unpack=True), fallback, ctx.line)

        # Analyze arguments and (usually) construct Instance type. The
        # number of type arguments and their values are
        # checked only later, since we do not always know the
        # valid count at this point. Thus we may construct an
        # Instance with an invalid number of type arguments.
        #
        # We allow ParamSpec literals based on a heuristic: it will be
        # checked later anyway, but the error message may be worse.
        instance = Instance(
            info,
            self.anal_array(
                args,
                allow_param_spec=True,
                allow_param_spec_literals=info.has_param_spec_type,
                allow_unpack=True,  # Fixed length tuples can be used for non-variadic types.
            ),
            ctx.line,
            ctx.column,
        )
        instance.end_line = ctx.end_line
        instance.end_column = ctx.end_column
        if len(info.type_vars) == 1 and info.has_param_spec_type:
            instance.args = tuple(self.pack_paramspec_args(instance.args))

        # Check type argument count.
        instance.args = tuple(flatten_nested_tuples(instance.args))
        if not (self.defining_alias and self.nesting_level == 0) and not validate_instance(
            instance, self.fail, empty_tuple_index
        ):
            fix_instance(
                instance,
                self.fail,
                self.note,
                disallow_any=self.options.disallow_any_generics and not self.is_typeshed_stub,
                options=self.options,
            )

        tup = info.tuple_type
        if tup is not None:
            # The class has a Tuple[...] base class so it will be
            # represented as a tuple type.
            if info.special_alias:
                return instantiate_type_alias(
                    info.special_alias,
                    # TODO: should we allow NamedTuples generic in ParamSpec?
                    self.anal_array(args, allow_unpack=True),
                    self.fail,
                    False,
                    ctx,
                    self.options,
                    use_standard_error=True,
                )
            return tup.copy_modified(
                items=self.anal_array(tup.items, allow_unpack=True), fallback=instance
            )
        td = info.typeddict_type
        if td is not None:
            # The class has a TypedDict[...] base class so it will be
            # represented as a typeddict type.
            if info.special_alias:
                return instantiate_type_alias(
                    info.special_alias,
                    # TODO: should we allow TypedDicts generic in ParamSpec?
                    self.anal_array(args, allow_unpack=True),
                    self.fail,
                    False,
                    ctx,
                    self.options,
                    use_standard_error=True,
                )
            # Create a named TypedDictType
            return td.copy_modified(
                item_types=self.anal_array(list(td.items.values())), fallback=instance
            )

        if info.fullname == "types.NoneType":
            self.fail(
                "NoneType should not be used as a type, please use None instead",
                ctx,
                code=codes.VALID_TYPE,
            )
            return NoneType(ctx.line, ctx.column)

        return instance

    def analyze_unbound_type_without_type_info(
        self, t: UnboundType, sym: SymbolTableNode, defining_literal: bool
    ) -> Type:
        """Figure out what an unbound type that doesn't refer to a TypeInfo node means.

        This is something unusual. We try our best to find out what it is.
        """
        name = sym.fullname
        if name is None:
            assert sym.node is not None
            name = sym.node.name
        # Option 1:
        # Something with an Any type -- make it an alias for Any in a type
        # context. This is slightly problematic as it allows using the type 'Any'
        # as a base class -- however, this will fail soon at runtime so the problem
        # is pretty minor.
        if isinstance(sym.node, Var):
            typ = get_proper_type(sym.node.type)
            if isinstance(typ, AnyType):
                return AnyType(
                    TypeOfAny.from_unimported_type, missing_import_name=typ.missing_import_name
                )
            elif self.allow_type_any:
                if isinstance(typ, Instance) and typ.type.fullname == "builtins.type":
                    return AnyType(TypeOfAny.special_form)
                if isinstance(typ, TypeType) and isinstance(typ.item, AnyType):
                    return AnyType(TypeOfAny.from_another_any, source_any=typ.item)
        # Option 2:
        # Unbound type variable. Currently these may still be valid,
        # for example when defining a generic type alias.
        unbound_tvar = (
            isinstance(sym.node, (TypeVarExpr, TypeVarTupleExpr))
            and self.tvar_scope.get_binding(sym) is None
        )
        if self.allow_unbound_tvars and unbound_tvar:
            return t

        # Option 3:
        # Enum value. Note: we only want to return a LiteralType when
        # we're using this enum value specifically within context of
        # a "Literal[...]" type. So, if `defining_literal` is not set,
        # we bail out early with an error.
        #
        # If, in the distant future, we decide to permit things like
        # `def foo(x: Color.RED) -> None: ...`, we can remove that
        # check entirely.
        if (
            isinstance(sym.node, Var)
            and sym.node.info
            and sym.node.info.is_enum
            and not sym.node.name.startswith("__")
        ):
            value = sym.node.name
            base_enum_short_name = sym.node.info.name
            if not defining_literal:
                msg = message_registry.INVALID_TYPE_RAW_ENUM_VALUE.format(
                    base_enum_short_name, value
                )
                self.fail(msg.value, t, code=msg.code)
                return AnyType(TypeOfAny.from_error)
            return LiteralType(
                value=value,
                fallback=Instance(sym.node.info, [], line=t.line, column=t.column),
                line=t.line,
                column=t.column,
            )

        # None of the above options worked. We parse the args (if there are any)
        # to make sure there are no remaining semanal-only types, then give up.
        t = t.copy_modified(args=self.anal_array(t.args))
        # TODO: Move this message building logic to messages.py.
        notes: list[str] = []
        error_code = codes.VALID_TYPE
        if isinstance(sym.node, Var):
            notes.append(
                "See https://mypy.readthedocs.io/en/"
                "stable/common_issues.html#variables-vs-type-aliases"
            )
            message = 'Variable "{}" is not valid as a type'
        elif isinstance(sym.node, (SYMBOL_FUNCBASE_TYPES, Decorator)):
            message = 'Function "{}" is not valid as a type'
            if name == "builtins.any":
                notes.append('Perhaps you meant "typing.Any" instead of "any"?')
            elif name == "builtins.callable":
                notes.append('Perhaps you meant "typing.Callable" instead of "callable"?')
            else:
                notes.append('Perhaps you need "Callable[...]" or a callback protocol?')
        elif isinstance(sym.node, MypyFile):
            message = 'Module "{}" is not valid as a type'
            notes.append("Perhaps you meant to use a protocol matching the module structure?")
        elif unbound_tvar:
            assert isinstance(sym.node, TypeVarLikeExpr)
            if sym.node.is_new_style:
                # PEP 695 type parameters are never considered unbound -- they are undefined
                # in contexts where they aren't valid, such as in argument default values.
                message = 'Name "{}" is not defined'
                name = name.split(".")[-1]
                error_code = codes.NAME_DEFINED
            else:
                message = 'Type variable "{}" is unbound'
                short = name.split(".")[-1]
                notes.append(
                    f'(Hint: Use "Generic[{short}]" or "Protocol[{short}]" base class'
                    f' to bind "{short}" inside a class)'
                )
                notes.append(
                    f'(Hint: Use "{short}" in function signature '
                    f'to bind "{short}" inside a function)'
                )
        else:
            message = 'Cannot interpret reference "{}" as a type'
        if not defining_literal:
            # Literal check already gives a custom error. Avoid duplicating errors.
            self.fail(message.format(name), t, code=error_code)
            for note in notes:
                self.note(note, t, code=error_code)

        # TODO: Would it be better to always return Any instead of UnboundType
        # in case of an error? On one hand, UnboundType has a name so error messages
        # are more detailed, on the other hand, some of them may be bogus,
        # see https://github.com/python/mypy/issues/4987.
        return t

    def visit_any(self, t: AnyType) -> Type:
        return t

    def visit_none_type(self, t: NoneType) -> Type:
        return t

    def visit_uninhabited_type(self, t: UninhabitedType) -> Type:
        return t

    def visit_erased_type(self, t: ErasedType) -> Type:
        # This type should exist only temporarily during type inference
        assert False, "Internal error: Unexpected erased type"

    def visit_deleted_type(self, t: DeletedType) -> Type:
        return t

    def visit_type_list(self, t: TypeList) -> Type:
        # Parameters literal (Z[[int, str, Whatever]])
        if self.allow_param_spec_literals:
            params = self.analyze_callable_args(t)
            if params:
                ts, kinds, names = params
                # bind these types
                return Parameters(self.anal_array(ts), kinds, names, line=t.line, column=t.column)
            else:
                return AnyType(TypeOfAny.from_error)
        else:
            self.fail(
                'Bracketed expression "[...]" is not valid as a type', t, code=codes.VALID_TYPE
            )
            if len(t.items) == 1:
                self.note('Did you mean "List[...]"?', t)
            return AnyType(TypeOfAny.from_error)

    def visit_callable_argument(self, t: CallableArgument) -> Type:
        self.fail("Invalid type", t, code=codes.VALID_TYPE)
        return AnyType(TypeOfAny.from_error)

    def visit_instance(self, t: Instance) -> Type:
        return t

    def visit_type_alias_type(self, t: TypeAliasType) -> Type:
        # TODO: should we do something here?
        return t

    def visit_type_var(self, t: TypeVarType) -> Type:
        return t

    def visit_param_spec(self, t: ParamSpecType) -> Type:
        return t

    def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type:
        return t

    def visit_unpack_type(self, t: UnpackType) -> Type:
        if not self.allow_unpack:
            self.fail(message_registry.INVALID_UNPACK_POSITION, t.type, code=codes.VALID_TYPE)
            return AnyType(TypeOfAny.from_error)
        self.allow_type_var_tuple = True
        result = UnpackType(self.anal_type(t.type), from_star_syntax=t.from_star_syntax)
        self.allow_type_var_tuple = False
        return result

    def visit_parameters(self, t: Parameters) -> Type:
        raise NotImplementedError("ParamSpec literals cannot have unbound TypeVars")

    def visit_callable_type(
        self, t: CallableType, nested: bool = True, namespace: str = ""
    ) -> Type:
        # Every Callable can bind its own type variables, if they're not in the outer scope
        # TODO: attach namespace for nested free type variables (these appear in return type only).
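        # For example, a signature like "def f(*args: P.args, **kwargs: P.kwargs) -> int"
        # reaches this method as a CallableType; the ParamSpec args/kwargs pairing and
        # any Unpack items are validated below.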
        with self.tvar_scope_frame(namespace=namespace):
            unpacked_kwargs = t.unpack_kwargs
            if self.defining_alias:
                variables = t.variables
            else:
                variables, _ = self.bind_function_type_variables(t, t)
            type_guard = self.anal_type_guard(t.ret_type)
            type_is = self.anal_type_is(t.ret_type)

            arg_kinds = t.arg_kinds
            arg_types = []
            param_spec_with_args = param_spec_with_kwargs = None
            param_spec_invalid = False
            for kind, ut in zip(arg_kinds, t.arg_types):
                if kind == ARG_STAR:
                    param_spec_with_args, at = self.anal_star_arg_type(ut, kind, nested=nested)
                elif kind == ARG_STAR2:
                    param_spec_with_kwargs, at = self.anal_star_arg_type(ut, kind, nested=nested)
                else:
                    if param_spec_with_args:
                        param_spec_invalid = True
                        self.fail(
                            "Arguments not allowed after ParamSpec.args", t, code=codes.VALID_TYPE
                        )
                    at = self.anal_type(ut, nested=nested, allow_unpack=False)
                arg_types.append(at)

            if nested and arg_types:
                # If we've got a Callable[[Unpack[SomeTypedDict]], None], make sure
                # Unpack is interpreted as `**` and not as `*`.
                last = arg_types[-1]
                if isinstance(last, UnpackType):
                    # TODO: it would be better to avoid this get_proper_type() call.
                    p_at = get_proper_type(last.type)
                    if isinstance(p_at, TypedDictType) and not last.from_star_syntax:
                        # Automatically detect Unpack[Foo] in Callable as backwards
                        # compatible syntax for **Foo, if Foo is a TypedDict.
                        arg_kinds[-1] = ARG_STAR2
                        arg_types[-1] = p_at
                        unpacked_kwargs = True
                arg_types = self.check_unpacks_in_list(arg_types)

            if not param_spec_invalid and param_spec_with_args != param_spec_with_kwargs:
                # If already invalid, do not report more errors - definition has
                # to be fixed anyway
                name = param_spec_with_args or param_spec_with_kwargs
                self.fail(
                    f'ParamSpec must have "*args" typed as "{name}.args" and "**kwargs" typed as "{name}.kwargs"',
                    t,
                    code=codes.VALID_TYPE,
                )
                param_spec_invalid = True

            if param_spec_invalid:
                if ARG_STAR in arg_kinds:
                    arg_types[arg_kinds.index(ARG_STAR)] = AnyType(TypeOfAny.from_error)
                if ARG_STAR2 in arg_kinds:
                    arg_types[arg_kinds.index(ARG_STAR2)] = AnyType(TypeOfAny.from_error)

            # If there were multiple (invalid) unpacks, the arg types list will become shorter,
            # we need to trim the kinds/names as well to avoid crashes.
            arg_kinds = t.arg_kinds[: len(arg_types)]
            arg_names = t.arg_names[: len(arg_types)]

            ret = t.copy_modified(
                arg_types=arg_types,
                arg_kinds=arg_kinds,
                arg_names=arg_names,
                ret_type=self.anal_type(t.ret_type, nested=nested),
                # If the fallback isn't filled in yet,
                # its type will be the falsey FakeInfo
                fallback=(t.fallback if t.fallback.type else self.named_type("builtins.function")),
                variables=self.anal_var_defs(variables),
                type_guard=type_guard,
                type_is=type_is,
                unpack_kwargs=unpacked_kwargs,
            )
        return ret

    def anal_type_guard(self, t: Type) -> Type | None:
        if isinstance(t, UnboundType):
            sym = self.lookup_qualified(t.name, t)
            if sym is not None and sym.node is not None:
                return self.anal_type_guard_arg(t, sym.node.fullname)
        # TODO: What if it's an Instance? Then use t.type.fullname?
        return None

    def anal_type_guard_arg(self, t: UnboundType, fullname: str) -> Type | None:
        if fullname in ("typing_extensions.TypeGuard", "typing.TypeGuard"):
            if len(t.args) != 1:
                self.fail(
                    "TypeGuard must have exactly one type argument", t, code=codes.VALID_TYPE
                )
                return AnyType(TypeOfAny.from_error)
            return self.anal_type(t.args[0])
        return None

    def anal_type_is(self, t: Type) -> Type | None:
        if isinstance(t, UnboundType):
            sym = self.lookup_qualified(t.name, t)
            if sym is not None and sym.node is not None:
                return self.anal_type_is_arg(t, sym.node.fullname)
        # TODO: What if it's an Instance? Then use t.type.fullname?
        return None

    def anal_type_is_arg(self, t: UnboundType, fullname: str) -> Type | None:
        if fullname in ("typing_extensions.TypeIs", "typing.TypeIs"):
            if len(t.args) != 1:
                self.fail("TypeIs must have exactly one type argument", t, code=codes.VALID_TYPE)
                return AnyType(TypeOfAny.from_error)
            return self.anal_type(t.args[0])
        return None

    def anal_star_arg_type(self, t: Type, kind: ArgKind, nested: bool) -> tuple[str | None, Type]:
        """Analyze signature argument type for *args and **kwargs argument."""
        if isinstance(t, UnboundType) and t.name and "." in t.name and not t.args:
            components = t.name.split(".")
            tvar_name = ".".join(components[:-1])
            sym = self.lookup_qualified(tvar_name, t)
            if sym is not None and isinstance(sym.node, ParamSpecExpr):
                tvar_def = self.tvar_scope.get_binding(sym)
                if isinstance(tvar_def, ParamSpecType):
                    if kind == ARG_STAR:
                        make_paramspec = paramspec_args
                        if components[-1] != "args":
                            self.fail(
                                f'Use "{tvar_name}.args" for variadic "*" parameter',
                                t,
                                code=codes.VALID_TYPE,
                            )
                    elif kind == ARG_STAR2:
                        make_paramspec = paramspec_kwargs
                        if components[-1] != "kwargs":
                            self.fail(
                                f'Use "{tvar_name}.kwargs" for variadic "**" parameter',
                                t,
                                code=codes.VALID_TYPE,
                            )
                    else:
                        assert False, kind
                    return tvar_name, make_paramspec(
                        tvar_def.name,
                        tvar_def.fullname,
                        tvar_def.id,
                        named_type_func=self.named_type,
                        line=t.line,
                        column=t.column,
                    )
        return None, self.anal_type(t, nested=nested, allow_unpack=True)

    def visit_overloaded(self, t: Overloaded) -> Type:
        # Overloaded types are manually constructed in semanal.py by analyzing the
        # AST and combining together the Callable types this visitor converts.
        #
        # So if we're ever asked to reanalyze an Overloaded type, we know it's
        # fine to just return it as-is.
        return t

    def visit_tuple_type(self, t: TupleType) -> Type:
        # Types such as (t1, t2, ...) are only allowed in assignment statements. They'll
        # generate errors elsewhere, and Tuple[t1, t2, ...] must be used instead.
        if t.implicit and not self.allow_tuple_literal:
            self.fail("Syntax error in type annotation", t, code=codes.SYNTAX)
            if len(t.items) == 0:
                self.note(
                    "Suggestion: Use Tuple[()] instead of () for an empty tuple, or "
                    "None for a function without a return value",
                    t,
                    code=codes.SYNTAX,
                )
            elif len(t.items) == 1:
                self.note("Suggestion: Is there a spurious trailing comma?", t, code=codes.SYNTAX)
            else:
                self.note(
                    "Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn)",
                    t,
                    code=codes.SYNTAX,
                )
            return AnyType(TypeOfAny.from_error)

        any_type = AnyType(TypeOfAny.special_form)
        # If the fallback isn't filled in yet, its type will be the falsey FakeInfo
        fallback = (
            t.partial_fallback
            if t.partial_fallback.type
            else self.named_type("builtins.tuple", [any_type])
        )
        return TupleType(self.anal_array(t.items, allow_unpack=True), fallback, t.line)

    def visit_typeddict_type(self, t: TypedDictType) -> Type:
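        # Re-analyze each item type, unwrapping Required[...]/NotRequired[...] and
        # ReadOnly[...] special forms into the required/readonly key sets.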
        req_keys = set()
        readonly_keys = set()
        items = {}
        for item_name, item_type in t.items.items():
            # TODO: rework
            analyzed = self.anal_type(item_type, allow_typed_dict_special_forms=True)
            if isinstance(analyzed, RequiredType):
                if analyzed.required:
                    req_keys.add(item_name)
                analyzed = analyzed.item
            else:
                # Keys are required by default.
                req_keys.add(item_name)
            if isinstance(analyzed, ReadOnlyType):
                readonly_keys.add(item_name)
                analyzed = analyzed.item
            items[item_name] = analyzed
        if t.fallback.type is MISSING_FALLBACK:  # anonymous/inline TypedDict
            if INLINE_TYPEDDICT not in self.options.enable_incomplete_feature:
                self.fail(
                    "Inline TypedDict is experimental,"
                    " must be enabled with --enable-incomplete-feature=InlineTypedDict",
                    t,
                )
            required_keys = req_keys
            fallback = self.named_type("typing._TypedDict")
            for typ in t.extra_items_from:
                analyzed = self.analyze_type(typ)
                p_analyzed = get_proper_type(analyzed)
                if not isinstance(p_analyzed, TypedDictType):
                    if not isinstance(p_analyzed, (AnyType, PlaceholderType)):
                        self.fail("Can only merge-in other TypedDict", t, code=codes.VALID_TYPE)
                    continue
                for sub_item_name, sub_item_type in p_analyzed.items.items():
                    if sub_item_name in items:
                        self.fail(TYPEDDICT_OVERRIDE_MERGE.format(sub_item_name), t)
                        continue
                    items[sub_item_name] = sub_item_type
                    if sub_item_name in p_analyzed.required_keys:
                        req_keys.add(sub_item_name)
                    if sub_item_name in p_analyzed.readonly_keys:
                        readonly_keys.add(sub_item_name)
        else:
            required_keys = t.required_keys
            fallback = t.fallback
        return TypedDictType(items, required_keys, readonly_keys, fallback, t.line, t.column)

    def visit_raw_expression_type(self, t: RawExpressionType) -> Type:
        # We should never see a bare Literal. We synthesize these raw literals
        # in the earlier stages of semantic analysis, but those
        # "fake literals" should always be wrapped in an UnboundType
        # corresponding to 'Literal'.
        #
        # Note: if at some point in the distant future, we decide to
        # make signatures like "foo(x: 20) -> None" legal, we can change
        # this method so it generates and returns an actual LiteralType
        # instead.

        if self.report_invalid_types:
            if t.base_type_name in ("builtins.int", "builtins.bool"):
                # The only time it makes sense to use an int or bool is inside
                # a literal type.
                msg = f"Invalid type: try using Literal[{repr(t.literal_value)}] instead?"
            elif t.base_type_name in ("builtins.float", "builtins.complex"):
                # We special-case warnings for floats and complex numbers.
                msg = f"Invalid type: {t.simple_name()} literals cannot be used as a type"
            else:
                # And in all other cases, we default to a generic error message.
                # Note: we use a generic error message for strings but not for ints
                # or bools because whenever we see an out-of-place string, it's
                # unclear whether the user meant to construct a literal type or just
                # misspelled a regular type. So we avoid guessing.
                msg = "Invalid type comment or annotation"

            self.fail(msg, t, code=codes.VALID_TYPE)
            if t.note is not None:
                self.note(t.note, t, code=codes.VALID_TYPE)

        return AnyType(TypeOfAny.from_error, line=t.line, column=t.column)

    def visit_literal_type(self, t: LiteralType) -> Type:
        return t

    def visit_union_type(self, t: UnionType) -> Type:
        if (
            t.uses_pep604_syntax is True
            and t.is_evaluated is True
            and not self.always_allow_new_syntax
            and not self.options.python_version >= (3, 10)
        ):
            self.fail("X | Y syntax for unions requires Python 3.10", t, code=codes.SYNTAX)
        return UnionType(self.anal_array(t.items), t.line, uses_pep604_syntax=t.uses_pep604_syntax)

    def visit_partial_type(self, t: PartialType) -> Type:
        assert False, "Internal error: Unexpected partial type"

    def visit_ellipsis_type(self, t: EllipsisType) -> Type:
        if self.allow_ellipsis or self.allow_param_spec_literals:
            any_type = AnyType(TypeOfAny.explicit)
            return Parameters(
                [any_type, any_type], [ARG_STAR, ARG_STAR2], [None, None], is_ellipsis_args=True
            )
        else:
            self.fail('Unexpected "..."', t)
            return AnyType(TypeOfAny.from_error)

    def visit_type_type(self, t: TypeType) -> Type:
        return TypeType.make_normalized(self.anal_type(t.item), line=t.line)

    def visit_placeholder_type(self, t: PlaceholderType) -> Type:
        n = (
            None
            # No dot in fullname indicates we are at function scope, and recursive
            # types are not supported there anyway, so we just give up.
            if not t.fullname or "." not in t.fullname
            else self.api.lookup_fully_qualified(t.fullname)
        )
        if not n or isinstance(n.node, PlaceholderNode):
            self.api.defer()  # Still incomplete
            return t
        else:
            # TODO: Handle non-TypeInfo
            assert isinstance(n.node, TypeInfo)
            return self.analyze_type_with_type_info(n.node, t.args, t, False)

    def analyze_callable_args_for_paramspec(
        self, callable_args: Type, ret_type: Type, fallback: Instance
    ) -> CallableType | None:
        """Construct a 'Callable[P, RET]', where P is ParamSpec, return None if we cannot."""
        if not isinstance(callable_args, UnboundType):
            return None
        sym = self.lookup_qualified(callable_args.name, callable_args)
        if sym is None:
            return None
        tvar_def = self.tvar_scope.get_binding(sym)
        if not isinstance(tvar_def, ParamSpecType):
            if (
                tvar_def is None
                and self.allow_unbound_tvars
                and isinstance(sym.node, ParamSpecExpr)
            ):
                # We are analyzing this type in a runtime context (e.g. as a type
                # application). If it is not valid as a type in this position, an
                # error will be given later.
                return callable_with_ellipsis(
                    AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback
                )
            return None
        elif (
            self.defining_alias
            and self.not_declared_in_type_params(tvar_def.name)
            and tvar_def not in self.allowed_alias_tvars
        ):
            if self.python_3_12_type_alias:
                msg = message_registry.TYPE_PARAMETERS_SHOULD_BE_DECLARED.format(
                    f'"{tvar_def.name}"'
                )
            else:
                msg = f'ParamSpec "{tvar_def.name}" is not included in type_params'
            self.fail(msg, callable_args, code=codes.VALID_TYPE)
            return callable_with_ellipsis(
                AnyType(TypeOfAny.special_form), ret_type=ret_type, fallback=fallback
            )

        return CallableType(
            [
                paramspec_args(
                    tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type
                ),
                paramspec_kwargs(
                    tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type
                ),
            ],
            [nodes.ARG_STAR, nodes.ARG_STAR2],
            [None, None],
            ret_type=ret_type,
            fallback=fallback,
        )

    def analyze_callable_args_for_concatenate(
        self, callable_args: Type, ret_type: Type, fallback: Instance
    ) -> CallableType | AnyType | None:
        """Construct a 'Callable[C, RET]', where C is Concatenate[..., P], returning None if we
        cannot.
        """
        if not isinstance(callable_args, UnboundType):
            return None
        sym = self.lookup_qualified(callable_args.name, callable_args)
        if sym is None:
            return None
        if sym.node is None:
            return None
        if sym.node.fullname not in CONCATENATE_TYPE_NAMES:
            return None

        tvar_def = self.anal_type(callable_args, allow_param_spec=True)
        if not isinstance(tvar_def, (ParamSpecType, Parameters)):
            if self.allow_unbound_tvars and isinstance(tvar_def, UnboundType):
                sym = self.lookup_qualified(tvar_def.name, callable_args)
                if sym is not None and isinstance(sym.node, ParamSpecExpr):
                    # We are analyzing this type in a runtime context (e.g. as a type
                    # application). If it is not valid as a type in this position, an
                    # error will be given later.
                    return callable_with_ellipsis(
                        AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback
                    )
            # Error was already given, so prevent further errors.
            return AnyType(TypeOfAny.from_error)
        if isinstance(tvar_def, Parameters):
            # This comes from Concatenate[int, ...]
            return CallableType(
                arg_types=tvar_def.arg_types,
                arg_names=tvar_def.arg_names,
                arg_kinds=tvar_def.arg_kinds,
                ret_type=ret_type,
                fallback=fallback,
                from_concatenate=True,
            )

        # ick, CallableType should take ParamSpecType
        prefix = tvar_def.prefix
        # we don't set the prefix here as generic arguments will get updated at some point
        # in the future. CallableType.param_spec() accounts for this.
        return CallableType(
            [
                *prefix.arg_types,
                paramspec_args(
                    tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type
                ),
                paramspec_kwargs(
                    tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type
                ),
            ],
            [*prefix.arg_kinds, nodes.ARG_STAR, nodes.ARG_STAR2],
            [*prefix.arg_names, None, None],
            ret_type=ret_type,
            fallback=fallback,
            from_concatenate=True,
        )

    def analyze_callable_type(self, t: UnboundType) -> Type:
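        """Analyze a use of Callable[...] in a type annotation.

        Handles bare Callable, Callable[[args], RET], Callable[..., RET],
        Callable[P, RET] and Callable[Concatenate[...], RET].
        """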
        fallback = self.named_type("builtins.function")
        if len(t.args) == 0:
            # Callable (bare). Treat as Callable[..., Any].
            any_type = self.get_omitted_any(t)
            ret = callable_with_ellipsis(any_type, any_type, fallback)
        elif len(t.args) == 2:
            callable_args = t.args[0]
            ret_type = t.args[1]
            if isinstance(callable_args, TypeList):
                # Callable[[ARG, ...], RET] (ordinary callable type)
                analyzed_args = self.analyze_callable_args(callable_args)
                if analyzed_args is None:
                    return AnyType(TypeOfAny.from_error)
                args, kinds, names = analyzed_args
                ret = CallableType(args, kinds, names, ret_type=ret_type, fallback=fallback)
            elif isinstance(callable_args, EllipsisType):
                # Callable[..., RET] (with literal ellipsis; accept arbitrary arguments)
                ret = callable_with_ellipsis(
                    AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback
                )
            else:
                # Callable[P, RET] (where P is ParamSpec)
                with self.tvar_scope_frame(namespace=""):
                    # Temporarily bind ParamSpecs to allow code like this:
                    #     my_fun: Callable[Q, Foo[Q]]
                    # We usually do this later in visit_callable_type(), but the analysis
                    # below happens at a very early stage.
                    variables = []
                    for name, tvar_expr in self.find_type_var_likes(callable_args):
                        variables.append(self.tvar_scope.bind_new(name, tvar_expr))
                    maybe_ret = self.analyze_callable_args_for_paramspec(
                        callable_args, ret_type, fallback
                    ) or self.analyze_callable_args_for_concatenate(
                        callable_args, ret_type, fallback
                    )
                    if isinstance(maybe_ret, CallableType):
                        maybe_ret = maybe_ret.copy_modified(variables=variables)
                if maybe_ret is None:
                    # Callable[?, RET] (where ? is something invalid)
                    self.fail(
                        "The first argument to Callable must be a "
                        'list of types, parameter specification, or "..."',
                        t,
                        code=codes.VALID_TYPE,
                    )
                    self.note(
                        "See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas",
                        t,
                    )
                    return AnyType(TypeOfAny.from_error)
                elif isinstance(maybe_ret, AnyType):
                    return maybe_ret
                ret = maybe_ret
        else:
            if self.options.disallow_any_generics:
                self.fail('Please use "Callable[[<parameters>], <return type>]"', t)
            else:
                self.fail('Please use "Callable[[<parameters>], <return type>]" or "Callable"', t)
            return AnyType(TypeOfAny.from_error)
        assert isinstance(ret, CallableType)
        return ret.accept(self)

    def refers_to_full_names(self, arg: UnboundType, names: Sequence[str]) -> bool:
        sym = self.lookup_qualified(arg.name, arg)
        if sym is not None:
            if sym.fullname in names:
                return True
        return False

    def analyze_callable_args(
        self, arglist: TypeList
    ) -> tuple[list[Type], list[ArgKind], list[str | None]] | None:
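        """Analyze the argument list of Callable[[...], RET].

        Return the argument types, kinds and names, or None if an error was already
        reported. An Unpack[...] item and the items following it are folded into a
        single variadic *args entry.
        """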
        args: list[Type] = []
        kinds: list[ArgKind] = []
        names: list[str | None] = []
        seen_unpack = False
        unpack_types: list[Type] = []
        invalid_unpacks: list[Type] = []
        second_unpack_last = False
        for i, arg in enumerate(arglist.items):
            if isinstance(arg, CallableArgument):
                args.append(arg.typ)
                names.append(arg.name)
                if arg.constructor is None:
                    return None
                found = self.lookup_qualified(arg.constructor, arg)
                if found is None:
                    # Looking it up already generated an error message
                    return None
                elif found.fullname not in ARG_KINDS_BY_CONSTRUCTOR:
                    self.fail(f'Invalid argument constructor "{found.fullname}"', arg)
                    return None
                else:
                    assert found.fullname is not None
                    kind = ARG_KINDS_BY_CONSTRUCTOR[found.fullname]
                    kinds.append(kind)
                    if arg.name is not None and kind.is_star():
                        self.fail(f"{arg.constructor} arguments should not have names", arg)
                        return None
            elif (
                isinstance(arg, UnboundType)
                and self.refers_to_full_names(arg, UNPACK_TYPE_NAMES)
                or isinstance(arg, UnpackType)
            ):
                if seen_unpack:
                    # Multiple unpacks, preserve them, so we can give an error later.
                    if i == len(arglist.items) - 1 and not invalid_unpacks:
                        # Special case: if there are just two unpacks, and the second one appears
                        # as the last type argument, it can still be valid if the second unpacked
                        # type is a TypedDict. This should be checked by the caller.
                        second_unpack_last = True
                    invalid_unpacks.append(arg)
                    continue
                seen_unpack = True
                unpack_types.append(arg)
            else:
                if seen_unpack:
                    unpack_types.append(arg)
                else:
                    args.append(arg)
                    kinds.append(ARG_POS)
                    names.append(None)
        if seen_unpack:
            if len(unpack_types) == 1:
                args.append(unpack_types[0])
            else:
                first = unpack_types[0]
                if isinstance(first, UnpackType):
                    # UnpackType doesn't have its own line/column numbers,
                    # so use the unpacked type for error messages.
                    first = first.type
                args.append(
                    UnpackType(self.tuple_type(unpack_types, line=first.line, column=first.column))
                )
            kinds.append(ARG_STAR)
            names.append(None)
        for arg in invalid_unpacks:
            args.append(arg)
            kinds.append(ARG_STAR2 if second_unpack_last else ARG_STAR)
            names.append(None)
        # Note that arglist below is only used for error context.
        check_arg_names(names, [arglist] * len(args), self.fail, "Callable")
        check_arg_kinds(kinds, [arglist] * len(args), self.fail)
        return args, kinds, names

    def analyze_literal_type(self, t: UnboundType) -> Type:
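        # Each parameter of Literal[...] is expanded into one or more LiteralTypes and
        # the results are combined into a union, e.g. Literal[1, "x"] is analyzed as
        # Union[Literal[1], Literal["x"]].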
        if len(t.args) == 0:
            self.fail("Literal[...] must have at least one parameter", t, code=codes.VALID_TYPE)
            return AnyType(TypeOfAny.from_error)

        output: list[Type] = []
        for i, arg in enumerate(t.args):
            analyzed_types = self.analyze_literal_param(i + 1, arg, t)
            if analyzed_types is None:
                return AnyType(TypeOfAny.from_error)
            else:
                output.extend(analyzed_types)
        return UnionType.make_union(output, line=t.line)

    def analyze_literal_param(self, idx: int, arg: Type, ctx: Context) -> list[Type] | None:
        # This UnboundType was originally defined as a string.
        if (
            isinstance(arg, ProperType)
            and isinstance(arg, (UnboundType, UnionType))
            and arg.original_str_expr is not None
        ):
            assert arg.original_str_fallback is not None
            return [
                LiteralType(
                    value=arg.original_str_expr,
                    fallback=self.named_type(arg.original_str_fallback),
                    line=arg.line,
                    column=arg.column,
                )
            ]

        # If arg is an UnboundType that was *not* originally defined as
        # a string, try expanding it in case it's a type alias or something.
        if isinstance(arg, UnboundType):
            self.nesting_level += 1
            try:
                arg = self.visit_unbound_type(arg, defining_literal=True)
            finally:
                self.nesting_level -= 1

        # Literal[...] cannot contain Any. Give up and add an error message
        # (if we haven't already).
        arg = get_proper_type(arg)
        if isinstance(arg, AnyType):
            # Note: We can encounter Literals containing 'Any' under three circumstances:
            #
            # 1. If the user attempts to use an explicit Any as a parameter
            # 2. If the user is trying to use an enum value imported from a module with
            #    no type hints, giving it an implicit type of 'Any'
            # 3. If there's some other underlying problem with the parameter.
            #
            # We report an error in only the first two cases. In the third case, we assume
            # some other region of the code has already reported a more relevant error.
            #
            # TODO: Once we start adding support for enums, make sure we report a custom
            # error for case 2 as well.
            if arg.type_of_any not in (TypeOfAny.from_error, TypeOfAny.special_form):
                self.fail(
                    f'Parameter {idx} of Literal[...] cannot be of type "Any"',
                    ctx,
                    code=codes.VALID_TYPE,
                )
            return None
        elif isinstance(arg, RawExpressionType):
            # A raw literal. Convert it directly into a literal if we can.
            if arg.literal_value is None:
                name = arg.simple_name()
                if name in ("float", "complex"):
                    msg = f'Parameter {idx} of Literal[...] cannot be of type "{name}"'
                else:
                    msg = "Invalid type: Literal[...] cannot contain arbitrary expressions"
                self.fail(msg, ctx, code=codes.VALID_TYPE)
                # Note: we deliberately ignore arg.note here: the extra info might normally be
                # helpful, but it generally won't make sense in the context of a Literal[...].
                return None

            # Remap bytes and unicode into the appropriate type for the correct Python version
            fallback = self.named_type(arg.base_type_name)
            assert isinstance(fallback, Instance)
            return [LiteralType(arg.literal_value, fallback, line=arg.line, column=arg.column)]
        elif isinstance(arg, (NoneType, LiteralType)):
            # Types that we can just add directly to the literal/potential union of literals.
            return [arg]
        elif isinstance(arg, Instance) and arg.last_known_value is not None:
            # Types generated from declarations like "var: Final = 4".
            return [arg.last_known_value]
        elif isinstance(arg, UnionType):
            out = []
            for union_arg in arg.items:
                union_result = self.analyze_literal_param(idx, union_arg, ctx)
                if union_result is None:
                    return None
                out.extend(union_result)
            return out
        else:
            self.fail(f"Parameter {idx} of Literal[...] is invalid", ctx, code=codes.VALID_TYPE)
            return None

    def analyze_type(self, typ: Type) -> Type:
        return typ.accept(self)

    def fail(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None:
        self.fail_func(msg, ctx, code=code)

    def note(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None:
        self.note_func(msg, ctx, code=code)

    @contextmanager
    def tvar_scope_frame(self, namespace: str) -> Iterator[None]:
        old_scope = self.tvar_scope
        self.tvar_scope = self.tvar_scope.method_frame(namespace)
        yield
        self.tvar_scope = old_scope

    def find_type_var_likes(self, t: Type) -> TypeVarLikeList:
        visitor = FindTypeVarVisitor(self.api, self.tvar_scope)
        t.accept(visitor)
        return visitor.type_var_likes

    def infer_type_variables(
        self, type: CallableType
    ) -> tuple[list[tuple[str, TypeVarLikeExpr]], bool]:
        """Infer type variables from a callable.

        Return tuple with these items:
         - list of unique type variables referred to in a callable
         - whether there is a reference to the Self type
        """
        visitor = FindTypeVarVisitor(self.api, self.tvar_scope)
        for arg in type.arg_types:
            arg.accept(visitor)

        # When finding type variables in the return type of a function, don't
        # look inside Callable types.  Type variables only appearing in
        # functions in the return type belong to those functions, not the
        # function we're currently analyzing.
        visitor.include_callables = False
        type.ret_type.accept(visitor)

        return visitor.type_var_likes, visitor.has_self_type

    def bind_function_type_variables(
        self, fun_type: CallableType, defn: Context
    ) -> tuple[Sequence[TypeVarLikeType], bool]:
        """Find the type variables of the function type and bind them in our tvar_scope"""
        has_self_type = False
        if fun_type.variables:
            defs = []
            for var in fun_type.variables:
                if self.api.type and self.api.type.self_type and var == self.api.type.self_type:
                    has_self_type = True
                    continue
                var_node = self.lookup_qualified(var.name, defn)
                assert var_node, "Binding for function type variable not found within function"
                var_expr = var_node.node
                assert isinstance(var_expr, TypeVarLikeExpr)
                binding = self.tvar_scope.bind_new(var.name, var_expr)
                defs.append(binding)
            return defs, has_self_type
        typevars, has_self_type = self.infer_type_variables(fun_type)
        # Do not define a new type variable if already defined in scope.
        typevars = [
            (name, tvar) for name, tvar in typevars if not self.is_defined_type_var(name, defn)
        ]
        defs = []
        for name, tvar in typevars:
            if not self.tvar_scope.allow_binding(tvar.fullname):
                self.fail(
                    f'Type variable "{name}" is bound by an outer class',
                    defn,
                    code=codes.VALID_TYPE,
                )
            binding = self.tvar_scope.bind_new(name, tvar)
            defs.append(binding)

        return defs, has_self_type

    def is_defined_type_var(self, tvar: str, context: Context) -> bool:
        tvar_node = self.lookup_qualified(tvar, context)
        if not tvar_node:
            return False
        return self.tvar_scope.get_binding(tvar_node) is not None

    def anal_array(
        self,
        a: Iterable[Type],
        nested: bool = True,
        *,
        allow_param_spec: bool = False,
        allow_param_spec_literals: bool = False,
        allow_unpack: bool = False,
    ) -> list[Type]:
        old_allow_param_spec_literals = self.allow_param_spec_literals
        self.allow_param_spec_literals = allow_param_spec_literals
        res: list[Type] = []
        for t in a:
            res.append(
                self.anal_type(
                    t, nested, allow_param_spec=allow_param_spec, allow_unpack=allow_unpack
                )
            )
        self.allow_param_spec_literals = old_allow_param_spec_literals
        return self.check_unpacks_in_list(res)

    def anal_type(
        self,
        t: Type,
        nested: bool = True,
        *,
        allow_param_spec: bool = False,
        allow_unpack: bool = False,
        allow_ellipsis: bool = False,
        allow_typed_dict_special_forms: bool = False,
        allow_final: bool = False,
    ) -> Type:
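        """Analyze a single nested type under the given permission flags.

        The nesting level and most allow_* flags are restored after the visit.
        A bare ParamSpec appearing where it is not allowed is reported and
        replaced with Any.
        """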
        if nested:
            self.nesting_level += 1
        old_allow_typed_dict_special_forms = self.allow_typed_dict_special_forms
        self.allow_typed_dict_special_forms = allow_typed_dict_special_forms
        self.allow_final = allow_final
        old_allow_ellipsis = self.allow_ellipsis
        self.allow_ellipsis = allow_ellipsis
        old_allow_unpack = self.allow_unpack
        self.allow_unpack = allow_unpack
        try:
            analyzed = t.accept(self)
        finally:
            if nested:
                self.nesting_level -= 1
            self.allow_typed_dict_special_forms = old_allow_typed_dict_special_forms
            self.allow_ellipsis = old_allow_ellipsis
            self.allow_unpack = old_allow_unpack
        if (
            not allow_param_spec
            and isinstance(analyzed, ParamSpecType)
            and analyzed.flavor == ParamSpecFlavor.BARE
        ):
            if analyzed.prefix.arg_types:
                self.fail("Invalid location for Concatenate", t, code=codes.VALID_TYPE)
                self.note("You can use Concatenate as the first argument to Callable", t)
                analyzed = AnyType(TypeOfAny.from_error)
            else:
                self.fail(
                    INVALID_PARAM_SPEC_LOCATION.format(format_type(analyzed, self.options)),
                    t,
                    code=codes.VALID_TYPE,
                )
                self.note(
                    INVALID_PARAM_SPEC_LOCATION_NOTE.format(analyzed.name),
                    t,
                    code=codes.VALID_TYPE,
                )
                analyzed = AnyType(TypeOfAny.from_error)
        return analyzed

    def anal_var_def(self, var_def: TypeVarLikeType) -> TypeVarLikeType:
        if isinstance(var_def, TypeVarType):
            return TypeVarType(
                name=var_def.name,
                fullname=var_def.fullname,
                id=var_def.id,
                values=self.anal_array(var_def.values),
                upper_bound=var_def.upper_bound.accept(self),
                default=var_def.default.accept(self),
                variance=var_def.variance,
                line=var_def.line,
                column=var_def.column,
            )
        else:
            return var_def

    def anal_var_defs(self, var_defs: Sequence[TypeVarLikeType]) -> list[TypeVarLikeType]:
        return [self.anal_var_def(vd) for vd in var_defs]

    def named_type(
        self, fullname: str, args: list[Type] | None = None, line: int = -1, column: int = -1
    ) -> Instance:
        node = self.lookup_fully_qualified(fullname)
        assert isinstance(node.node, TypeInfo)
        any_type = AnyType(TypeOfAny.special_form)
        if args is not None:
            args = self.check_unpacks_in_list(args)
        return Instance(
            node.node, args or [any_type] * len(node.node.defn.type_vars), line=line, column=column
        )

    def check_unpacks_in_list(self, items: list[Type]) -> list[Type]:
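        """Remove all but the first variadic Unpack from a type list.

        If more than one non-tuple Unpack is present, an error is reported on the
        last one.
        """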
        new_items: list[Type] = []
        num_unpacks = 0
        final_unpack = None
        for item in items:
            # TODO: handle forward references here, they appear as Unpack[Any].
            if isinstance(item, UnpackType) and not isinstance(
                get_proper_type(item.type), TupleType
            ):
                if not num_unpacks:
                    new_items.append(item)
                num_unpacks += 1
                final_unpack = item
            else:
                new_items.append(item)

        if num_unpacks > 1:
            assert final_unpack is not None
            self.fail("More than one Unpack in a type is not allowed", final_unpack.type)
        return new_items

    def tuple_type(self, items: list[Type], line: int, column: int) -> TupleType:
        any_type = AnyType(TypeOfAny.special_form)
        return TupleType(
            items, fallback=self.named_type("builtins.tuple", [any_type]), line=line, column=column
        )


TypeVarLikeList = list[tuple[str, TypeVarLikeExpr]]


class MsgCallback(Protocol):
    def __call__(self, __msg: str, __ctx: Context, *, code: ErrorCode | None = None) -> None: ...


def get_omitted_any(
    disallow_any: bool,
    fail: MsgCallback,
    note: MsgCallback,
    orig_type: Type,
    options: Options,
    fullname: str | None = None,
    unexpanded_type: Type | None = None,
) -> AnyType:
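    """Return the Any type used in place of omitted type arguments of a generic.

    If disallow_any is set, also report an error, with a dedicated message for
    non-generic builtins such as list and dict.
    """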
    if disallow_any:
        nongen_builtins = get_nongen_builtins(options.python_version)
        if fullname in nongen_builtins:
            typ = orig_type
            # We use a dedicated error message for builtin generics (as the most common case).
            alternative = nongen_builtins[fullname]
            fail(
                message_registry.IMPLICIT_GENERIC_ANY_BUILTIN.format(alternative),
                typ,
                code=codes.TYPE_ARG,
            )
        else:
            typ = unexpanded_type or orig_type
            type_str = typ.name if isinstance(typ, UnboundType) else format_type_bare(typ, options)

            fail(
                message_registry.BARE_GENERIC.format(quote_type_string(type_str)),
                typ,
                code=codes.TYPE_ARG,
            )
            base_type = get_proper_type(orig_type)
            base_fullname = (
                base_type.type.fullname if isinstance(base_type, Instance) else fullname
            )
            # Ideally, we'd check whether the type is quoted or
            # `from __future__ import annotations` is set before issuing this note.
            if (
                options.python_version < (3, 9)
                and base_fullname in GENERIC_STUB_NOT_AT_RUNTIME_TYPES
            ):
                # Recommend `from __future__ import annotations` or to put type in quotes
                # (string literal escaping) for classes not generic at runtime
                note(
                    "Subscripting classes that are not generic at runtime may require "
                    "escaping, see https://mypy.readthedocs.io/en/stable/runtime_troubles.html"
                    "#not-generic-runtime",
                    typ,
                    code=codes.TYPE_ARG,
                )

        any_type = AnyType(TypeOfAny.from_error, line=typ.line, column=typ.column)
    else:
        any_type = AnyType(
            TypeOfAny.from_omitted_generics, line=orig_type.line, column=orig_type.column
        )
    return any_type


def fix_type_var_tuple_argument(t: Instance) -> None:
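    """Wrap the argument at the TypeVarTuple position in Unpack[tuple[...]]."""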
    if t.type.has_type_var_tuple_type:
        args = list(t.args)
        assert t.type.type_var_tuple_prefix is not None
        tvt = t.type.defn.type_vars[t.type.type_var_tuple_prefix]
        assert isinstance(tvt, TypeVarTupleType)
        args[t.type.type_var_tuple_prefix] = UnpackType(
            Instance(tvt.tuple_fallback.type, [args[t.type.type_var_tuple_prefix]])
        )
        t.args = tuple(args)


def fix_instance(
    t: Instance,
    fail: MsgCallback,
    note: MsgCallback,
    disallow_any: bool,
    options: Options,
    use_generic_error: bool = False,
    unexpanded_type: Type | None = None,
) -> None:
    """Fix a malformed instance by replacing all type arguments with TypeVar default or Any.

    Also emit a suitable error if this is not due to implicit Any's.
    """
    arg_count = len(t.args)
    min_tv_count = sum(not tv.has_default() for tv in t.type.defn.type_vars)
    max_tv_count = len(t.type.type_vars)
    if arg_count < min_tv_count or arg_count > max_tv_count:
        # Don't use existing args if arg_count doesn't match
        if arg_count > max_tv_count:
            # Already wrong arg count error, don't emit missing type parameters error as well.
            disallow_any = False
        t.args = ()
        arg_count = 0

    args: list[Type] = [*(t.args[:max_tv_count])]
    any_type: AnyType | None = None
    env: dict[TypeVarId, Type] = {}

    for tv, arg in itertools.zip_longest(t.type.defn.type_vars, t.args, fillvalue=None):
        if tv is None:
            continue
        if arg is None:
            if tv.has_default():
                arg = tv.default
            else:
                if any_type is None:
                    fullname = None if use_generic_error else t.type.fullname
                    any_type = get_omitted_any(
                        disallow_any, fail, note, t, options, fullname, unexpanded_type
                    )
                arg = any_type
            args.append(arg)
        env[tv.id] = arg
    t.args = tuple(args)
    fix_type_var_tuple_argument(t)
    if not t.type.has_type_var_tuple_type:
        with state.strict_optional_set(options.strict_optional):
            fixed = expand_type(t, env)
        assert isinstance(fixed, Instance)
        t.args = fixed.args


def instantiate_type_alias(
    node: TypeAlias,
    args: list[Type],
    fail: MsgCallback,
    no_args: bool,
    ctx: Context,
    options: Options,
    *,
    unexpanded_type: Type | None = None,
    disallow_any: bool = False,
    use_standard_error: bool = False,
    empty_tuple_index: bool = False,
) -> Type:
    """Create an instance of a (generic) type alias from alias node and type arguments.

    We are following the rules outlined in the TypeAlias docstring.
    Here:
        node: type alias node (definition)
        args: type arguments (types to be substituted in place of type variables
              when expanding the alias)
        fail: error reporter callback
        no_args: whether original definition used a bare generic `A = List`
        ctx: context where expansion happens
        unexpanded_type, disallow_any, use_standard_error: used to customize error messages
    """
    # Type aliases are special, since they can be expanded during semantic analysis,
    # so we need to normalize them as soon as possible.
    # TODO: can this cause an infinite recursion?
    args = flatten_nested_tuples(args)
    if any(unknown_unpack(a) for a in args):
        # This type is not ready to be validated, because the total argument count is unknown.
        # Note that we keep the kind of Any for consistency.
        return set_any_tvars(node, [], ctx.line, ctx.column, options, special_form=True)

    max_tv_count = len(node.alias_tvars)
    act_len = len(args)
    if (
        max_tv_count > 0
        and act_len == 0
        and not (empty_tuple_index and node.tvar_tuple_index is not None)
    ):
        # Interpret a bare Alias the same as a normal generic, i.e., Alias[Any, Any, ...]
        return set_any_tvars(
            node,
            args,
            ctx.line,
            ctx.column,
            options,
            disallow_any=disallow_any,
            fail=fail,
            unexpanded_type=unexpanded_type,
        )
    if max_tv_count == 0 and act_len == 0:
        if no_args:
            assert isinstance(node.target, Instance)  # type: ignore[misc]
            # Note: this is the only case where we use an eager expansion. See more info
            # about no_args aliases like L = List in the docstring for the TypeAlias class.
            return Instance(node.target.type, [], line=ctx.line, column=ctx.column)
        return TypeAliasType(node, [], line=ctx.line, column=ctx.column)
    if (
        max_tv_count == 0
        and act_len > 0
        and isinstance(node.target, Instance)  # type: ignore[misc]
        and no_args
    ):
        tp = Instance(node.target.type, args)
        tp.line = ctx.line
        tp.column = ctx.column
        tp.end_line = ctx.end_line
        tp.end_column = ctx.end_column
        return tp
    if node.tvar_tuple_index is None:
        if any(isinstance(a, UnpackType) for a in args):
            # A variadic unpack in a fixed-size alias (fixed unpacks must be flattened
            # by the caller)
            fail(message_registry.INVALID_UNPACK_POSITION, ctx, code=codes.VALID_TYPE)
            return set_any_tvars(node, [], ctx.line, ctx.column, options, from_error=True)
        min_tv_count = sum(not tv.has_default() for tv in node.alias_tvars)
        fill_typevars = act_len != max_tv_count
        correct = min_tv_count <= act_len <= max_tv_count
    else:
        min_tv_count = sum(
            not tv.has_default() and not isinstance(tv, TypeVarTupleType)
            for tv in node.alias_tvars
        )
        correct = act_len >= min_tv_count
        for a in args:
            if isinstance(a, UnpackType):
                unpacked = get_proper_type(a.type)
                if isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple":
                    # Variadic tuple is always correct.
                    correct = True
        fill_typevars = not correct
    if fill_typevars:
        if not correct:
            if use_standard_error:
                # This is used if the type alias is an internal representation of another
                # type, for example a generic TypedDict or NamedTuple.
                msg = wrong_type_arg_count(max_tv_count, max_tv_count, str(act_len), node.name)
            else:
                if node.tvar_tuple_index is not None:
                    msg = (
                        "Bad number of arguments for type alias,"
                        f" expected at least {min_tv_count}, given {act_len}"
                    )
                elif min_tv_count != max_tv_count:
                    msg = (
                        "Bad number of arguments for type alias,"
                        f" expected between {min_tv_count} and {max_tv_count}, given {act_len}"
                    )
                else:
                    msg = (
                        "Bad number of arguments for type alias,"
                        f" expected {min_tv_count}, given {act_len}"
                    )
            fail(msg, ctx, code=codes.TYPE_ARG)
            args = []
        return set_any_tvars(node, args, ctx.line, ctx.column, options, from_error=True)
    elif node.tvar_tuple_index is not None:
        # We also need to check if we are not performing a type variable tuple split.
        unpack = find_unpack_in_list(args)
        if unpack is not None:
            unpack_arg = args[unpack]
            assert isinstance(unpack_arg, UnpackType)
            if isinstance(unpack_arg.type, TypeVarTupleType):
                exp_prefix = node.tvar_tuple_index
                act_prefix = unpack
                exp_suffix = len(node.alias_tvars) - node.tvar_tuple_index - 1
                act_suffix = len(args) - unpack - 1
                if act_prefix < exp_prefix or act_suffix < exp_suffix:
                    fail("TypeVarTuple cannot be split", ctx, code=codes.TYPE_ARG)
                    return set_any_tvars(node, [], ctx.line, ctx.column, options, from_error=True)
    # TODO: we need to check the validity of args w.r.t. alias.alias_tvars.
    # Otherwise invalid instantiations will be allowed in runtime context.
    # Note: in type context, these will still be caught by semanal_typeargs.
    typ = TypeAliasType(node, args, ctx.line, ctx.column)
    assert typ.alias is not None
    # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here.
    if (
        isinstance(typ.alias.target, Instance)  # type: ignore[misc]
        and typ.alias.target.type.fullname == "mypy_extensions.FlexibleAlias"
    ):
        exp = get_proper_type(typ)
        assert isinstance(exp, Instance)
        return exp.args[-1]
    return typ


def set_any_tvars(
    node: TypeAlias,
    args: list[Type],
    newline: int,
    newcolumn: int,
    options: Options,
    *,
    from_error: bool = False,
    disallow_any: bool = False,
    special_form: bool = False,
    fail: MsgCallback | None = None,
    unexpanded_type: Type | None = None,
) -> TypeAliasType:
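    """Fill in missing type arguments of an alias with their defaults or Any.

    The flavor of Any depends on why the arguments are missing; if disallow_any
    is set and an Any was actually used, also report a bare-generic error.
    """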
    if from_error or disallow_any:
        type_of_any = TypeOfAny.from_error
    elif special_form:
        type_of_any = TypeOfAny.special_form
    else:
        type_of_any = TypeOfAny.from_omitted_generics
    any_type = AnyType(type_of_any, line=newline, column=newcolumn)

    env: dict[TypeVarId, Type] = {}
    used_any_type = False
    has_type_var_tuple_type = False
    for tv, arg in itertools.zip_longest(node.alias_tvars, args, fillvalue=None):
        if tv is None:
            continue
        if arg is None:
            if tv.has_default():
                arg = tv.default
            else:
                arg = any_type
                used_any_type = True
            if isinstance(tv, TypeVarTupleType):
                # TODO Handle TypeVarTuple defaults
                has_type_var_tuple_type = True
                arg = UnpackType(Instance(tv.tuple_fallback.type, [any_type]))
            args.append(arg)
        env[tv.id] = arg
    t = TypeAliasType(node, args, newline, newcolumn)
    if not has_type_var_tuple_type:
        with state.strict_optional_set(options.strict_optional):
            fixed = expand_type(t, env)
        assert isinstance(fixed, TypeAliasType)
        t.args = fixed.args

    if used_any_type and disallow_any and node.alias_tvars:
        assert fail is not None
        if unexpanded_type:
            type_str = (
                unexpanded_type.name
                if isinstance(unexpanded_type, UnboundType)
                else format_type_bare(unexpanded_type, options)
            )
        else:
            type_str = node.name

        fail(
            message_registry.BARE_GENERIC.format(quote_type_string(type_str)),
            Context(newline, newcolumn),
            code=codes.TYPE_ARG,
        )
    return t


class DivergingAliasDetector(TrivialSyntheticTypeTranslator):
    """See docstring of detect_diverging_alias() for details."""

    # TODO: this doesn't really need to be a translator, but we don't have a trivial visitor.
    def __init__(
        self,
        seen_nodes: set[TypeAlias],
        lookup: Callable[[str, Context], SymbolTableNode | None],
        scope: TypeVarLikeScope,
    ) -> None:
        super().__init__()
        self.seen_nodes = seen_nodes
        self.lookup = lookup
        self.scope = scope
        self.diverging = False

    def visit_type_alias_type(self, t: TypeAliasType) -> Type:
        assert t.alias is not None, f"Unfixed type alias {t.type_ref}"
        if t.alias in self.seen_nodes:
            for arg in t.args:
                if not (
                    isinstance(arg, TypeVarLikeType)
                    or isinstance(arg, UnpackType)
                    and isinstance(arg.type, TypeVarLikeType)
                ) and has_type_vars(arg):
                    self.diverging = True
                    return t
            # All clear for this expansion chain.
            return t
        new_nodes = self.seen_nodes | {t.alias}
        visitor = DivergingAliasDetector(new_nodes, self.lookup, self.scope)
        _ = get_proper_type(t).accept(visitor)
        if visitor.diverging:
            self.diverging = True
        return t


def detect_diverging_alias(
    node: TypeAlias,
    target: Type,
    lookup: Callable[[str, Context], SymbolTableNode | None],
    scope: TypeVarLikeScope,
) -> bool:
    """This detects type aliases that will diverge during type checking.

    For example F = Something[..., F[List[T]]]. At each expansion step this will produce
    *new* type aliases: e.g. F[List[int]], F[List[List[int]]], etc. So we can't detect
    recursion simply by looking for already-seen aliases. This is a known problem in the
    literature: recursive aliases and generic types don't always go well together. It
    looks like there is no known systematic solution yet.

    # TODO: should we handle such aliases using type_recursion counter and some large limit?
    They may be handy in rare cases, e.g. to express a union of non-mixed nested lists:
    Nested = Union[T, Nested[List[T]]] ~> Union[T, List[T], List[List[T]], ...]
    """
    visitor = DivergingAliasDetector({node}, lookup, scope)
    _ = target.accept(visitor)
    return visitor.diverging


def check_for_explicit_any(
    typ: Type | None,
    options: Options,
    is_typeshed_stub: bool,
    msg: MessageBuilder,
    context: Context,
) -> None:
    if options.disallow_any_explicit and not is_typeshed_stub and typ and has_explicit_any(typ):
        msg.explicit_any(context)


def has_explicit_any(t: Type) -> bool:
    """
    Return whether this type, or any type it contains, is an Any coming from an
    explicit type annotation.
    """
    return t.accept(HasExplicitAny())


class HasExplicitAny(TypeQuery[bool]):
    def __init__(self) -> None:
        super().__init__(any)

    def visit_any(self, t: AnyType) -> bool:
        return t.type_of_any == TypeOfAny.explicit

    def visit_typeddict_type(self, t: TypedDictType) -> bool:
        # typeddict is checked during TypedDict declaration, so don't typecheck it here.
        return False


def has_any_from_unimported_type(t: Type) -> bool:
    """Return true if this type is Any because an import was not followed.

    If type t is such an Any type, or has type arguments that contain such an Any
    type, this function will return true.
    """
    return t.accept(HasAnyFromUnimportedType())


class HasAnyFromUnimportedType(BoolTypeQuery):
    def __init__(self) -> None:
        super().__init__(ANY_STRATEGY)

    def visit_any(self, t: AnyType) -> bool:
        return t.type_of_any == TypeOfAny.from_unimported_type

    def visit_typeddict_type(self, t: TypedDictType) -> bool:
        # typeddict is checked during TypedDict declaration, so don't typecheck it here
        return False


def collect_all_inner_types(t: Type) -> list[Type]:
    """
    Return all types that `t` contains
    """
    return t.accept(CollectAllInnerTypesQuery())


class CollectAllInnerTypesQuery(TypeQuery[list[Type]]):
    def __init__(self) -> None:
        super().__init__(self.combine_lists_strategy)

    def query_types(self, types: Iterable[Type]) -> list[Type]:
        return self.strategy([t.accept(self) for t in types]) + list(types)

    @classmethod
    def combine_lists_strategy(cls, it: Iterable[list[Type]]) -> list[Type]:
        return list(itertools.chain.from_iterable(it))


def make_optional_type(t: Type) -> Type:
    """Return the type corresponding to Optional[t].

    Note that we can't use normal union simplification, since this function
    is called during semantic analysis and simplification only works during
    type checking.
    """
    if isinstance(t, ProperType) and isinstance(t, NoneType):
        return t
    elif isinstance(t, ProperType) and isinstance(t, UnionType):
        # Eagerly expanding aliases is not safe during semantic analysis.
        items = [item for item in t.items if not isinstance(get_proper_type(item), NoneType)]
        return UnionType(items + [NoneType()], t.line, t.column)
    else:
        return UnionType([t, NoneType()], t.line, t.column)


def validate_instance(t: Instance, fail: MsgCallback, empty_tuple_index: bool) -> bool:
    """Check if this is a well-formed instance with respect to argument count/positions."""
    # TODO: combine logic with instantiate_type_alias().
    if any(unknown_unpack(a) for a in t.args):
        # This type is not ready to be validated, because the total argument count is unknown.
        # TODO: is it OK to fill with TypeOfAny.from_error instead of special form?
        return False
    if t.type.has_type_var_tuple_type:
        min_tv_count = sum(
            not tv.has_default() and not isinstance(tv, TypeVarTupleType)
            for tv in t.type.defn.type_vars
        )
        correct = len(t.args) >= min_tv_count
        if any(
            isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance)
            for a in t.args
        ):
            correct = True
        if not t.args:
            if not (empty_tuple_index and len(t.type.type_vars) == 1):
                # The Any arguments should be set by the caller.
                if empty_tuple_index and min_tv_count:
                    fail(
                        f"At least {min_tv_count} type argument(s) expected, none given",
                        t,
                        code=codes.TYPE_ARG,
                    )
                return False
        elif not correct:
            fail(
                f"Bad number of arguments, expected: at least {min_tv_count}, given: {len(t.args)}",
                t,
                code=codes.TYPE_ARG,
            )
            return False
        else:
            # We also need to check if we are not performing a type variable tuple split.
            unpack = find_unpack_in_list(t.args)
            if unpack is not None:
                unpack_arg = t.args[unpack]
                assert isinstance(unpack_arg, UnpackType)
                if isinstance(unpack_arg.type, TypeVarTupleType):
                    assert t.type.type_var_tuple_prefix is not None
                    assert t.type.type_var_tuple_suffix is not None
                    exp_prefix = t.type.type_var_tuple_prefix
                    act_prefix = unpack
                    exp_suffix = t.type.type_var_tuple_suffix
                    act_suffix = len(t.args) - unpack - 1
                    if act_prefix < exp_prefix or act_suffix < exp_suffix:
                        fail("TypeVarTuple cannot be split", t, code=codes.TYPE_ARG)
                        return False
    elif any(isinstance(a, UnpackType) for a in t.args):
        # A variadic unpack in a fixed-size instance (fixed unpacks must be flattened
        # by the caller)
        fail(message_registry.INVALID_UNPACK_POSITION, t, code=codes.VALID_TYPE)
        t.args = ()
        return False
    elif len(t.args) != len(t.type.type_vars):
        # Invalid number of type parameters.
        arg_count = len(t.args)
        min_tv_count = sum(not tv.has_default() for tv in t.type.defn.type_vars)
        max_tv_count = len(t.type.type_vars)
        if arg_count and (arg_count < min_tv_count or arg_count > max_tv_count):
            fail(
                wrong_type_arg_count(min_tv_count, max_tv_count, str(arg_count), t.type.name),
                t,
                code=codes.TYPE_ARG,
            )
            t.invalid = True
        return False
    return True


def find_self_type(typ: Type, lookup: Callable[[str], SymbolTableNode | None]) -> bool:
    return typ.accept(HasSelfType(lookup))


class HasSelfType(BoolTypeQuery):
    def __init__(self, lookup: Callable[[str], SymbolTableNode | None]) -> None:
        self.lookup = lookup
        super().__init__(ANY_STRATEGY)

    def visit_unbound_type(self, t: UnboundType) -> bool:
        sym = self.lookup(t.name)
        if sym and sym.fullname in SELF_TYPE_NAMES:
            return True
        return super().visit_unbound_type(t)


def unknown_unpack(t: Type) -> bool:
    """Check if a given type is an unpack of an unknown type.

    Unfortunately, there is no robust way to distinguish forward references from
    genuine undefined names here. But this has worked well so far, although it looks
    quite fragile.
    """
    if isinstance(t, UnpackType):
        unpacked = get_proper_type(t.type)
        if isinstance(unpacked, AnyType) and unpacked.type_of_any == TypeOfAny.special_form:
            return True
    return False


class FindTypeVarVisitor(SyntheticTypeVisitor[None]):
    """Type visitor that looks for type variable types and self types."""

    def __init__(self, api: SemanticAnalyzerCoreInterface, scope: TypeVarLikeScope) -> None:
        self.api = api
        self.scope = scope
        self.type_var_likes: list[tuple[str, TypeVarLikeExpr]] = []
        self.has_self_type = False
        self.seen_aliases: set[TypeAliasType] | None = None
        self.include_callables = True

    def _seems_like_callable(self, type: UnboundType) -> bool:
        if not type.args:
            return False
        return isinstance(type.args[0], (EllipsisType, TypeList, ParamSpecType))

    def visit_unbound_type(self, t: UnboundType) -> None:
        name = t.name
        node = self.api.lookup_qualified(name, t)
        if node and node.fullname in SELF_TYPE_NAMES:
            self.has_self_type = True
        if (
            node
            and isinstance(node.node, TypeVarLikeExpr)
            and self.scope.get_binding(node) is None
        ):
            if (name, node.node) not in self.type_var_likes:
                self.type_var_likes.append((name, node.node))
        elif not self.include_callables and self._seems_like_callable(t):
            if find_self_type(
                t, lambda name: self.api.lookup_qualified(name, t, suppress_errors=True)
            ):
                self.has_self_type = True
            return
        elif node and node.fullname in LITERAL_TYPE_NAMES:
            return
        elif node and node.fullname in ANNOTATED_TYPE_NAMES and t.args:
            # Don't query the second argument to Annotated for TypeVars
            self.process_types([t.args[0]])
        elif t.args:
            self.process_types(t.args)

    def visit_type_list(self, t: TypeList) -> None:
        self.process_types(t.items)

    def visit_callable_argument(self, t: CallableArgument) -> None:
        t.typ.accept(self)

    def visit_any(self, t: AnyType) -> None:
        pass

    def visit_uninhabited_type(self, t: UninhabitedType) -> None:
        pass

    def visit_none_type(self, t: NoneType) -> None:
        pass

    def visit_erased_type(self, t: ErasedType) -> None:
        pass

    def visit_deleted_type(self, t: DeletedType) -> None:
        pass

    def visit_type_var(self, t: TypeVarType) -> None:
        self.process_types([t.upper_bound, t.default] + t.values)

    def visit_param_spec(self, t: ParamSpecType) -> None:
        self.process_types([t.upper_bound, t.default])

    def visit_type_var_tuple(self, t: TypeVarTupleType) -> None:
        self.process_types([t.upper_bound, t.default])

    def visit_unpack_type(self, t: UnpackType) -> None:
        self.process_types([t.type])

    def visit_parameters(self, t: Parameters) -> None:
        self.process_types(t.arg_types)

    def visit_partial_type(self, t: PartialType) -> None:
        pass

    def visit_instance(self, t: Instance) -> None:
        self.process_types(t.args)

    def visit_callable_type(self, t: CallableType) -> None:
        # FIX generics
        self.process_types(t.arg_types)
        t.ret_type.accept(self)

    def visit_tuple_type(self, t: TupleType) -> None:
        self.process_types(t.items)

    def visit_typeddict_type(self, t: TypedDictType) -> None:
        self.process_types(list(t.items.values()))

    def visit_raw_expression_type(self, t: RawExpressionType) -> None:
        pass

    def visit_literal_type(self, t: LiteralType) -> None:
        pass

    def visit_union_type(self, t: UnionType) -> None:
        self.process_types(t.items)

    def visit_overloaded(self, t: Overloaded) -> None:
        self.process_types(t.items)  # type: ignore[arg-type]

    def visit_type_type(self, t: TypeType) -> None:
        t.item.accept(self)

    def visit_ellipsis_type(self, t: EllipsisType) -> None:
        pass

    def visit_placeholder_type(self, t: PlaceholderType) -> None:
        return self.process_types(t.args)

    def visit_type_alias_type(self, t: TypeAliasType) -> None:
        # Skip type aliases in already visited types to avoid infinite recursion.
        if self.seen_aliases is None:
            self.seen_aliases = set()
        elif t in self.seen_aliases:
            return
        self.seen_aliases.add(t)
        self.process_types(t.args)

    def process_types(self, types: list[Type] | tuple[Type, ...]) -> None:
        # The isinstance() check is redundant for correctness, but it lets mypyc
        # generate specialized (faster) code for the list case.
        if isinstance(types, list):
            for t in types:
                t.accept(self)
        else:
            for t in types:
                t.accept(self)
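
# A rough usage sketch for FindTypeVarVisitor (the api, tvar_scope, and typ
# names below are hypothetical): callers run the visitor over a type and then
# read the collected state.
#
#     visitor = FindTypeVarVisitor(api, tvar_scope)
#     typ.accept(visitor)
#     visitor.type_var_likes  # (name, TypeVarLikeExpr) pairs not bound in tvar_scope
#     visitor.has_self_type   # True if typing.Self / typing_extensions.Self was seen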


class TypeVarDefaultTranslator(TrivialSyntheticTypeTranslator):
    """Type translate visitor that replaces UnboundTypes with in-scope TypeVars."""

    def __init__(
        self, api: SemanticAnalyzerInterface, tvar_expr_name: str, context: Context
    ) -> None:
        super().__init__()
        self.api = api
        self.tvar_expr_name = tvar_expr_name
        self.context = context

    def visit_unbound_type(self, t: UnboundType) -> Type:
        sym = self.api.lookup_qualified(t.name, t, suppress_errors=True)
        if sym is not None:
            if type_var := self.api.tvar_scope.get_binding(sym):
                return type_var
            if isinstance(sym.node, TypeVarLikeExpr):
                self.api.fail(
                    f'Type parameter "{self.tvar_expr_name}" has a default type '
                    "that refers to one or more type variables that are out of scope",
                    self.context,
                )
                return AnyType(TypeOfAny.from_error)
        return super().visit_unbound_type(t)

    def visit_type_alias_type(self, t: TypeAliasType) -> Type:
        # TypeAliasTypes are already analyzed separately; return the alias as-is.
        return t
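
# A minimal sketch of the situation TypeVarDefaultTranslator handles
# (hypothetical type variables, PEP 696 style defaults):
#
#     T1 = TypeVar("T1")
#     T2 = TypeVar("T2", default=list[T1])
#
# While the default of T2 is being analyzed, "T1" is still an UnboundType.  If
# T1 is bound in the surrounding type variable scope it is substituted in;
# otherwise the "out of scope" error above is reported and Any is used instead.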
