[ty] Distinguish "unconstrained" from "constrained to any type" (#21539)

Before, we would collapse any constraint of the form `Never ≤ T ≤
object` down to the "always true" constraint set. This is correct in
terms of BDD semantics, but loses information, since "not constraining a
typevar at all" is different from "constraining a typevar to take on any
type". Once we get to specialization inference, we should fall back on
the typevar's default for the former, but not for the latter.

This is much easier to support now that we have a sequent map, since we
need to treat `¬(Never ≤ T ≤ object)` as being impossible, and prune it
when we walk through BDD paths, just like we do for other impossible
combinations.
This commit is contained in:
Douglas Creager 2025-11-24 15:23:09 -05:00 committed by GitHub
parent d379f3826f
commit 7e277667d1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 162 additions and 86 deletions

View File

@ -22,8 +22,10 @@ from ty_extensions import ConstraintSet, generic_context
# fmt: off
def unbounded[T]():
# revealed: ty_extensions.Specialization[T@unbounded = object]
# revealed: ty_extensions.Specialization[T@unbounded = Unknown]
reveal_type(generic_context(unbounded).specialize_constrained(ConstraintSet.always()))
# revealed: ty_extensions.Specialization[T@unbounded = object]
reveal_type(generic_context(unbounded).specialize_constrained(ConstraintSet.range(Never, T, object)))
# revealed: None
reveal_type(generic_context(unbounded).specialize_constrained(ConstraintSet.never()))
@ -88,6 +90,7 @@ that makes the test succeed.
from typing import Any
def bounded_by_gradual[T: Any]():
# TODO: revealed: ty_extensions.Specialization[T@bounded_by_gradual = Any]
# revealed: ty_extensions.Specialization[T@bounded_by_gradual = object]
reveal_type(generic_context(bounded_by_gradual).specialize_constrained(ConstraintSet.always()))
# revealed: None
@ -168,12 +171,16 @@ from typing import Any
# fmt: off
def constrained_by_gradual[T: (Base, Any)]():
# TODO: revealed: ty_extensions.Specialization[T@constrained_by_gradual = Unknown]
# revealed: ty_extensions.Specialization[T@constrained_by_gradual = Base]
reveal_type(generic_context(constrained_by_gradual).specialize_constrained(ConstraintSet.always()))
# TODO: revealed: ty_extensions.Specialization[T@constrained_by_gradual = Any]
# revealed: ty_extensions.Specialization[T@constrained_by_gradual = object]
reveal_type(generic_context(constrained_by_gradual).specialize_constrained(ConstraintSet.always()))
reveal_type(generic_context(constrained_by_gradual).specialize_constrained(ConstraintSet.range(Never, T, object)))
# revealed: None
reveal_type(generic_context(constrained_by_gradual).specialize_constrained(ConstraintSet.never()))
# TODO: revealed: ty_extensions.Specialization[T@constrained_by_gradual = Any]
# revealed: ty_extensions.Specialization[T@constrained_by_gradual = Base]
reveal_type(generic_context(constrained_by_gradual).specialize_constrained(ConstraintSet.range(Never, T, Base)))
# TODO: revealed: ty_extensions.Specialization[T@constrained_by_gradual = Any]
@ -181,14 +188,14 @@ def constrained_by_gradual[T: (Base, Any)]():
reveal_type(generic_context(constrained_by_gradual).specialize_constrained(ConstraintSet.range(Never, T, Unrelated)))
# TODO: revealed: ty_extensions.Specialization[T@constrained_by_gradual = Any]
# revealed: ty_extensions.Specialization[T@constrained_by_gradual = Super]
# revealed: ty_extensions.Specialization[T@constrained_by_gradual = Base]
reveal_type(generic_context(constrained_by_gradual).specialize_constrained(ConstraintSet.range(Never, T, Super)))
# TODO: revealed: ty_extensions.Specialization[T@constrained_by_gradual = Any]
# revealed: ty_extensions.Specialization[T@constrained_by_gradual = Super]
reveal_type(generic_context(constrained_by_gradual).specialize_constrained(ConstraintSet.range(Super, T, Super)))
# TODO: revealed: ty_extensions.Specialization[T@constrained_by_gradual = Any]
# revealed: ty_extensions.Specialization[T@constrained_by_gradual = object]
# revealed: ty_extensions.Specialization[T@constrained_by_gradual = Base]
reveal_type(generic_context(constrained_by_gradual).specialize_constrained(ConstraintSet.range(Sub, T, object)))
# TODO: revealed: ty_extensions.Specialization[T@constrained_by_gradual = Any]
# revealed: ty_extensions.Specialization[T@constrained_by_gradual = Sub]
@ -288,7 +295,7 @@ class Unrelated: ...
# fmt: off
def mutually_bound[T: Base, U]():
# revealed: ty_extensions.Specialization[T@mutually_bound = Base, U@mutually_bound = object]
# revealed: ty_extensions.Specialization[T@mutually_bound = Base, U@mutually_bound = Unknown]
reveal_type(generic_context(mutually_bound).specialize_constrained(ConstraintSet.always()))
# revealed: None
reveal_type(generic_context(mutually_bound).specialize_constrained(ConstraintSet.never()))
@ -296,7 +303,7 @@ def mutually_bound[T: Base, U]():
# revealed: ty_extensions.Specialization[T@mutually_bound = Base, U@mutually_bound = Base]
reveal_type(generic_context(mutually_bound).specialize_constrained(ConstraintSet.range(Never, U, T)))
# revealed: ty_extensions.Specialization[T@mutually_bound = Sub, U@mutually_bound = object]
# revealed: ty_extensions.Specialization[T@mutually_bound = Sub, U@mutually_bound = Unknown]
reveal_type(generic_context(mutually_bound).specialize_constrained(ConstraintSet.range(Never, T, Sub)))
# revealed: ty_extensions.Specialization[T@mutually_bound = Sub, U@mutually_bound = Sub]
reveal_type(generic_context(mutually_bound).specialize_constrained(ConstraintSet.range(Never, T, Sub) & ConstraintSet.range(Never, U, T)))

View File

@ -66,12 +66,15 @@ def _[T]() -> None:
reveal_type(ConstraintSet.range(Base, T, object))
```
And a range constraint with _both_ a lower bound of `Never` and an upper bound of `object` does not
constrain the typevar at all.
And a range constraint with a lower bound of `Never` and an upper bound of `object` allows the
typevar to take on any type. We treat this differently from the `always` constraint set. During
specialization inference, that allows us to distinguish between not constraining a typevar (and
therefore falling back on its default specialization) and explicitly constraining it to any subtype
of `object`.
```py
def _[T]() -> None:
# revealed: ty_extensions.ConstraintSet[always]
# revealed: ty_extensions.ConstraintSet[(T@_ = *)]
reveal_type(ConstraintSet.range(Never, T, object))
```
@ -156,7 +159,7 @@ cannot be satisfied at all.
```py
def _[T]() -> None:
# revealed: ty_extensions.ConstraintSet[never]
# revealed: ty_extensions.ConstraintSet[(T@_ ≠ *)]
reveal_type(~ConstraintSet.range(Never, T, object))
```
@ -654,7 +657,7 @@ def _[T]() -> None:
reveal_type(~ConstraintSet.range(Never, T, Base))
# revealed: ty_extensions.ConstraintSet[¬(Sub ≤ T@_)]
reveal_type(~ConstraintSet.range(Sub, T, object))
# revealed: ty_extensions.ConstraintSet[never]
# revealed: ty_extensions.ConstraintSet[(T@_ ≠ *)]
reveal_type(~ConstraintSet.range(Never, T, object))
```
@ -811,7 +814,7 @@ def f[T]():
# "domain", which maps valid inputs to `true` and invalid inputs to `false`. This means that two
# constraint sets that are both always satisfied will not be identical if they have different
# domains!
always = ConstraintSet.range(Never, T, object)
always = ConstraintSet.always()
# revealed: ty_extensions.ConstraintSet[always]
reveal_type(always)
static_assert(always)
@ -846,11 +849,11 @@ from typing import Never
from ty_extensions import ConstraintSet
def same_typevar[T]():
# revealed: ty_extensions.ConstraintSet[always]
# revealed: ty_extensions.ConstraintSet[(T@same_typevar = *)]
reveal_type(ConstraintSet.range(Never, T, T))
# revealed: ty_extensions.ConstraintSet[always]
# revealed: ty_extensions.ConstraintSet[(T@same_typevar = *)]
reveal_type(ConstraintSet.range(T, T, object))
# revealed: ty_extensions.ConstraintSet[always]
# revealed: ty_extensions.ConstraintSet[(T@same_typevar = *)]
reveal_type(ConstraintSet.range(T, T, T))
```
@ -862,11 +865,11 @@ as shown above.)
from ty_extensions import Intersection
def same_typevar[T]():
# revealed: ty_extensions.ConstraintSet[always]
# revealed: ty_extensions.ConstraintSet[(T@same_typevar = *)]
reveal_type(ConstraintSet.range(Never, T, T | None))
# revealed: ty_extensions.ConstraintSet[always]
# revealed: ty_extensions.ConstraintSet[(T@same_typevar = *)]
reveal_type(ConstraintSet.range(Intersection[T, None], T, object))
# revealed: ty_extensions.ConstraintSet[always]
# revealed: ty_extensions.ConstraintSet[(T@same_typevar = *)]
reveal_type(ConstraintSet.range(Intersection[T, None], T, T | None))
```
@ -877,8 +880,8 @@ constraint set can never be satisfied, since every type is disjoint with its neg
from ty_extensions import Not
def same_typevar[T]():
# revealed: ty_extensions.ConstraintSet[never]
# revealed: ty_extensions.ConstraintSet[(T@same_typevar ≠ *)]
reveal_type(ConstraintSet.range(Intersection[Not[T], None], T, object))
# revealed: ty_extensions.ConstraintSet[never]
# revealed: ty_extensions.ConstraintSet[(T@same_typevar ≠ *)]
reveal_type(ConstraintSet.range(Not[T], T, object))
```

View File

@ -8306,7 +8306,7 @@ impl<'db> KnownInstanceType<'db> {
write!(
f,
"ty_extensions.Specialization{}",
specialization.normalized(self.db).display_full(self.db)
specialization.display_full(self.db)
)
}
KnownInstanceType::UnionType(_) => f.write_str("types.UnionType"),

View File

@ -494,7 +494,11 @@ impl<'db> ConstrainedTypeVar<'db> {
})
}) =>
{
return Node::AlwaysFalse;
return Node::new_constraint(
db,
ConstrainedTypeVar::new(db, typevar, Type::Never, Type::object()),
)
.negate(db);
}
_ => {}
}
@ -522,12 +526,6 @@ impl<'db> ConstrainedTypeVar<'db> {
return Node::AlwaysFalse;
}
// If the requested constraint is `Never ≤ T ≤ object`, then the typevar can be specialized
// to _any_ type, and the constraint does nothing.
if lower.is_never() && upper.is_object() {
return Node::AlwaysTrue;
}
// We have an (arbitrary) ordering for typevars. If the upper and/or lower bounds are
// typevars, we have to ensure that the bounds are "later" according to that order than the
// typevar being constrained.
@ -574,13 +572,21 @@ impl<'db> ConstrainedTypeVar<'db> {
db,
ConstrainedTypeVar::new(db, lower, Type::Never, Type::TypeVar(typevar)),
);
let upper = Self::new_node(db, typevar, Type::Never, upper);
let upper = if upper.is_object() {
Node::AlwaysTrue
} else {
Self::new_node(db, typevar, Type::Never, upper)
};
lower.and(db, upper)
}
// L ≤ T ≤ U == (L ≤ [T]) && (T ≤ [U])
(_, Type::TypeVar(upper)) if typevar.can_be_bound_for(db, upper) => {
let lower = Self::new_node(db, typevar, lower, Type::object());
let lower = if lower.is_never() {
Node::AlwaysTrue
} else {
Self::new_node(db, typevar, lower, Type::object())
};
let upper = Node::new_constraint(
db,
ConstrainedTypeVar::new(db, upper, Type::TypeVar(typevar), Type::object()),
@ -703,6 +709,15 @@ impl<'db> ConstrainedTypeVar<'db> {
);
}
if lower.is_never() && upper.is_object() {
return write!(
f,
"({} {} *)",
typevar.identity(self.db).display(self.db),
if self.negated { "≠" } else { "=" }
);
}
if self.negated {
f.write_str("¬")?;
}
@ -1127,27 +1142,30 @@ impl<'db> Node<'db> {
/// Invokes a callback for each of the representative types of a particular typevar for this
/// constraint set.
///
/// There is a representative type for each distinct path from the BDD root to the `AlwaysTrue`
/// We first abstract the BDD so that it only mentions constraints on the requested typevar. We
/// then invoke your callback for each distinct path from the BDD root to the `AlwaysTrue`
/// terminal. Each of those paths can be viewed as the conjunction of the individual
/// constraints of each internal node that we traverse as we walk that path. We provide the
/// lower/upper bound of this conjunction to your callback, allowing you to choose any suitable
/// type in the range.
///
/// If the abstracted BDD does not mention the typevar at all (i.e., it leaves the typevar
/// completely unconstrained), we will invoke your callback once with `None`.
fn find_representative_types(
self,
db: &'db dyn Db,
bound_typevar: BoundTypeVarIdentity<'db>,
mut f: impl FnMut(Type<'db>, Type<'db>),
mut f: impl FnMut(Option<(Type<'db>, Type<'db>)>),
) {
self.retain_one(db, bound_typevar)
.find_representative_types_inner(db, Type::Never, Type::object(), &mut f);
.find_representative_types_inner(db, None, &mut f);
}
fn find_representative_types_inner(
self,
db: &'db dyn Db,
greatest_lower_bound: Type<'db>,
least_upper_bound: Type<'db>,
f: &mut dyn FnMut(Type<'db>, Type<'db>),
current_bounds: Option<(Type<'db>, Type<'db>)>,
f: &mut dyn FnMut(Option<(Type<'db>, Type<'db>)>),
) {
match self {
Node::AlwaysTrue => {
@ -1157,12 +1175,16 @@ impl<'db> Node<'db> {
// If `lower ≰ upper`, then this path somehow represents an invalid specialization.
// That should have been removed from the BDD domain as part of the simplification
// process.
debug_assert!(greatest_lower_bound.is_subtype_of(db, least_upper_bound));
debug_assert!(current_bounds.is_none_or(
|(greatest_lower_bound, least_upper_bound)| {
greatest_lower_bound.is_subtype_of(db, least_upper_bound)
}
));
// We've been tracking the lower and upper bound that the types for this path must
// satisfy. Pass those bounds along and let the caller choose a representative type
// from within that range.
f(greatest_lower_bound, least_upper_bound);
f(current_bounds);
}
Node::AlwaysFalse => {
@ -1171,6 +1193,9 @@ impl<'db> Node<'db> {
}
Node::Interior(interior) => {
let (greatest_lower_bound, least_upper_bound) =
current_bounds.unwrap_or((Type::Never, Type::object()));
// For an interior node, there are two outgoing paths: one for the `if_true`
// branch, and one for the `if_false` branch.
//
@ -1185,8 +1210,7 @@ impl<'db> Node<'db> {
IntersectionType::from_elements(db, [least_upper_bound, constraint.upper(db)]);
interior.if_true(db).find_representative_types_inner(
db,
new_greatest_lower_bound,
new_least_upper_bound,
Some((new_greatest_lower_bound, new_least_upper_bound)),
f,
);
@ -1202,8 +1226,7 @@ impl<'db> Node<'db> {
// path.
interior.if_false(db).find_representative_types_inner(
db,
greatest_lower_bound,
least_upper_bound,
Some((greatest_lower_bound, least_upper_bound)),
f,
);
}
@ -2239,6 +2262,9 @@ impl<'db> ConstraintAssignment<'db> {
///
/// We support several kinds of sequent:
///
/// - `¬C₁ → false`: This indicates that `C₁` is always true. Any path that assumes it is false is
/// impossible and can be pruned.
///
/// - `C₁ ∧ C₂ → false`: This indicates that `C₁` and `C₂` are disjoint: it is not possible for
/// both to hold. Any path that assumes both is impossible and can be pruned.
///
@ -2250,8 +2276,10 @@ impl<'db> ConstraintAssignment<'db> {
/// holds but `D` does _not_ is impossible and can be pruned.
#[derive(Debug, Default, Eq, PartialEq, get_size2::GetSize, salsa::Update)]
struct SequentMap<'db> {
/// Sequents of the form `¬C₁ → false`
single_tautologies: FxHashSet<ConstrainedTypeVar<'db>>,
/// Sequents of the form `C₁ ∧ C₂ → false`
impossibilities: FxHashSet<(ConstrainedTypeVar<'db>, ConstrainedTypeVar<'db>)>,
pair_impossibilities: FxHashSet<(ConstrainedTypeVar<'db>, ConstrainedTypeVar<'db>)>,
/// Sequents of the form `C₁ ∧ C₂ → D`
pair_implications: FxHashMap<
(ConstrainedTypeVar<'db>, ConstrainedTypeVar<'db>),
@ -2310,13 +2338,17 @@ impl<'db> SequentMap<'db> {
}
}
fn add_impossibility(
fn add_single_tautology(&mut self, ante: ConstrainedTypeVar<'db>) {
self.single_tautologies.insert(ante);
}
fn add_pair_impossibility(
&mut self,
db: &'db dyn Db,
ante1: ConstrainedTypeVar<'db>,
ante2: ConstrainedTypeVar<'db>,
) {
self.impossibilities
self.pair_impossibilities
.insert(Self::pair_key(db, ante1, ante2));
}
@ -2352,6 +2384,15 @@ impl<'db> SequentMap<'db> {
}
fn add_sequents_for_single(&mut self, db: &'db dyn Db, constraint: ConstrainedTypeVar<'db>) {
// If this constraint binds its typevar to `Never ≤ T ≤ object`, then the typevar can take
// on any type, and the constraint is always satisfied.
let lower = constraint.lower(db);
let upper = constraint.upper(db);
if lower.is_never() && upper.is_object() {
self.add_single_tautology(constraint);
return;
}
// If the lower or upper bound of this constraint is a typevar, we can propagate the
// constraint:
//
@ -2362,8 +2403,6 @@ impl<'db> SequentMap<'db> {
// Technically, (1) also allows `(S = T) → (S = S)`, but the rhs of that is vacuously true,
// so we don't add a sequent for that case.
let lower = constraint.lower(db);
let upper = constraint.upper(db);
let post_constraint = match (lower, upper) {
// Case 1
(Type::TypeVar(lower_typevar), Type::TypeVar(upper_typevar)) => {
@ -2568,7 +2607,7 @@ impl<'db> SequentMap<'db> {
self.enqueue_constraint(intersection_constraint);
}
None => {
self.add_impossibility(db, left_constraint, right_constraint);
self.add_pair_impossibility(db, left_constraint, right_constraint);
}
}
}
@ -2593,7 +2632,7 @@ impl<'db> SequentMap<'db> {
}
};
for (ante1, ante2) in &self.map.impossibilities {
for (ante1, ante2) in &self.map.pair_impossibilities {
maybe_write_prefix(f)?;
write!(
f,
@ -2726,7 +2765,15 @@ impl<'db> PathAssignments<'db> {
// don't anticipate the sequent maps to be very large. We might consider avoiding the
// brute-force search.
for (ante1, ante2) in &map.impossibilities {
for ante in &map.single_tautologies {
if self.assignment_holds(ante.when_false()) {
// The sequent map says (ante) is always true, and the current path asserts that
// it's false.
return Err(PathAssignmentConflict);
}
}
for (ante1, ante2) in &map.pair_impossibilities {
if self.assignment_holds(ante1.when_true()) && self.assignment_holds(ante2.when_true())
{
// The sequent map says (ante1 ∧ ante2) is an impossible combination, and the
@ -3088,8 +3135,8 @@ impl<'db> GenericContext<'db> {
});
// Then we find all of the "representative types" for each typevar in the constraint set.
let mut types = vec![Type::Never; self.len(db)];
for (i, bound_typevar) in self.variables(db).enumerate() {
let mut error_occurred = false;
let types = self.variables(db).map(|bound_typevar| {
// Each representative type represents one of the ways that the typevar can satisfy the
// constraint, expressed as a lower/upper bound on the types that the typevar can
// specialize to.
@ -3101,40 +3148,55 @@ impl<'db> GenericContext<'db> {
// _each_ of the paths into separate specializations, but it's not clear what we would
// do with that, so instead we just report the ambiguity as a specialization failure.
let mut satisfied = false;
let mut unconstrained = false;
let mut greatest_lower_bound = Type::Never;
let mut least_upper_bound = Type::object();
abstracted.find_representative_types(
db,
bound_typevar.identity(db),
|lower_bound, upper_bound| {
satisfied = true;
greatest_lower_bound =
UnionType::from_elements(db, [greatest_lower_bound, lower_bound]);
least_upper_bound =
IntersectionType::from_elements(db, [least_upper_bound, upper_bound]);
},
);
abstracted.find_representative_types(db, bound_typevar.identity(db), |bounds| {
satisfied = true;
match bounds {
Some((lower_bound, upper_bound)) => {
greatest_lower_bound =
UnionType::from_elements(db, [greatest_lower_bound, lower_bound]);
least_upper_bound =
IntersectionType::from_elements(db, [least_upper_bound, upper_bound]);
}
None => {
unconstrained = true;
}
}
});
// If there are no satisfiable paths in the BDD, then there is no valid specialization
// for this constraint set.
if !satisfied {
// TODO: Construct a useful error here
return Err(());
error_occurred = true;
return None;
}
// If the BDD is satisfiable but the typevar is unconstrained, we use `None` to tell
// specialize_recursive to fall back on the typevar's default.
if unconstrained {
return None;
}
// If `lower ≰ upper`, then there is no type that satisfies all of the paths in the
// BDD. That's an ambiguous specialization, as described above.
if !greatest_lower_bound.is_subtype_of(db, least_upper_bound) {
// TODO: Construct a useful error here
return Err(());
error_occurred = true;
return None;
}
// Of all of the types that satisfy all of the paths in the BDD, we choose the
// "largest" one (i.e., "closest to `object`") as the specialization.
types[i] = least_upper_bound;
}
Some(least_upper_bound)
});
Ok(self.specialize_recursive(db, types.into_boxed_slice()))
let specialization = self.specialize_recursive(db, types);
if error_occurred {
return Err(());
}
Ok(specialization)
}
}

View File

@ -522,14 +522,15 @@ impl<'db> GenericContext<'db> {
/// Creates a specialization of this generic context. Panics if the length of `types` does not
/// match the number of typevars in the generic context.
///
/// You are allowed to provide types that mention the typevars in this generic context.
pub(crate) fn specialize_recursive(
self,
db: &'db dyn Db,
mut types: Box<[Type<'db>]>,
) -> Specialization<'db> {
/// If any provided type is `None`, we will use the corresponding typevar's default type. You
/// are allowed to provide types that mention the typevars in this generic context.
pub(crate) fn specialize_recursive<I>(self, db: &'db dyn Db, types: I) -> Specialization<'db>
where
I: IntoIterator<Item = Option<Type<'db>>>,
I::IntoIter: ExactSizeIterator,
{
let mut types = self.fill_in_defaults(db, types);
let len = types.len();
assert!(self.len(db) == len);
loop {
let mut any_changed = false;
for i in 0..len {
@ -564,10 +565,7 @@ impl<'db> GenericContext<'db> {
Specialization::new(db, self, Box::from([element_type]), None, Some(tuple))
}
/// Creates a specialization of this generic context. Panics if the length of `types` does not
/// match the number of typevars in the generic context. If any provided type is `None`, we
/// will use the corresponding typevar's default type.
pub(crate) fn specialize_partial<I>(self, db: &'db dyn Db, types: I) -> Specialization<'db>
fn fill_in_defaults<I>(self, db: &'db dyn Db, types: I) -> Box<[Type<'db>]>
where
I: IntoIterator<Item = Option<Type<'db>>>,
I::IntoIter: ExactSizeIterator,
@ -610,7 +608,18 @@ impl<'db> GenericContext<'db> {
expanded[idx] = default;
}
Specialization::new(db, self, expanded.into_boxed_slice(), None, None)
expanded.into_boxed_slice()
}
/// Creates a specialization of this generic context. Panics if the length of `types` does not
/// match the number of typevars in the generic context. If any provided type is `None`, we
/// will use the corresponding typevar's default type.
pub(crate) fn specialize_partial<I>(self, db: &'db dyn Db, types: I) -> Specialization<'db>
where
I: IntoIterator<Item = Option<Type<'db>>>,
I::IntoIter: ExactSizeIterator,
{
Specialization::new(db, self, self.fill_in_defaults(db, types), None, None)
}
pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self {
@ -1045,11 +1054,6 @@ impl<'db> Specialization<'db> {
Specialization::new(db, self.generic_context(db), types, None, None)
}
#[must_use]
pub(crate) fn normalized(self, db: &'db dyn Db) -> Self {
self.normalized_impl(db, &NormalizedVisitor::default())
}
pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self {
let types: Box<[_]> = self
.types(db)