Merge branch 'dcreager/explicit-constriants' into cjm/callable-return-fixed

* dcreager/explicit-constriants:
  update expected output for graph display test
  store this in constraint, not node
  track whether constraints are explicit or not
  track source_order in PathAssignments
  [ty] Use `title` for configuration code fences in ty reference documentation (#21992)
Carl Meyer 2025-12-15 19:50:14 -08:00
commit 0fc9e5e0e9
3 changed files with 223 additions and 92 deletions

View File

@@ -166,8 +166,9 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
     output.push('\n');
     let _ = writeln!(output, "**Type**: `{}`", field.value_type);
     output.push('\n');
-    output.push_str("**Example usage** (`pyproject.toml`):\n\n");
+    output.push_str("**Example usage**:\n\n");
     output.push_str(&format_example(
+        "pyproject.toml",
         &format_header(
             field.scope,
             field.example,
@@ -179,11 +180,11 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
     output.push('\n');
 }
 
-fn format_example(header: &str, content: &str) -> String {
+fn format_example(title: &str, header: &str, content: &str) -> String {
     if header.is_empty() {
-        format!("```toml\n{content}\n```\n",)
+        format!("```toml title=\"{title}\"\n{content}\n```\n",)
     } else {
-        format!("```toml\n{header}\n{content}\n```\n",)
+        format!("```toml title=\"{title}\"\n{header}\n{content}\n```\n",)
     }
 }
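
Editorial aside, not part of the commit: a standalone copy of the new `format_example` (lifted out of its real module purely for illustration) shows the fence the documentation generator now emits. The file name travels in a `title` attribute on the code fence instead of being spelled out in the surrounding prose; the sample header and content values are taken from the generated reference in the next file.

````rust
// Standalone illustration of the new `format_example` behavior shown in the
// diff above; this is not how the generator is wired up, just the formatting.
fn format_example(title: &str, header: &str, content: &str) -> String {
    if header.is_empty() {
        format!("```toml title=\"{title}\"\n{content}\n```\n")
    } else {
        format!("```toml title=\"{title}\"\n{header}\n{content}\n```\n")
    }
}

fn main() {
    // Prints:
    // ```toml title="pyproject.toml"
    // [tool.ty.rules]
    // division-by-zero = "ignore"
    // ```
    print!(
        "{}",
        format_example(
            "pyproject.toml",
            "[tool.ty.rules]",
            "division-by-zero = \"ignore\"",
        )
    );
}
````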

View File

@@ -18,9 +18,9 @@ Valid severities are:
 
 **Type**: `dict[RuleName, "ignore" | "warn" | "error"]`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.rules]
 possibly-unresolved-reference = "warn"
 division-by-zero = "ignore"
@@ -45,9 +45,9 @@ configuration setting.
 
 **Type**: `list[str]`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.environment]
 extra-paths = ["./shared/my-search-path"]
 ```
@@ -76,9 +76,9 @@ This option can be used to point to virtual or system Python environments.
 
 **Type**: `str`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.environment]
 python = "./custom-venv-location/.venv"
 ```
@@ -103,9 +103,9 @@ If no platform is specified, ty will use the current platform:
 
 **Type**: `"win32" | "darwin" | "android" | "ios" | "linux" | "all" | str`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.environment]
 # Tailor type stubs and conditionalized type definitions to windows.
 python-platform = "win32"
@@ -137,9 +137,9 @@ to reflect the differing contents of the standard library across Python versions
 
 **Type**: `"3.7" | "3.8" | "3.9" | "3.10" | "3.11" | "3.12" | "3.13" | "3.14" | <major>.<minor>`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.environment]
 python-version = "3.12"
 ```
@@ -165,9 +165,9 @@ it will also be included in the first party search path.
 
 **Type**: `list[str]`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.environment]
 # Multiple directories (priority order)
 root = ["./src", "./lib", "./vendor"]
@@ -185,9 +185,9 @@ bundled as a zip file in the binary
 
 **Type**: `str`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.environment]
 typeshed = "/path/to/custom/typeshed"
 ```
@@ -240,9 +240,9 @@ If not specified, defaults to `[]` (excludes no files).
 
 **Type**: `list[str]`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [[tool.ty.overrides]]
 exclude = [
     "generated",
@@ -268,9 +268,9 @@ If not specified, defaults to `["**"]` (matches all files).
 
 **Type**: `list[str]`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [[tool.ty.overrides]]
 include = [
     "src",
@@ -292,9 +292,9 @@ severity levels or disable them entirely.
 
 **Type**: `dict[RuleName, "ignore" | "warn" | "error"]`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [[tool.ty.overrides]]
 include = ["src"]
 
@@ -358,9 +358,9 @@ to re-include `dist` use `exclude = ["!dist"]`
 
 **Type**: `list[str]`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.src]
 exclude = [
     "generated",
@@ -399,9 +399,9 @@ matches `<project_root>/src` and not `<project_root>/test/src`).
 
 **Type**: `list[str]`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.src]
 include = [
     "src",
@@ -421,9 +421,9 @@ Enabled by default.
 
 **Type**: `bool`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.src]
 respect-ignore-files = false
 ```
@@ -450,9 +450,9 @@ it will also be included in the first party search path.
 
 **Type**: `str`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.src]
 root = "./app"
 ```
@@ -471,9 +471,9 @@ Defaults to `false`.
 
 **Type**: `bool`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.terminal]
 # Error if ty emits any warning-level diagnostics.
 error-on-warning = true
@@ -491,9 +491,9 @@ Defaults to `full`.
 
 **Type**: `full | concise`
 
-**Example usage** (`pyproject.toml`):
+**Example usage**:
 
-```toml
+```toml title="pyproject.toml"
 [tool.ty.terminal]
 output-format = "concise"
 ```

View File

@@ -69,7 +69,7 @@
 use std::cell::RefCell;
 use std::cmp::Ordering;
 use std::fmt::Display;
-use std::ops::Range;
+use std::ops::{BitAnd, BitOr, Range};
 
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -207,7 +207,13 @@ impl<'db> ConstraintSet<'db> {
         upper: Type<'db>,
     ) -> Self {
         Self {
-            node: ConstrainedTypeVar::new_node(db, typevar, lower, upper),
+            node: ConstrainedTypeVar::new_node(
+                db,
+                typevar,
+                lower,
+                upper,
+                ExplicitConstraint::Explicit,
+            ),
         }
     }
 
@@ -511,6 +517,7 @@ pub(crate) struct ConstrainedTypeVar<'db> {
     pub(crate) typevar: BoundTypeVarInstance<'db>,
     pub(crate) lower: Type<'db>,
     pub(crate) upper: Type<'db>,
+    pub(crate) explicit: ExplicitConstraint,
 }
 
 // The Salsa heap is tracked separately.
@@ -519,13 +526,12 @@ impl get_size2::GetSize for ConstrainedTypeVar<'_> {}
 #[salsa::tracked]
 impl<'db> ConstrainedTypeVar<'db> {
     /// Returns a new range constraint.
-    ///
-    /// Panics if `lower` and `upper` are not both fully static.
     fn new_node(
         db: &'db dyn Db,
         typevar: BoundTypeVarInstance<'db>,
         mut lower: Type<'db>,
        mut upper: Type<'db>,
+        explicit: ExplicitConstraint,
     ) -> Node<'db> {
         // It's not useful for an upper bound to be an intersection type, or for a lower bound to
         // be a union type. Because the following equivalences hold, we can break these bounds
@@ -540,7 +546,7 @@ impl<'db> ConstrainedTypeVar<'db> {
             for lower_element in lower_union.elements(db) {
                 result = result.and_with_offset(
                     db,
-                    ConstrainedTypeVar::new_node(db, typevar, *lower_element, upper),
+                    ConstrainedTypeVar::new_node(db, typevar, *lower_element, upper, explicit),
                 );
             }
             return result;
@@ -555,13 +561,19 @@ impl<'db> ConstrainedTypeVar<'db> {
             for upper_element in upper_intersection.iter_positive(db) {
                 result = result.and_with_offset(
                     db,
-                    ConstrainedTypeVar::new_node(db, typevar, lower, upper_element),
+                    ConstrainedTypeVar::new_node(db, typevar, lower, upper_element, explicit),
                 );
             }
             for upper_element in upper_intersection.iter_negative(db) {
                 result = result.and_with_offset(
                     db,
-                    ConstrainedTypeVar::new_node(db, typevar, lower, upper_element.negate(db)),
+                    ConstrainedTypeVar::new_node(
+                        db,
+                        typevar,
+                        lower,
+                        upper_element.negate(db),
+                        explicit,
+                    ),
                 );
             }
             return result;
@@ -593,7 +605,7 @@ impl<'db> ConstrainedTypeVar<'db> {
         {
             return Node::new_constraint(
                 db,
-                ConstrainedTypeVar::new(db, typevar, Type::Never, Type::object()),
+                ConstrainedTypeVar::new(db, typevar, Type::Never, Type::object(), explicit),
                 1,
             )
             .negate(db);
@@ -645,6 +657,7 @@ impl<'db> ConstrainedTypeVar<'db> {
                     typevar,
                     Type::TypeVar(bound),
                     Type::TypeVar(bound),
+                    explicit,
                 ),
                 1,
             )
@@ -656,12 +669,24 @@ impl<'db> ConstrainedTypeVar<'db> {
             {
                 let lower = Node::new_constraint(
                     db,
-                    ConstrainedTypeVar::new(db, lower, Type::Never, Type::TypeVar(typevar)),
+                    ConstrainedTypeVar::new(
+                        db,
+                        lower,
+                        Type::Never,
+                        Type::TypeVar(typevar),
+                        explicit,
+                    ),
                     1,
                 );
                 let upper = Node::new_constraint(
                     db,
-                    ConstrainedTypeVar::new(db, upper, Type::TypeVar(typevar), Type::object()),
+                    ConstrainedTypeVar::new(
+                        db,
+                        upper,
+                        Type::TypeVar(typevar),
+                        Type::object(),
+                        explicit,
+                    ),
                     1,
                 );
                 lower.and(db, upper)
@@ -671,13 +696,19 @@ impl<'db> ConstrainedTypeVar<'db> {
             (Type::TypeVar(lower), _) if typevar.can_be_bound_for(db, lower) => {
                 let lower = Node::new_constraint(
                     db,
-                    ConstrainedTypeVar::new(db, lower, Type::Never, Type::TypeVar(typevar)),
+                    ConstrainedTypeVar::new(
+                        db,
+                        lower,
+                        Type::Never,
+                        Type::TypeVar(typevar),
+                        explicit,
+                    ),
                     1,
                 );
                 let upper = if upper.is_object() {
                     Node::AlwaysTrue
                 } else {
-                    Self::new_node(db, typevar, Type::Never, upper)
+                    Self::new_node(db, typevar, Type::Never, upper, explicit)
                 };
                 lower.and(db, upper)
             }
@@ -687,17 +718,27 @@ impl<'db> ConstrainedTypeVar<'db> {
                 let lower = if lower.is_never() {
                     Node::AlwaysTrue
                 } else {
-                    Self::new_node(db, typevar, lower, Type::object())
+                    Self::new_node(db, typevar, lower, Type::object(), explicit)
                 };
                 let upper = Node::new_constraint(
                     db,
-                    ConstrainedTypeVar::new(db, upper, Type::TypeVar(typevar), Type::object()),
+                    ConstrainedTypeVar::new(
+                        db,
+                        upper,
+                        Type::TypeVar(typevar),
+                        Type::object(),
+                        explicit,
+                    ),
                     1,
                 );
                 lower.and(db, upper)
             }
-            _ => Node::new_constraint(db, ConstrainedTypeVar::new(db, typevar, lower, upper), 1),
+            _ => Node::new_constraint(
+                db,
+                ConstrainedTypeVar::new(db, typevar, lower, upper, explicit),
+                1,
+            ),
         }
     }
@@ -715,6 +756,7 @@ impl<'db> ConstrainedTypeVar<'db> {
             self.typevar(db),
             self.lower(db).normalized(db),
             self.upper(db).normalized(db),
+            self.explicit(db),
         )
     }
 
@@ -734,6 +776,7 @@ impl<'db> ConstrainedTypeVar<'db> {
         (
             self.typevar(db).binding_context(db),
             self.typevar(db).identity(db),
+            self.explicit(db),
            self.as_id(),
         )
     }
@@ -771,7 +814,13 @@ impl<'db> ConstrainedTypeVar<'db> {
             return IntersectionResult::CannotSimplify;
         }
 
-        IntersectionResult::Simplified(Self::new(db, self.typevar(db), lower, upper))
+        IntersectionResult::Simplified(Self::new(
+            db,
+            self.typevar(db),
+            lower,
+            upper,
+            self.explicit(db) & other.explicit(db),
+        ))
     }
 
     pub(crate) fn display(self, db: &'db dyn Db) -> impl Display {
@@ -1295,12 +1344,14 @@ impl<'db> Node<'db> {
                 bound_typevar,
                 Type::Never,
                 rhs.bottom_materialization(db),
+                ExplicitConstraint::Implicit,
             ),
             (_, Type::TypeVar(bound_typevar)) => ConstrainedTypeVar::new_node(
                 db,
                 bound_typevar,
                 lhs.top_materialization(db),
                 Type::object(),
+                ExplicitConstraint::Implicit,
             ),
             _ => panic!("at least one type should be a typevar"),
         };
@@ -1812,10 +1863,12 @@ impl<'db> Node<'db> {
             Node::AlwaysTrue => write!(f, "always"),
             Node::AlwaysFalse => write!(f, "never"),
             Node::Interior(interior) => {
+                let constraint = interior.constraint(self.db);
                 write!(
                     f,
-                    "{} {}/{}",
-                    interior.constraint(self.db).display(self.db),
+                    "{} {:?} {}/{}",
+                    constraint.display(self.db),
+                    constraint.explicit(self.db),
                     interior.source_order(self.db),
                     interior.max_source_order(self.db),
                 )?;
@@ -1872,6 +1925,42 @@ impl<'db> RepresentativeBounds<'db> {
     }
 }
 
+#[derive(
+    Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, get_size2::GetSize, salsa::Update,
+)]
+pub(crate) enum ExplicitConstraint {
+    Implicit,
+    Explicit,
+}
+
+impl BitAnd for ExplicitConstraint {
+    type Output = Self;
+    fn bitand(self, other: Self) -> Self {
+        match (self, other) {
+            (ExplicitConstraint::Explicit, ExplicitConstraint::Explicit) => {
+                ExplicitConstraint::Explicit
+            }
+            (ExplicitConstraint::Implicit, _) | (_, ExplicitConstraint::Implicit) => {
+                ExplicitConstraint::Implicit
+            }
+        }
+    }
+}
+
+impl BitOr for ExplicitConstraint {
+    type Output = Self;
+    fn bitor(self, other: Self) -> Self {
+        match (self, other) {
+            (ExplicitConstraint::Implicit, ExplicitConstraint::Implicit) => {
+                ExplicitConstraint::Implicit
+            }
+            (ExplicitConstraint::Explicit, _) | (_, ExplicitConstraint::Explicit) => {
+                ExplicitConstraint::Explicit
+            }
+        }
+    }
+}
+
 /// An interior node of a BDD
 #[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)]
 struct InteriorNode<'db> {
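
Editorial aside, not part of the commit: the enum added above only records where a constraint came from, and the operator impls define how that provenance combines when constraints are merged. A condensed, standalone mirror of those impls (free-standing illustration code, outside the real module) makes the intent concrete: `&` keeps a result `Explicit` only when every input was explicit, which is what the intersection-simplification and sequent-map hunks rely on when they combine two constraints' `explicit` flags with `&`, while `|` marks a result `Explicit` as soon as any input is.

```rust
// Condensed mirror of `ExplicitConstraint` and its BitAnd/BitOr impls from the
// diff above, reproduced standalone so the combination rules can be exercised.
use std::ops::{BitAnd, BitOr};

#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum ExplicitConstraint {
    Implicit,
    Explicit,
}

impl BitAnd for ExplicitConstraint {
    type Output = Self;

    // A constraint derived from several inputs is explicit only if *all* of
    // its inputs were explicit.
    fn bitand(self, other: Self) -> Self {
        match (self, other) {
            (ExplicitConstraint::Explicit, ExplicitConstraint::Explicit) => {
                ExplicitConstraint::Explicit
            }
            _ => ExplicitConstraint::Implicit,
        }
    }
}

impl BitOr for ExplicitConstraint {
    type Output = Self;

    // A disjunction of constraints is explicit if *any* branch was explicit.
    fn bitor(self, other: Self) -> Self {
        match (self, other) {
            (ExplicitConstraint::Implicit, ExplicitConstraint::Implicit) => {
                ExplicitConstraint::Implicit
            }
            _ => ExplicitConstraint::Explicit,
        }
    }
}

fn main() {
    use ExplicitConstraint::{Explicit, Implicit};

    // `&`: the merged constraint is only as explicit as its weakest input.
    assert_eq!(Explicit & Explicit, Explicit);
    assert_eq!(Explicit & Implicit, Implicit);

    // `|`: one explicit branch is enough.
    assert_eq!(Implicit | Explicit, Explicit);
    assert_eq!(Implicit | Implicit, Implicit);

    println!("ExplicitConstraint combination rules hold");
}
```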
@@ -2079,13 +2168,7 @@ impl<'db> InteriorNode<'db> {
         //
         // We also have to check if there are any derived facts that depend on the constraint
         // we're about to remove. If so, we need to "remember" them by AND-ing them in with the
-        // corresponding branch. We currently reuse the `source_order` of the constraint being
-        // removed when we add these derived facts.
-        //
-        // TODO: This might not be stable enough, if we add more than one derived fact for this
-        // constraint. If we still see inconsistent test output, we might need a more complex
-        // way of tracking source order for derived facts.
+        // corresponding branch.
+        let self_source_order = self.source_order(db);
         let if_true = path
             .walk_edge(
                 db,
@@ -2168,8 +2251,7 @@ impl<'db> InteriorNode<'db> {
         // NB: We cannot use `Node::new` here, because the recursive calls might introduce new
         // derived constraints into the result, and those constraints might appear before this
         // one in the BDD ordering.
-        Node::new_constraint(db, self_constraint, self.source_order(db))
-            .ite(db, if_true, if_false)
+        Node::new_constraint(db, self_constraint, self_source_order).ite(db, if_true, if_false)
     }
 }
@@ -2332,8 +2414,13 @@ impl<'db> InteriorNode<'db> {
                 _ => continue,
             };
-            let new_constraint =
-                ConstrainedTypeVar::new(db, constrained_typevar, new_lower, new_upper);
+            let new_constraint = ConstrainedTypeVar::new(
+                db,
+                constrained_typevar,
+                new_lower,
+                new_upper,
+                ExplicitConstraint::Implicit,
+            );
             if seen_constraints.contains(&new_constraint) {
                 continue;
             }
@@ -2956,21 +3043,35 @@ impl<'db> SequentMap<'db> {
             // Case 1
             (Type::TypeVar(lower_typevar), Type::TypeVar(upper_typevar)) => {
                 if !lower_typevar.is_same_typevar_as(db, upper_typevar) {
-                    ConstrainedTypeVar::new(db, lower_typevar, Type::Never, upper)
+                    ConstrainedTypeVar::new(
+                        db,
+                        lower_typevar,
+                        Type::Never,
+                        upper,
+                        constraint.explicit(db),
+                    )
                 } else {
                     return;
                 }
             }
 
             // Case 2
-            (Type::TypeVar(lower_typevar), _) => {
-                ConstrainedTypeVar::new(db, lower_typevar, Type::Never, upper)
-            }
+            (Type::TypeVar(lower_typevar), _) => ConstrainedTypeVar::new(
+                db,
+                lower_typevar,
+                Type::Never,
+                upper,
+                constraint.explicit(db),
+            ),
 
             // Case 3
-            (_, Type::TypeVar(upper_typevar)) => {
-                ConstrainedTypeVar::new(db, upper_typevar, lower, Type::object())
-            }
+            (_, Type::TypeVar(upper_typevar)) => ConstrainedTypeVar::new(
+                db,
+                upper_typevar,
+                lower,
+                Type::object(),
+                constraint.explicit(db),
+            ),
             _ => return,
         };
@@ -3102,8 +3203,13 @@ impl<'db> SequentMap<'db> {
             _ => return,
         };
-        let post_constraint =
-            ConstrainedTypeVar::new(db, constrained_typevar, new_lower, new_upper);
+        let post_constraint = ConstrainedTypeVar::new(
+            db,
+            constrained_typevar,
+            new_lower,
+            new_upper,
+            left_constraint.explicit(db) & right_constraint.explicit(db),
+        );
         self.add_pair_implication(db, left_constraint, right_constraint, post_constraint);
         self.enqueue_constraint(post_constraint);
     }
@@ -3121,6 +3227,7 @@ impl<'db> SequentMap<'db> {
         let left_upper = left_constraint.upper(db);
         let right_lower = right_constraint.lower(db);
         let right_upper = right_constraint.upper(db);
+        let explicit = left_constraint.explicit(db) & right_constraint.explicit(db);
         let new_constraint = |bound_typevar: BoundTypeVarInstance<'db>,
                               right_lower: Type<'db>,
                               right_upper: Type<'db>| {
@@ -3138,7 +3245,7 @@ impl<'db> SequentMap<'db> {
             } else {
                 right_upper
             };
-            ConstrainedTypeVar::new(db, bound_typevar, right_lower, right_upper)
+            ConstrainedTypeVar::new(db, bound_typevar, right_lower, right_upper, explicit)
         };
         let post_constraint = match (left_lower, left_upper) {
             (Type::TypeVar(bound_typevar), Type::TypeVar(other_bound_typevar))
@@ -3733,7 +3840,13 @@ impl<'db> BoundTypeVarInstance<'db> {
             None => Node::AlwaysTrue,
             Some(TypeVarBoundOrConstraints::UpperBound(bound)) => {
                 let bound = bound.top_materialization(db);
-                ConstrainedTypeVar::new_node(db, self, Type::Never, bound)
+                ConstrainedTypeVar::new_node(
+                    db,
+                    self,
+                    Type::Never,
+                    bound,
+                    ExplicitConstraint::Implicit,
+                )
             }
             Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
                 let mut specializations = Node::AlwaysFalse;
@@ -3742,7 +3855,13 @@ impl<'db> BoundTypeVarInstance<'db> {
                     let constraint_upper = constraint.top_materialization(db);
                     specializations = specializations.or_with_offset(
                         db,
-                        ConstrainedTypeVar::new_node(db, self, constraint_lower, constraint_upper),
+                        ConstrainedTypeVar::new_node(
+                            db,
+                            self,
+                            constraint_lower,
+                            constraint_upper,
+                            ExplicitConstraint::Implicit,
+                        ),
                     );
                 }
                 specializations
@@ -3777,7 +3896,13 @@ impl<'db> BoundTypeVarInstance<'db> {
             Some(TypeVarBoundOrConstraints::UpperBound(bound)) => {
                 let bound = bound.bottom_materialization(db);
                 (
-                    ConstrainedTypeVar::new_node(db, self, Type::Never, bound),
+                    ConstrainedTypeVar::new_node(
+                        db,
+                        self,
+                        Type::Never,
+                        bound,
+                        ExplicitConstraint::Implicit,
+                    ),
                     Vec::new(),
                 )
             }
@@ -3787,8 +3912,13 @@ impl<'db> BoundTypeVarInstance<'db> {
                 for constraint in constraints.elements(db) {
                     let constraint_lower = constraint.bottom_materialization(db);
                     let constraint_upper = constraint.top_materialization(db);
-                    let constraint =
-                        ConstrainedTypeVar::new_node(db, self, constraint_lower, constraint_upper);
+                    let constraint = ConstrainedTypeVar::new_node(
+                        db,
+                        self,
+                        constraint_lower,
+                        constraint_upper,
+                        ExplicitConstraint::Implicit,
+                    );
                     if constraint_lower == constraint_upper {
                         non_gradual_constraints =
                             non_gradual_constraints.or_with_offset(db, constraint);
@@ -3957,28 +4087,28 @@ mod tests {
     #[test]
     fn test_display_graph_output() {
         let expected = indoc! {r#"
-(T = str) 3/4
-(T = bool) 4/4
-(U = str) 1/2
-(U = bool) 2/2
+(T = str) Explicit 3/4
+(T = bool) Explicit 4/4
+(U = str) Explicit 1/2
+(U = bool) Explicit 2/2
 always
 always
-(U = bool) 2/2
+(U = bool) Explicit 2/2
 always
 never
-(U = str) 1/2
-(U = bool) 2/2
+(U = str) Explicit 1/2
+(U = bool) Explicit 2/2
 always
 always
-(U = bool) 2/2
+(U = bool) Explicit 2/2
 always
 never
-(T = bool) 4/4
-(U = str) 1/2
-(U = bool) 2/2
+(T = bool) Explicit 4/4
+(U = str) Explicit 1/2
+(U = bool) Explicit 2/2
 always
 always
-(U = bool) 2/2
+(U = bool) Explicit 2/2
 always
 never
 never