mirror of https://github.com/astral-sh/ruff
Merge remote-tracking branch 'origin/main' into dcreager/callable-return
* origin/main: (41 commits)
  [ty] Carry generic context through when converting class into `Callable` (#21798)
  [ty] Add more tests for renamings (#21810)
  [ty] Minor improvements to `assert_type` diagnostics (#21811)
  [ty] Add some attribute/method renaming test cases (#21809)
  Update mkdocs-material to 9.7.0 (Insiders now free) (#21797)
  Remove unused whitespaces in test cases (#21806)
  [ty] fix panic when instantiating a type variable with invalid constraints (#21663)
  [ty] fix build failure caused by conflicts between #21683 and #21800 (#21802)
  [ty] do nothing with `store_expression_type` if `inner_expression_inference_state` is `Get` (#21718)
  [ty] increase the limit on the number of elements in a non-recursively defined literal union (#21683)
  [ty] normalize typevar bounds/constraints in cycles (#21800)
  [ty] Update completion eval to include modules
  [ty] Add modules to auto-import
  [ty] Add support for module-only import requests
  [ty] Refactor auto-import symbol info
  [ty] Clarify the use of `SymbolKind` in auto-import
  [ty] Redact ranking of completions from e2e LSP tests
  [ty] Tweaks tests to use clearer language
  [ty] Update evaluation results
  [ty] Make auto-import ignore symbols in modules starting with a `_`
  ...
This commit is contained in: commit c0dc6cfa61
@@ -75,14 +75,6 @@
     matchManagers: ["cargo"],
     enabled: false,
   },
-  {
-    // `mkdocs-material` requires a manual update to keep the version in sync
-    // with `mkdocs-material-insider`.
-    // See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
-    matchManagers: ["pip_requirements"],
-    matchPackageNames: ["mkdocs-material"],
-    enabled: false,
-  },
   {
     groupName: "pre-commit dependencies",
     matchManagers: ["pre-commit"],
@@ -779,8 +779,6 @@ jobs:
     name: "mkdocs"
     runs-on: ubuntu-latest
     timeout-minutes: 10
-    env:
-      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
@@ -788,11 +786,6 @@ jobs:
       - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
-      - name: "Add SSH key"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
-        with:
-          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
      - name: "Install Rust toolchain"
        run: rustup show
      - name: Install uv
@@ -800,11 +793,7 @@ jobs:
        with:
          python-version: 3.13
          activate-environment: true
-      - name: "Install Insiders dependencies"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        run: uv pip install -r docs/requirements-insiders.txt
      - name: "Install dependencies"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
        run: uv pip install -r docs/requirements.txt
      - name: "Update README File"
        run: python scripts/transform_readme.py --target mkdocs
@@ -812,12 +801,8 @@ jobs:
        run: python scripts/generate_mkdocs.py
      - name: "Check docs formatting"
        run: python scripts/check_docs_formatted.py
-      - name: "Build Insiders docs"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        run: mkdocs build --strict -f mkdocs.insiders.yml
      - name: "Build docs"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
-        run: mkdocs build --strict -f mkdocs.public.yml
+        run: mkdocs build --strict -f mkdocs.yml

  check-formatter-instability-and-black-similarity:
    name: "formatter instabilities and black similarity"
@@ -20,8 +20,6 @@ on:
 jobs:
   mkdocs:
     runs-on: ubuntu-latest
-    env:
-      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
@@ -59,23 +57,12 @@ jobs:
          echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
          echo "timestamp=$timestamp" >> "$GITHUB_ENV"

-      - name: "Add SSH key"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
-        with:
-          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
-
      - name: "Install Rust toolchain"
        run: rustup show

      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

-      - name: "Install Insiders dependencies"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        run: pip install -r docs/requirements-insiders.txt
-
      - name: "Install dependencies"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
        run: pip install -r docs/requirements.txt

      - name: "Copy README File"
@@ -83,13 +70,8 @@ jobs:
          python scripts/transform_readme.py --target mkdocs
          python scripts/generate_mkdocs.py

-      - name: "Build Insiders docs"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        run: mkdocs build --strict -f mkdocs.insiders.yml
-
      - name: "Build docs"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
-        run: mkdocs build --strict -f mkdocs.public.yml
+        run: mkdocs build --strict -f mkdocs.yml

      - name: "Clone docs repo"
        run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
@@ -18,7 +18,8 @@ jobs:
    environment:
      name: release
    permissions:
-      id-token: write # For PyPI's trusted publishing + PEP 740 attestations
+      # For PyPI's trusted publishing.
+      id-token: write
    steps:
      - name: "Install uv"
        uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
@@ -27,8 +28,5 @@ jobs:
          pattern: wheels-*
          path: wheels
          merge-multiple: true
-      - uses: astral-sh/attest-action@2c727738cea36d6c97dd85eb133ea0e0e8fe754b # v0.0.4
-        with:
-          paths: wheels/*
      - name: Publish to PyPi
        run: uv publish -v wheels/*
CHANGELOG.md (+29)
@@ -1,5 +1,34 @@
 # Changelog
 
+## 0.14.8
+
+Released on 2025-12-04.
+
+### Preview features
+
+- \[`flake8-bugbear`\] Catch `yield` expressions within other statements (`B901`) ([#21200](https://github.com/astral-sh/ruff/pull/21200))
+- \[`flake8-use-pathlib`\] Mark fixes unsafe for return type changes (`PTH104`, `PTH105`, `PTH109`, `PTH115`) ([#21440](https://github.com/astral-sh/ruff/pull/21440))
+
+### Bug fixes
+
+- Fix syntax error false positives for `await` outside functions ([#21763](https://github.com/astral-sh/ruff/pull/21763))
+- \[`flake8-simplify`\] Fix truthiness assumption for non-iterable arguments in tuple/list/set calls (`SIM222`, `SIM223`) ([#21479](https://github.com/astral-sh/ruff/pull/21479))
+
+### Documentation
+
+- Suggest using `--output-file` option in GitLab integration ([#21706](https://github.com/astral-sh/ruff/pull/21706))
+
+### Other changes
+
+- [syntax-error] Default type parameter followed by non-default type parameter ([#21657](https://github.com/astral-sh/ruff/pull/21657))
+
+### Contributors
+
+- [@kieran-ryan](https://github.com/kieran-ryan)
+- [@11happy](https://github.com/11happy)
+- [@danparizher](https://github.com/danparizher)
+- [@ntBre](https://github.com/ntBre)
+
 ## 0.14.7
 
 Released on 2025-11-28.
@@ -331,13 +331,6 @@ you addressed them.
 
 ## MkDocs
 
-> [!NOTE]
->
-> The documentation uses Material for MkDocs Insiders, which is closed-source software.
-> This means only members of the Astral organization can preview the documentation exactly as it
-> will appear in production.
-> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) for which features are exclusively available in the insiders version.
-
 To preview any changes to the documentation locally:
 
 1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
@@ -351,11 +344,7 @@ To preview any changes to the documentation locally:
 1. Run the development server with:
 
    ```shell
-   # For contributors.
-   uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
-
-   # For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
-   uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
+   uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.yml
   ```
 
 The documentation should then be available locally at
@@ -2859,7 +2859,7 @@ dependencies = [
 
 [[package]]
 name = "ruff"
-version = "0.14.7"
+version = "0.14.8"
 dependencies = [
  "anyhow",
  "argfile",
@@ -3117,7 +3117,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_linter"
-version = "0.14.7"
+version = "0.14.8"
 dependencies = [
  "aho-corasick",
  "anyhow",
@@ -3473,7 +3473,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_wasm"
-version = "0.14.7"
+version = "0.14.8"
 dependencies = [
  "console_error_panic_hook",
  "console_log",
@@ -147,8 +147,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
 
 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.14.7/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.14.7/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.14.8/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.14.8/install.ps1 | iex"
 ```
 
 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -181,7 +181,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff-pre-commit`]:
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.14.7
+  rev: v0.14.8
   hooks:
     # Run the linter.
     - id: ruff-check
@@ -1,6 +1,6 @@
 [package]
 name = "ruff"
-version = "0.14.7"
+version = "0.14.8"
 publish = true
 authors = { workspace = true }
 edition = { workspace = true }
@@ -667,6 +667,13 @@ impl Deref for SystemPathBuf {
     }
 }
 
+impl AsRef<Path> for SystemPathBuf {
+    #[inline]
+    fn as_ref(&self) -> &Path {
+        self.0.as_std_path()
+    }
+}
+
 impl<P: AsRef<SystemPath>> FromIterator<P> for SystemPathBuf {
     fn from_iter<I: IntoIterator<Item = P>>(iter: I) -> Self {
        let mut buf = SystemPathBuf::new();
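Note on the `AsRef<Path>` impl added above: it lets a `SystemPathBuf` be handed directly to standard-library APIs that take `impl AsRef<Path>`. A minimal sketch of the idea, using a simplified, hypothetical stand-in for the real `ruff_db` type (not the actual definition):

```rust
use std::path::{Path, PathBuf};

// Hypothetical stand-in for `ruff_db::system::SystemPathBuf`; the real type
// wraps a UTF-8 path and exposes it through `as_std_path`.
struct SystemPathBuf(PathBuf);

impl SystemPathBuf {
    fn as_std_path(&self) -> &Path {
        &self.0
    }
}

// The impl added in the diff above: borrow the underlying std path.
impl AsRef<Path> for SystemPathBuf {
    fn as_ref(&self) -> &Path {
        self.as_std_path()
    }
}

fn main() -> std::io::Result<()> {
    let path = SystemPathBuf(PathBuf::from("Cargo.toml"));
    // Any API taking `impl AsRef<Path>` now accepts the wrapper directly,
    // with no explicit `.as_std_path()` at the call site.
    let contents = std::fs::read_to_string(&path)?;
    println!("read {} bytes", contents.len());
    Ok(())
}
```

Because std also provides a blanket `impl AsRef<U> for &T where T: AsRef<U>`, both owned values and references work at such call sites.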
@@ -49,7 +49,7 @@ impl ModuleImports {
         // Resolve the imports.
         let mut resolved_imports = ModuleImports::default();
         for import in imports {
-            for resolved in Resolver::new(db).resolve(import) {
+            for resolved in Resolver::new(db, path).resolve(import) {
                 if let Some(path) = resolved.as_system_path() {
                     resolved_imports.insert(path.to_path_buf());
                 }
@@ -1,5 +1,9 @@
-use ruff_db::files::FilePath;
-use ty_python_semantic::{ModuleName, resolve_module, resolve_real_module};
+use ruff_db::files::{File, FilePath, system_path_to_file};
+use ruff_db::system::SystemPath;
+use ty_python_semantic::{
+    ModuleName, resolve_module, resolve_module_confident, resolve_real_module,
+    resolve_real_module_confident,
+};
 
 use crate::ModuleDb;
 use crate::collector::CollectedImport;
@@ -7,12 +11,15 @@ use crate::collector::CollectedImport;
 /// Collect all imports for a given Python file.
 pub(crate) struct Resolver<'a> {
     db: &'a ModuleDb,
+    file: Option<File>,
 }
 
 impl<'a> Resolver<'a> {
     /// Initialize a [`Resolver`] with a given [`ModuleDb`].
-    pub(crate) fn new(db: &'a ModuleDb) -> Self {
-        Self { db }
+    pub(crate) fn new(db: &'a ModuleDb, path: &SystemPath) -> Self {
+        // If we know the importing file we can potentially resolve more imports
+        let file = system_path_to_file(db, path).ok();
+        Self { db, file }
     }
 
     /// Resolve the [`CollectedImport`] into a [`FilePath`].
@@ -70,13 +77,21 @@ impl<'a> Resolver<'a> {
 
     /// Resolves a module name to a module.
     pub(crate) fn resolve_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
-        let module = resolve_module(self.db, module_name)?;
+        let module = if let Some(file) = self.file {
+            resolve_module(self.db, file, module_name)?
+        } else {
+            resolve_module_confident(self.db, module_name)?
+        };
         Some(module.file(self.db)?.path(self.db))
     }
 
     /// Resolves a module name to a module (stubs not allowed).
     fn resolve_real_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
-        let module = resolve_real_module(self.db, module_name)?;
+        let module = if let Some(file) = self.file {
+            resolve_real_module(self.db, file, module_name)?
+        } else {
+            resolve_real_module_confident(self.db, module_name)?
+        };
         Some(module.file(self.db)?.path(self.db))
     }
 }
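The resolver change above follows a simple fallback shape: use the importing file for context when it is available, otherwise fall back to a context-free ("confident") lookup. A self-contained sketch of that shape, with hypothetical `lookup_*` functions standing in for `resolve_module`/`resolve_module_confident`:

```rust
// Hypothetical stand-ins for the two resolution entry points.
fn lookup_with_context(importing_file: &str, name: &str) -> Option<String> {
    // A real implementation could consult the importing file's search paths first.
    Some(format!("{name} (resolved relative to {importing_file})"))
}

fn lookup_confident(name: &str) -> Option<String> {
    Some(format!("{name} (resolved without an importing file)"))
}

struct Resolver {
    // `None` when the importing file could not be loaded.
    file: Option<String>,
}

impl Resolver {
    fn resolve(&self, name: &str) -> Option<String> {
        // Prefer the context-aware lookup; fall back to the context-free one.
        match &self.file {
            Some(file) => lookup_with_context(file, name),
            None => lookup_confident(name),
        }
    }
}

fn main() {
    let with_file = Resolver { file: Some("src/main.py".to_string()) };
    let without_file = Resolver { file: None };
    println!("{:?}", with_file.resolve("collections.abc"));
    println!("{:?}", without_file.resolve("collections.abc"));
}
```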
@@ -1,6 +1,6 @@
 [package]
 name = "ruff_linter"
-version = "0.14.7"
+version = "0.14.8"
 publish = false
 authors = { workspace = true }
 edition = { workspace = true }
@@ -52,16 +52,16 @@ def not_broken5():
     yield inner()
 
 
-def not_broken6():
+def broken3():
     return (yield from [])
 
 
-def not_broken7():
+def broken4():
     x = yield from []
     return x
 
 
-def not_broken8():
+def broken5():
     x = None
 
     def inner(ex):
@@ -76,3 +76,13 @@ class NotBroken9(object):
     def __await__(self):
         yield from function()
         return 42
+
+
+async def broken6():
+    yield 1
+    return foo()
+
+
+async def broken7():
+    yield 1
+    return [1, 2, 3]
crates/ruff_linter/resources/test/fixtures/syntax_errors/return_in_generator.py (new file, +24)
@@ -0,0 +1,24 @@
+async def gen():
+    yield 1
+    return 42
+
+def gen(): # B901 but not a syntax error - not an async generator
+    yield 1
+    return 42
+
+async def gen(): # ok - no value in return
+    yield 1
+    return
+
+async def gen():
+    yield 1
+    return foo()
+
+async def gen():
+    yield 1
+    return [1, 2, 3]
+
+async def gen():
+    if True:
+        yield 1
+    return 10
@@ -69,6 +69,7 @@ use crate::noqa::NoqaMapping;
 use crate::package::PackageRoot;
 use crate::preview::is_undefined_export_in_dunder_init_enabled;
 use crate::registry::Rule;
+use crate::rules::flake8_bugbear::rules::ReturnInGenerator;
 use crate::rules::pyflakes::rules::{
     LateFutureImport, MultipleStarredExpressions, ReturnOutsideFunction,
     UndefinedLocalWithNestedImportStarUsage, YieldOutsideFunction,
@@ -729,6 +730,12 @@ impl SemanticSyntaxContext for Checker<'_> {
                     self.report_diagnostic(NonlocalWithoutBinding { name }, error.range);
                 }
             }
+            SemanticSyntaxErrorKind::ReturnInGenerator => {
+                // B901
+                if self.is_rule_enabled(Rule::ReturnInGenerator) {
+                    self.report_diagnostic(ReturnInGenerator, error.range);
+                }
+            }
             SemanticSyntaxErrorKind::ReboundComprehensionVariable
             | SemanticSyntaxErrorKind::DuplicateTypeParameter
             | SemanticSyntaxErrorKind::MultipleCaseAssignment(_)
@@ -1043,6 +1043,7 @@ mod tests {
         Rule::YieldFromInAsyncFunction,
         Path::new("yield_from_in_async_function.py")
     )]
+    #[test_case(Rule::ReturnInGenerator, Path::new("return_in_generator.py"))]
     fn test_syntax_errors(rule: Rule, path: &Path) -> Result<()> {
         let snapshot = path.to_string_lossy().to_string();
         let path = Path::new("resources/test/fixtures/syntax_errors").join(path);
@@ -1,6 +1,5 @@
 use ruff_macros::{ViolationMetadata, derive_message_formats};
-use ruff_python_ast::statement_visitor;
-use ruff_python_ast::statement_visitor::StatementVisitor;
+use ruff_python_ast::visitor::{Visitor, walk_expr, walk_stmt};
 use ruff_python_ast::{self as ast, Expr, Stmt, StmtFunctionDef};
 use ruff_text_size::TextRange;
 
@@ -96,6 +95,11 @@ pub(crate) fn return_in_generator(checker: &Checker, function_def: &StmtFunctionDef
         return;
     }
 
+    // Async functions are flagged by the `ReturnInGenerator` semantic syntax error.
+    if function_def.is_async {
+        return;
+    }
+
     let mut visitor = ReturnInGeneratorVisitor::default();
     visitor.visit_body(&function_def.body);
 
@@ -112,15 +116,9 @@ struct ReturnInGeneratorVisitor {
     has_yield: bool,
 }
 
-impl StatementVisitor<'_> for ReturnInGeneratorVisitor {
+impl Visitor<'_> for ReturnInGeneratorVisitor {
     fn visit_stmt(&mut self, stmt: &Stmt) {
         match stmt {
-            Stmt::Expr(ast::StmtExpr { value, .. }) => match **value {
-                Expr::Yield(_) | Expr::YieldFrom(_) => {
-                    self.has_yield = true;
-                }
-                _ => {}
-            },
             Stmt::FunctionDef(_) => {
                 // Do not recurse into nested functions; they're evaluated separately.
             }
@@ -130,8 +128,19 @@ impl StatementVisitor<'_> for ReturnInGeneratorVisitor {
                 node_index: _,
             }) => {
                 self.return_ = Some(*range);
+                walk_stmt(self, stmt);
             }
-            _ => statement_visitor::walk_stmt(self, stmt),
+            _ => walk_stmt(self, stmt),
+        }
+    }
+
+    fn visit_expr(&mut self, expr: &Expr) {
+        match expr {
+            Expr::Lambda(_) => {}
+            Expr::Yield(_) | Expr::YieldFrom(_) => {
+                self.has_yield = true;
+            }
+            _ => walk_expr(self, expr),
         }
     }
 }
@@ -21,3 +21,46 @@ B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
 37 |
 38 |     yield from not_broken()
    |
+
+B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
+  --> B901.py:56:5
+   |
+55 | def broken3():
+56 |     return (yield from [])
+   |     ^^^^^^^^^^^^^^^^^^^^^^
+   |
+
+B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
+  --> B901.py:61:5
+   |
+59 | def broken4():
+60 |     x = yield from []
+61 |     return x
+   |     ^^^^^^^^
+   |
+
+B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
+  --> B901.py:72:5
+   |
+71 |     inner((yield from []))
+72 |     return x
+   |     ^^^^^^^^
+   |
+
+B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
+  --> B901.py:83:5
+   |
+81 | async def broken6():
+82 |     yield 1
+83 |     return foo()
+   |     ^^^^^^^^^^^^
+   |
+
+B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
+  --> B901.py:88:5
+   |
+86 | async def broken7():
+87 |     yield 1
+88 |     return [1, 2, 3]
+   |     ^^^^^^^^^^^^^^^^
+   |
@@ -0,0 +1,55 @@
+---
+source: crates/ruff_linter/src/linter.rs
+---
+B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
+  --> resources/test/fixtures/syntax_errors/return_in_generator.py:3:5
+  |
+1 | async def gen():
+2 |     yield 1
+3 |     return 42
+  |     ^^^^^^^^^
+4 |
+5 | def gen(): # B901 but not a syntax error - not an async generator
+  |
+
+B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
+  --> resources/test/fixtures/syntax_errors/return_in_generator.py:7:5
+  |
+5 | def gen(): # B901 but not a syntax error - not an async generator
+6 |     yield 1
+7 |     return 42
+  |     ^^^^^^^^^
+8 |
+9 | async def gen(): # ok - no value in return
+  |
+
+B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
+  --> resources/test/fixtures/syntax_errors/return_in_generator.py:15:5
+   |
+13 | async def gen():
+14 |     yield 1
+15 |     return foo()
+   |     ^^^^^^^^^^^^
+16 |
+17 | async def gen():
+   |
+
+B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
+  --> resources/test/fixtures/syntax_errors/return_in_generator.py:19:5
+   |
+17 | async def gen():
+18 |     yield 1
+19 |     return [1, 2, 3]
+   |     ^^^^^^^^^^^^^^^^
+20 |
+21 | async def gen():
+   |
+
+B901 Using `yield` and `return {value}` in a generator function can lead to confusing behavior
+  --> resources/test/fixtures/syntax_errors/return_in_generator.py:24:5
+   |
+22 |     if True:
+23 |         yield 1
+24 |     return 10
+   |     ^^^^^^^^^
+   |
@@ -3,12 +3,13 @@
 //! This checker is not responsible for traversing the AST itself. Instead, its
 //! [`SemanticSyntaxChecker::visit_stmt`] and [`SemanticSyntaxChecker::visit_expr`] methods should
 //! be called in a parent `Visitor`'s `visit_stmt` and `visit_expr` methods, respectively.
 
 use ruff_python_ast::{
     self as ast, Expr, ExprContext, IrrefutablePatternKind, Pattern, PythonVersion, Stmt, StmtExpr,
-    StmtImportFrom,
+    StmtFunctionDef, StmtImportFrom,
     comparable::ComparableExpr,
     helpers,
-    visitor::{Visitor, walk_expr},
+    visitor::{Visitor, walk_expr, walk_stmt},
 };
 use ruff_text_size::{Ranged, TextRange, TextSize};
 use rustc_hash::{FxBuildHasher, FxHashSet};
@@ -739,7 +740,21 @@ impl SemanticSyntaxChecker {
                     self.seen_futures_boundary = true;
                 }
             }
-            Stmt::FunctionDef(_) => {
+            Stmt::FunctionDef(StmtFunctionDef { is_async, body, .. }) => {
+                if *is_async {
+                    let mut visitor = ReturnVisitor::default();
+                    visitor.visit_body(body);
+
+                    if visitor.has_yield {
+                        if let Some(return_range) = visitor.return_range {
+                            Self::add_error(
+                                ctx,
+                                SemanticSyntaxErrorKind::ReturnInGenerator,
+                                return_range,
+                            );
+                        }
+                    }
+                }
                 self.seen_futures_boundary = true;
             }
             _ => {
@@ -1213,6 +1228,9 @@ impl Display for SemanticSyntaxError {
             SemanticSyntaxErrorKind::NonlocalWithoutBinding(name) => {
                 write!(f, "no binding for nonlocal `{name}` found")
             }
+            SemanticSyntaxErrorKind::ReturnInGenerator => {
+                write!(f, "`return` with value in async generator")
+            }
         }
     }
 }
@@ -1619,6 +1637,9 @@ pub enum SemanticSyntaxErrorKind {
 
     /// Represents a default type parameter followed by a non-default type parameter.
     TypeParameterDefaultOrder(String),
+
+    /// Represents a `return` statement with a value in an asynchronous generator.
+    ReturnInGenerator,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, get_size2::GetSize)]
@@ -1735,6 +1756,40 @@ impl Visitor<'_> for ReboundComprehensionVisitor<'_> {
     }
 }
 
+#[derive(Default)]
+struct ReturnVisitor {
+    return_range: Option<TextRange>,
+    has_yield: bool,
+}
+
+impl Visitor<'_> for ReturnVisitor {
+    fn visit_stmt(&mut self, stmt: &Stmt) {
+        match stmt {
+            // Do not recurse into nested functions; they're evaluated separately.
+            Stmt::FunctionDef(_) | Stmt::ClassDef(_) => {}
+            Stmt::Return(ast::StmtReturn {
+                value: Some(_),
+                range,
+                ..
+            }) => {
+                self.return_range = Some(*range);
+                walk_stmt(self, stmt);
+            }
+            _ => walk_stmt(self, stmt),
+        }
+    }
+
+    fn visit_expr(&mut self, expr: &Expr) {
+        match expr {
+            Expr::Lambda(_) => {}
+            Expr::Yield(_) | Expr::YieldFrom(_) => {
+                self.has_yield = true;
+            }
+            _ => walk_expr(self, expr),
+        }
+    }
+}
+
 struct MatchPatternVisitor<'a, Ctx> {
     names: FxHashSet<&'a ast::name::Name>,
     ctx: &'a Ctx,
@@ -1,6 +1,6 @@
 [package]
 name = "ruff_wasm"
-version = "0.14.7"
+version = "0.14.8"
 publish = false
 authors = { workspace = true }
 edition = { workspace = true }
@@ -15,7 +15,7 @@ use ty_project::metadata::pyproject::{PyProject, Tool};
 use ty_project::metadata::value::{RangedValue, RelativePathBuf};
 use ty_project::watch::{ChangeEvent, ProjectWatcher, directory_watcher};
 use ty_project::{Db, ProjectDatabase, ProjectMetadata};
-use ty_python_semantic::{Module, ModuleName, PythonPlatform, resolve_module};
+use ty_python_semantic::{Module, ModuleName, PythonPlatform, resolve_module_confident};
 
 struct TestCase {
     db: ProjectDatabase,
@@ -232,7 +232,8 @@ impl TestCase {
     }
 
     fn module<'c>(&'c self, name: &str) -> Module<'c> {
-        resolve_module(self.db(), &ModuleName::new(name).unwrap()).expect("module to be present")
+        resolve_module_confident(self.db(), &ModuleName::new(name).unwrap())
+            .expect("module to be present")
     }
 
     fn sorted_submodule_names(&self, parent_module_name: &str) -> Vec<String> {
@@ -811,7 +812,8 @@ fn directory_moved_to_project() -> anyhow::Result<()> {
         .with_context(|| "Failed to create __init__.py")?;
     std::fs::write(a_original_path.as_std_path(), "").with_context(|| "Failed to create a.py")?;
 
-    let sub_a_module = resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap());
+    let sub_a_module =
+        resolve_module_confident(case.db(), &ModuleName::new_static("sub.a").unwrap());
 
     assert_eq!(sub_a_module, None);
     case.assert_indexed_project_files([bar]);
@@ -832,7 +834,9 @@ fn directory_moved_to_project() -> anyhow::Result<()> {
         .expect("a.py to exist");
 
     // `import sub.a` should now resolve
-    assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some());
+    assert!(
+        resolve_module_confident(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some()
+    );
 
     case.assert_indexed_project_files([bar, init_file, a_file]);
 
@@ -848,7 +852,9 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
     ])?;
     let bar = case.system_file(case.project_path("bar.py")).unwrap();
 
-    assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some());
+    assert!(
+        resolve_module_confident(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some()
+    );
 
     let sub_path = case.project_path("sub");
     let init_file = case
@@ -870,7 +876,9 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
     case.apply_changes(changes, None);
 
     // `import sub.a` should no longer resolve
-    assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_none());
+    assert!(
+        resolve_module_confident(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_none()
+    );
 
     assert!(!init_file.exists(case.db()));
     assert!(!a_file.exists(case.db()));
@@ -890,8 +898,12 @@ fn directory_renamed() -> anyhow::Result<()> {
 
     let bar = case.system_file(case.project_path("bar.py")).unwrap();
 
-    assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some());
-    assert!(resolve_module(case.db(), &ModuleName::new_static("foo.baz").unwrap()).is_none());
+    assert!(
+        resolve_module_confident(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some()
+    );
+    assert!(
+        resolve_module_confident(case.db(), &ModuleName::new_static("foo.baz").unwrap()).is_none()
+    );
 
     let sub_path = case.project_path("sub");
     let sub_init = case
@@ -915,9 +927,13 @@ fn directory_renamed() -> anyhow::Result<()> {
     case.apply_changes(changes, None);
 
     // `import sub.a` should no longer resolve
-    assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_none());
+    assert!(
+        resolve_module_confident(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_none()
+    );
     // `import foo.baz` should now resolve
-    assert!(resolve_module(case.db(), &ModuleName::new_static("foo.baz").unwrap()).is_some());
+    assert!(
+        resolve_module_confident(case.db(), &ModuleName::new_static("foo.baz").unwrap()).is_some()
+    );
 
     // The old paths are no longer tracked
     assert!(!sub_init.exists(case.db()));
@@ -950,7 +966,9 @@ fn directory_deleted() -> anyhow::Result<()> {
 
     let bar = case.system_file(case.project_path("bar.py")).unwrap();
 
-    assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some());
+    assert!(
+        resolve_module_confident(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_some()
+    );
 
     let sub_path = case.project_path("sub");
 
@@ -970,7 +988,9 @@ fn directory_deleted() -> anyhow::Result<()> {
     case.apply_changes(changes, None);
 
     // `import sub.a` should no longer resolve
-    assert!(resolve_module(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_none());
+    assert!(
+        resolve_module_confident(case.db(), &ModuleName::new_static("sub.a").unwrap()).is_none()
+    );
 
     assert!(!init_file.exists(case.db()));
     assert!(!a_file.exists(case.db()));
@@ -999,7 +1019,7 @@ fn search_path() -> anyhow::Result<()> {
     let site_packages = case.root_path().join("site_packages");
 
     assert_eq!(
-        resolve_module(case.db(), &ModuleName::new("a").unwrap()),
+        resolve_module_confident(case.db(), &ModuleName::new("a").unwrap()),
         None
     );
 
@@ -1009,7 +1029,7 @@ fn search_path() -> anyhow::Result<()> {
 
     case.apply_changes(changes, None);
 
-    assert!(resolve_module(case.db(), &ModuleName::new_static("a").unwrap()).is_some());
+    assert!(resolve_module_confident(case.db(), &ModuleName::new_static("a").unwrap()).is_some());
     case.assert_indexed_project_files([case.system_file(case.project_path("bar.py")).unwrap()]);
 
     Ok(())
@@ -1022,7 +1042,7 @@ fn add_search_path() -> anyhow::Result<()> {
     let site_packages = case.project_path("site_packages");
     std::fs::create_dir_all(site_packages.as_std_path())?;
 
-    assert!(resolve_module(case.db(), &ModuleName::new_static("a").unwrap()).is_none());
+    assert!(resolve_module_confident(case.db(), &ModuleName::new_static("a").unwrap()).is_none());
 
     // Register site-packages as a search path.
     case.update_options(Options {
@@ -1040,7 +1060,7 @@ fn add_search_path() -> anyhow::Result<()> {
 
     case.apply_changes(changes, None);
 
-    assert!(resolve_module(case.db(), &ModuleName::new_static("a").unwrap()).is_some());
+    assert!(resolve_module_confident(case.db(), &ModuleName::new_static("a").unwrap()).is_some());
 
     Ok(())
 }
@@ -1172,7 +1192,7 @@ fn changed_versions_file() -> anyhow::Result<()> {
 
     // Unset the custom typeshed directory.
     assert_eq!(
-        resolve_module(case.db(), &ModuleName::new("os").unwrap()),
+        resolve_module_confident(case.db(), &ModuleName::new("os").unwrap()),
         None
     );
 
@@ -1187,7 +1207,7 @@ fn changed_versions_file() -> anyhow::Result<()> {
 
     case.apply_changes(changes, None);
 
-    assert!(resolve_module(case.db(), &ModuleName::new("os").unwrap()).is_some());
+    assert!(resolve_module_confident(case.db(), &ModuleName::new("os").unwrap()).is_some());
 
     Ok(())
 }
@@ -1410,7 +1430,7 @@ mod unix {
         Ok(())
     })?;
 
-    let baz = resolve_module(case.db(), &ModuleName::new_static("bar.baz").unwrap())
+    let baz = resolve_module_confident(case.db(), &ModuleName::new_static("bar.baz").unwrap())
         .expect("Expected bar.baz to exist in site-packages.");
     let baz_project = case.project_path("bar/baz.py");
     let baz_file = baz.file(case.db()).unwrap();
@@ -1486,7 +1506,7 @@ mod unix {
        Ok(())
    })?;

-    let baz = resolve_module(case.db(), &ModuleName::new_static("bar.baz").unwrap())
+    let baz = resolve_module_confident(case.db(), &ModuleName::new_static("bar.baz").unwrap())
        .expect("Expected bar.baz to exist in site-packages.");
    let baz_file = baz.file(case.db()).unwrap();
    let bar_baz = case.project_path("bar/baz.py");
@@ -1591,7 +1611,7 @@ mod unix {
        Ok(())
    })?;

-    let baz = resolve_module(case.db(), &ModuleName::new_static("bar.baz").unwrap())
+    let baz = resolve_module_confident(case.db(), &ModuleName::new_static("bar.baz").unwrap())
        .expect("Expected bar.baz to exist in site-packages.");
    let baz_site_packages_path =
        case.project_path(".venv/lib/python3.12/site-packages/bar/baz.py");
@@ -1854,11 +1874,11 @@ fn rename_files_casing_only() -> anyhow::Result<()> {
     let mut case = setup([("lib.py", "class Foo: ...")])?;
 
     assert!(
-        resolve_module(case.db(), &ModuleName::new("lib").unwrap()).is_some(),
+        resolve_module_confident(case.db(), &ModuleName::new("lib").unwrap()).is_some(),
         "Expected `lib` module to exist."
     );
     assert_eq!(
-        resolve_module(case.db(), &ModuleName::new("Lib").unwrap()),
+        resolve_module_confident(case.db(), &ModuleName::new("Lib").unwrap()),
         None,
         "Expected `Lib` module not to exist"
     );
@@ -1891,13 +1911,13 @@ fn rename_files_casing_only() -> anyhow::Result<()> {
 
     // Resolving `lib` should now fail but `Lib` should now succeed
     assert_eq!(
-        resolve_module(case.db(), &ModuleName::new("lib").unwrap()),
+        resolve_module_confident(case.db(), &ModuleName::new("lib").unwrap()),
         None,
         "Expected `lib` module to no longer exist."
     );
 
     assert!(
-        resolve_module(case.db(), &ModuleName::new("Lib").unwrap()).is_some(),
+        resolve_module_confident(case.db(), &ModuleName::new("Lib").unwrap()).is_some(),
         "Expected `Lib` module to exist"
     );
@@ -1,4 +1,7 @@
 name,file,index,rank
+auto-import-includes-modules,main.py,0,1
+auto-import-includes-modules,main.py,1,7
+auto-import-includes-modules,main.py,2,1
 auto-import-skips-current-module,main.py,0,1
 fstring-completions,main.py,0,1
 higher-level-symbols-preferred,main.py,0,
@@ -11,9 +14,9 @@ import-deprioritizes-type_check_only,main.py,2,1
 import-deprioritizes-type_check_only,main.py,3,2
 import-deprioritizes-type_check_only,main.py,4,3
 import-keyword-completion,main.py,0,1
-internal-typeshed-hidden,main.py,0,4
+internal-typeshed-hidden,main.py,0,2
 none-completion,main.py,0,2
-numpy-array,main.py,0,
+numpy-array,main.py,0,159
 numpy-array,main.py,1,1
 object-attr-instance-methods,main.py,0,1
 object-attr-instance-methods,main.py,1,1
@@ -23,6 +26,6 @@ scope-existing-over-new-import,main.py,0,1
 scope-prioritize-closer,main.py,0,2
 scope-simple-long-identifier,main.py,0,1
 tstring-completions,main.py,0,1
-ty-extensions-lower-stdlib,main.py,0,8
+ty-extensions-lower-stdlib,main.py,0,9
 type-var-typing-over-ast,main.py,0,3
-type-var-typing-over-ast,main.py,1,275
+type-var-typing-over-ast,main.py,1,251
@@ -506,9 +506,21 @@ struct CompletionAnswer {
 impl CompletionAnswer {
     /// Returns true when this answer matches the completion given.
     fn matches(&self, completion: &Completion) -> bool {
+        if let Some(ref qualified) = completion.qualified {
+            if qualified.as_str() == self.qualified() {
+                return true;
+            }
+        }
         self.symbol == completion.name.as_str()
             && self.module.as_deref() == completion.module_name.map(ModuleName::as_str)
     }
+
+    fn qualified(&self) -> String {
+        self.module
+            .as_ref()
+            .map(|module| format!("{module}.{}", self.symbol))
+            .unwrap_or_else(|| self.symbol.clone())
+    }
 }
 
 /// Copy the Python project from `src_dir` to `dst_dir`.
@@ -0,0 +1,2 @@
+[settings]
+auto-import = true
@@ -0,0 +1,3 @@
+multiprocess<CURSOR: multiprocessing>
+collect<CURSOR: collections>
+collabc<CURSOR: collections.abc>
@@ -0,0 +1,5 @@
+[project]
+name = "test"
+version = "0.1.0"
+requires-python = ">=3.13"
+dependencies = []
@@ -0,0 +1,8 @@
+version = 1
+revision = 3
+requires-python = ">=3.13"
+
+[[package]]
+name = "test"
+version = "0.1.0"
+source = { virtual = "." }
@ -2,7 +2,10 @@ use ruff_db::files::File;
|
||||||
use ty_project::Db;
|
use ty_project::Db;
|
||||||
use ty_python_semantic::{Module, ModuleName, all_modules, resolve_real_shadowable_module};
|
use ty_python_semantic::{Module, ModuleName, all_modules, resolve_real_shadowable_module};
|
||||||
|
|
||||||
use crate::symbols::{QueryPattern, SymbolInfo, symbols_for_file_global_only};
|
use crate::{
|
||||||
|
SymbolKind,
|
||||||
|
symbols::{QueryPattern, SymbolInfo, symbols_for_file_global_only},
|
||||||
|
};
|
||||||
|
|
||||||
/// Get all symbols matching the query string.
|
/// Get all symbols matching the query string.
|
||||||
///
|
///
|
||||||
|
|
@ -20,7 +23,7 @@ pub fn all_symbols<'db>(
|
||||||
|
|
||||||
let typing_extensions = ModuleName::new("typing_extensions").unwrap();
|
let typing_extensions = ModuleName::new("typing_extensions").unwrap();
|
||||||
let is_typing_extensions_available = importing_from.is_stub(db)
|
let is_typing_extensions_available = importing_from.is_stub(db)
|
||||||
|| resolve_real_shadowable_module(db, &typing_extensions).is_some();
|
|| resolve_real_shadowable_module(db, importing_from, &typing_extensions).is_some();
|
||||||
|
|
||||||
let results = std::sync::Mutex::new(Vec::new());
|
let results = std::sync::Mutex::new(Vec::new());
|
||||||
{
|
{
|
||||||
|
|
@ -36,18 +39,39 @@ pub fn all_symbols<'db>(
|
||||||
let Some(file) = module.file(&*db) else {
|
let Some(file) = module.file(&*db) else {
|
||||||
continue;
|
continue;
|
||||||
};
|
};
|
||||||
|
// By convention, modules starting with an underscore
|
||||||
|
// are generally considered unexported. However, we
|
||||||
|
// should consider first party modules fair game.
|
||||||
|
//
|
||||||
|
// Note that we apply this recursively. e.g.,
|
||||||
|
// `numpy._core.multiarray` is considered private
|
||||||
|
// because it's a child of `_core`.
|
||||||
|
if module.name(&*db).components().any(|c| c.starts_with('_'))
|
||||||
|
&& module
|
||||||
|
.search_path(&*db)
|
||||||
|
.is_none_or(|sp| !sp.is_first_party())
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
// TODO: also make it available in `TYPE_CHECKING` blocks
|
// TODO: also make it available in `TYPE_CHECKING` blocks
|
||||||
// (we'd need https://github.com/astral-sh/ty/issues/1553 to do this well)
|
// (we'd need https://github.com/astral-sh/ty/issues/1553 to do this well)
|
||||||
if !is_typing_extensions_available && module.name(&*db) == &typing_extensions {
|
if !is_typing_extensions_available && module.name(&*db) == &typing_extensions {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
s.spawn(move |_| {
|
s.spawn(move |_| {
|
||||||
|
if query.is_match_symbol_name(module.name(&*db)) {
|
||||||
|
results.lock().unwrap().push(AllSymbolInfo {
|
||||||
|
symbol: None,
|
||||||
|
module,
|
||||||
|
file,
|
||||||
|
});
|
||||||
|
}
|
||||||
for (_, symbol) in symbols_for_file_global_only(&*db, file).search(query) {
|
for (_, symbol) in symbols_for_file_global_only(&*db, file).search(query) {
|
||||||
// It seems like we could do better here than
|
// It seems like we could do better here than
|
||||||
// locking `results` for every single symbol,
|
// locking `results` for every single symbol,
|
||||||
// but this works pretty well as it is.
|
// but this works pretty well as it is.
|
||||||
results.lock().unwrap().push(AllSymbolInfo {
|
results.lock().unwrap().push(AllSymbolInfo {
|
||||||
symbol: symbol.to_owned(),
|
symbol: Some(symbol.to_owned()),
|
||||||
module,
|
module,
|
||||||
file,
|
file,
|
||||||
});
|
});
|
||||||
|
|
@ -59,8 +83,16 @@ pub fn all_symbols<'db>(
 
     let mut results = results.into_inner().unwrap();
     results.sort_by(|s1, s2| {
-        let key1 = (&s1.symbol.name, s1.file.path(db).as_str());
-        let key2 = (&s2.symbol.name, s2.file.path(db).as_str());
+        let key1 = (
+            s1.name_in_file()
+                .unwrap_or_else(|| s1.module().name(db).as_str()),
+            s1.file.path(db).as_str(),
+        );
+        let key2 = (
+            s2.name_in_file()
+                .unwrap_or_else(|| s2.module().name(db).as_str()),
+            s2.file.path(db).as_str(),
+        );
         key1.cmp(&key2)
     });
     results
@ -71,14 +103,53 @@ pub fn all_symbols<'db>(
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct AllSymbolInfo<'db> {
     /// The symbol information.
-    pub symbol: SymbolInfo<'static>,
+    ///
+    /// When absent, this implies the symbol is the module itself.
+    symbol: Option<SymbolInfo<'static>>,
     /// The module containing the symbol.
-    pub module: Module<'db>,
+    module: Module<'db>,
     /// The file containing the symbol.
     ///
     /// This `File` is guaranteed to be the same
     /// as the `File` underlying `module`.
-    pub file: File,
+    file: File,
+}
+
+impl<'db> AllSymbolInfo<'db> {
+    /// Returns the name of this symbol as it exists in a file.
+    ///
+    /// When absent, there is no concrete symbol in a module
+    /// somewhere. Instead, this represents importing a module.
+    /// In this case, if the caller needs a symbol name, they
+    /// should use `AllSymbolInfo::module().name()`.
+    pub fn name_in_file(&self) -> Option<&str> {
+        self.symbol.as_ref().map(|symbol| &*symbol.name)
+    }
+
+    /// Returns the "kind" of this symbol.
+    ///
+    /// The kind of a symbol in the context of auto-import is
+    /// determined on a best effort basis. It may be imprecise
+    /// in some cases, e.g., reporting a module as a variable.
+    pub fn kind(&self) -> SymbolKind {
+        self.symbol
+            .as_ref()
+            .map(|symbol| symbol.kind)
+            .unwrap_or(SymbolKind::Module)
+    }
+
+    /// Returns the module this symbol is exported from.
+    pub fn module(&self) -> Module<'db> {
+        self.module
+    }
+
+    /// Returns the `File` corresponding to the module.
+    ///
+    /// This is always equivalent to
+    /// `AllSymbolInfo::module().file().unwrap()`.
+    pub fn file(&self) -> File {
+        self.file
+    }
 }
 
 #[cfg(test)]
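Note (added commentary, not part of the change): with `symbol` now optional, an `AllSymbolInfo` whose symbol is `None` stands for the module itself, so callers fall back to the module name and to a module kind. A rough Python mirror of those accessors (hypothetical names, not the real API):

    from dataclasses import dataclass

    @dataclass
    class SymbolHit:
        module: str
        symbol_name: str | None = None
        symbol_kind: str | None = None

        def name_in_file(self) -> str | None:
            return self.symbol_name

        def display_name(self) -> str:
            # Falls back to the module name when there is no concrete symbol.
            return self.symbol_name or self.module

        def kind(self) -> str:
            # Falls back to "module", like `SymbolKind::Module`.
            return self.symbol_kind or "module"

    assert SymbolHit("collections.abc", "Sequence", "class").kind() == "class"
    assert SymbolHit("multiprocessing").display_name() == "multiprocessing"
    assert SymbolHit("multiprocessing").kind() == "module"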
@ -162,25 +233,31 @@ ABCDEFGHIJKLMNOP = 'https://api.example.com'
             return "No symbols found".to_string();
         }
 
-        self.render_diagnostics(symbols.into_iter().map(AllSymbolDiagnostic::new))
+        self.render_diagnostics(symbols.into_iter().map(|symbol_info| AllSymbolDiagnostic {
+            db: &self.db,
+            symbol_info,
+        }))
     }
 }
 
 struct AllSymbolDiagnostic<'db> {
+    db: &'db dyn Db,
     symbol_info: AllSymbolInfo<'db>,
 }
 
-impl<'db> AllSymbolDiagnostic<'db> {
-    fn new(symbol_info: AllSymbolInfo<'db>) -> Self {
-        Self { symbol_info }
-    }
-}
-
 impl IntoDiagnostic for AllSymbolDiagnostic<'_> {
     fn into_diagnostic(self) -> Diagnostic {
-        let symbol_kind_str = self.symbol_info.symbol.kind.to_string();
+        let symbol_kind_str = self.symbol_info.kind().to_string();
 
-        let info_text = format!("{} {}", symbol_kind_str, self.symbol_info.symbol.name);
+        let info_text = format!(
+            "{} {}",
+            symbol_kind_str,
+            self.symbol_info.name_in_file().unwrap_or_else(|| self
+                .symbol_info
+                .module()
+                .name(self.db)
+                .as_str())
+        );
 
         let sub = SubDiagnostic::new(SubDiagnosticSeverity::Info, info_text);
 
@ -189,9 +266,12 @@ ABCDEFGHIJKLMNOP = 'https://api.example.com'
             Severity::Info,
             "AllSymbolInfo".to_string(),
         );
-        main.annotate(Annotation::primary(
-            Span::from(self.symbol_info.file).with_range(self.symbol_info.symbol.name_range),
-        ));
+
+        let mut span = Span::from(self.symbol_info.file());
+        if let Some(ref symbol) = self.symbol_info.symbol {
+            span = span.with_range(symbol.name_range);
+        }
+        main.annotate(Annotation::primary(span));
         main.sub(sub);
 
         main
@ -74,7 +74,7 @@ impl<'db> Completions<'db> {
             .into_iter()
             .filter_map(|item| {
                 Some(ImportEdit {
-                    label: format!("import {}.{}", item.module_name?, item.name),
+                    label: format!("import {}", item.qualified?),
                     edit: item.import?,
                 })
             })
@ -160,6 +160,10 @@ impl<'db> Extend<Completion<'db>> for Completions<'db> {
 pub struct Completion<'db> {
     /// The label shown to the user for this suggestion.
     pub name: Name,
+    /// The fully qualified name, when available.
+    ///
+    /// This is only set when `module_name` is available.
+    pub qualified: Option<Name>,
     /// The text that should be inserted at the cursor
     /// when the completion is selected.
     ///
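Note (added commentary, not part of the change): `qualified` is the module path joined with the symbol name, and the import label is rendered from it. A tiny illustration with assumed values:

    # Illustrative only.
    name = "Sequence"
    module_name = "collections.abc"
    qualified = f"{module_name}.{name}"  # only set when a module name is known
    assert qualified == "collections.abc.Sequence"
    print(f"import {qualified}")  # the label text used for the import edit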
@ -225,6 +229,7 @@ impl<'db> Completion<'db> {
         let is_type_check_only = semantic.is_type_check_only(db);
         Completion {
             name: semantic.name,
+            qualified: None,
             insert: None,
             ty: semantic.ty,
             kind: None,
@ -306,6 +311,7 @@ impl<'db> Completion<'db> {
     fn keyword(name: &str) -> Self {
         Completion {
             name: name.into(),
+            qualified: None,
             insert: None,
             ty: None,
             kind: Some(CompletionKind::Keyword),
@ -321,6 +327,7 @@ impl<'db> Completion<'db> {
     fn value_keyword(name: &str, ty: Type<'db>) -> Completion<'db> {
         Completion {
             name: name.into(),
+            qualified: None,
             insert: None,
             ty: Some(ty),
             kind: Some(CompletionKind::Keyword),
@ -537,12 +544,22 @@ fn add_unimported_completions<'db>(
     let members = importer.members_in_scope_at(scoped.node, scoped.node.start());
 
     for symbol in all_symbols(db, file, &completions.query) {
-        if symbol.module.file(db) == Some(file) || symbol.module.is_known(db, KnownModule::Builtins)
-        {
+        if symbol.file() == file || symbol.module().is_known(db, KnownModule::Builtins) {
             continue;
         }
 
-        let request = create_import_request(symbol.module.name(db), &symbol.symbol.name);
+        let module_name = symbol.module().name(db);
+        let (name, qualified, request) = symbol
+            .name_in_file()
+            .map(|name| {
+                let qualified = format!("{module_name}.{name}");
+                (name, qualified, create_import_request(module_name, name))
+            })
+            .unwrap_or_else(|| {
+                let name = module_name.as_str();
+                let qualified = name.to_string();
+                (name, qualified, ImportRequest::module(name))
+            });
         // FIXME: `all_symbols` doesn't account for wildcard imports.
         // Since we're looking at every module, this is probably
         // "fine," but it might mean that we import a symbol from the
@ -551,11 +568,12 @@ fn add_unimported_completions<'db>(
         // N.B. We use `add` here because `all_symbols` already
        // takes our query into account.
         completions.force_add(Completion {
-            name: ast::name::Name::new(&symbol.symbol.name),
+            name: ast::name::Name::new(name),
+            qualified: Some(ast::name::Name::new(qualified)),
             insert: Some(import_action.symbol_text().into()),
             ty: None,
-            kind: symbol.symbol.kind.to_completion_kind(),
-            module_name: Some(symbol.module.name(db)),
+            kind: symbol.kind().to_completion_kind(),
+            module_name: Some(module_name),
             import: import_action.import().cloned(),
             builtin: false,
             // TODO: `is_type_check_only` requires inferring the type of the symbol
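Note (added commentary, not part of the change): the new branch above distinguishes symbol hits (qualified as `module.symbol`, imported by name) from module-only hits (qualified as the module path itself, imported as a module). A hedged Python sketch of the edits those two shapes are expected to correspond to:

    # Illustrative only; `import_edit` is a hypothetical helper.
    def import_edit(module_name: str, symbol: str | None) -> str:
        if symbol is None:
            # Module-only hit: the qualified name is the module path itself.
            return f"import {module_name}"
        return f"from {module_name} import {symbol}"

    assert import_edit("collections.abc", None) == "import collections.abc"
    assert import_edit("collections.abc", "Sequence") == "from collections.abc import Sequence"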
@ -4350,7 +4368,7 @@ from os.<CURSOR>
             .build()
             .snapshot();
         assert_snapshot!(snapshot, @r"
-        Kadabra :: Literal[1] :: Current module
+        Kadabra :: Literal[1] :: <no import required>
         AbraKadabra :: Unavailable :: package
         ");
     }
@ -5534,7 +5552,7 @@ def foo(param: s<CURSOR>)
         // Even though long_namea is alphabetically before long_nameb,
         // long_nameb is currently imported and should be preferred.
         assert_snapshot!(snapshot, @r"
-        long_nameb :: Literal[1] :: Current module
+        long_nameb :: Literal[1] :: <no import required>
         long_namea :: Unavailable :: foo
         ");
     }
@ -5804,7 +5822,7 @@ from .imp<CURSOR>
     #[test]
     fn typing_extensions_excluded_from_import() {
         let builder = completion_test_builder("from typing<CURSOR>").module_names();
-        assert_snapshot!(builder.build().snapshot(), @"typing :: Current module");
+        assert_snapshot!(builder.build().snapshot(), @"typing :: <no import required>");
     }
 
     #[test]
@ -5812,13 +5830,7 @@ from .imp<CURSOR>
         let builder = completion_test_builder("deprecated<CURSOR>")
             .auto_import()
             .module_names();
-        assert_snapshot!(builder.build().snapshot(), @r"
-        Deprecated :: importlib.metadata
-        DeprecatedList :: importlib.metadata
-        DeprecatedNonAbstract :: importlib.metadata
-        DeprecatedTuple :: importlib.metadata
-        deprecated :: warnings
-        ");
+        assert_snapshot!(builder.build().snapshot(), @"deprecated :: warnings");
     }
 
     #[test]
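Note (added commentary, not part of the change): with the `importlib.metadata` internals now filtered out by the underscore rule, the only surviving suggestion in this snapshot is `deprecated` from the stdlib `warnings` module (available as `warnings.deprecated` on Python 3.13+). Accepting it would insert roughly:

    # Requires Python 3.13+ for `warnings.deprecated`.
    from warnings import deprecated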
@ -5829,8 +5841,8 @@ from .imp<CURSOR>
             .completion_test_builder()
             .module_names();
         assert_snapshot!(builder.build().snapshot(), @r"
-        typing :: Current module
-        typing_extensions :: Current module
+        typing :: <no import required>
+        typing_extensions :: <no import required>
         ");
     }
 
@ -5843,10 +5855,6 @@ from .imp<CURSOR>
             .auto_import()
             .module_names();
         assert_snapshot!(builder.build().snapshot(), @r"
-        Deprecated :: importlib.metadata
-        DeprecatedList :: importlib.metadata
-        DeprecatedNonAbstract :: importlib.metadata
-        DeprecatedTuple :: importlib.metadata
         deprecated :: typing_extensions
         deprecated :: warnings
         ");
@ -5859,8 +5867,8 @@ from .imp<CURSOR>
             .completion_test_builder()
             .module_names();
         assert_snapshot!(builder.build().snapshot(), @r"
-        typing :: Current module
-        typing_extensions :: Current module
+        typing :: <no import required>
+        typing_extensions :: <no import required>
         ");
     }
 
@ -5872,15 +5880,284 @@ from .imp<CURSOR>
|
||||||
.auto_import()
|
.auto_import()
|
||||||
.module_names();
|
.module_names();
|
||||||
assert_snapshot!(builder.build().snapshot(), @r"
|
assert_snapshot!(builder.build().snapshot(), @r"
|
||||||
Deprecated :: importlib.metadata
|
|
||||||
DeprecatedList :: importlib.metadata
|
|
||||||
DeprecatedNonAbstract :: importlib.metadata
|
|
||||||
DeprecatedTuple :: importlib.metadata
|
|
||||||
deprecated :: typing_extensions
|
deprecated :: typing_extensions
|
||||||
deprecated :: warnings
|
deprecated :: warnings
|
||||||
");
|
");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn reexport_simple_import_noauto() {
|
||||||
|
let snapshot = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
import foo
|
||||||
|
foo.ZQ<CURSOR>
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("foo.py", r#"from bar import ZQZQ"#)
|
||||||
|
.source("bar.py", r#"ZQZQ = 1"#)
|
||||||
|
.completion_test_builder()
|
||||||
|
.module_names()
|
||||||
|
.build()
|
||||||
|
.snapshot();
|
||||||
|
assert_snapshot!(snapshot, @"ZQZQ :: <no import required>");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn reexport_simple_import_auto() {
|
||||||
|
let snapshot = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
ZQ<CURSOR>
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("foo.py", r#"from bar import ZQZQ"#)
|
||||||
|
.source("bar.py", r#"ZQZQ = 1"#)
|
||||||
|
.completion_test_builder()
|
||||||
|
.auto_import()
|
||||||
|
.module_names()
|
||||||
|
.build()
|
||||||
|
.snapshot();
|
||||||
|
// We're specifically looking for `ZQZQ` in `bar`
|
||||||
|
// here but *not* in `foo`. Namely, in `foo`,
|
||||||
|
// `ZQZQ` is a "regular" import that is not by
|
||||||
|
// convention considered a re-export.
|
||||||
|
assert_snapshot!(snapshot, @"ZQZQ :: bar");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn reexport_redundant_convention_import_noauto() {
|
||||||
|
let snapshot = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
import foo
|
||||||
|
foo.ZQ<CURSOR>
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("foo.py", r#"from bar import ZQZQ as ZQZQ"#)
|
||||||
|
.source("bar.py", r#"ZQZQ = 1"#)
|
||||||
|
.completion_test_builder()
|
||||||
|
.module_names()
|
||||||
|
.build()
|
||||||
|
.snapshot();
|
||||||
|
assert_snapshot!(snapshot, @"ZQZQ :: <no import required>");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn reexport_redundant_convention_import_auto() {
|
||||||
|
let snapshot = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
ZQ<CURSOR>
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("foo.py", r#"from bar import ZQZQ as ZQZQ"#)
|
||||||
|
.source("bar.py", r#"ZQZQ = 1"#)
|
||||||
|
.completion_test_builder()
|
||||||
|
.auto_import()
|
||||||
|
.module_names()
|
||||||
|
.build()
|
||||||
|
.snapshot();
|
||||||
|
assert_snapshot!(snapshot, @r"
|
||||||
|
ZQZQ :: bar
|
||||||
|
ZQZQ :: foo
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_import_respects_all() {
|
||||||
|
let snapshot = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
ZQ<CURSOR>
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"bar.py",
|
||||||
|
r#"
|
||||||
|
ZQZQ1 = 1
|
||||||
|
ZQZQ2 = 1
|
||||||
|
__all__ = ['ZQZQ1']
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.completion_test_builder()
|
||||||
|
.auto_import()
|
||||||
|
.module_names()
|
||||||
|
.build()
|
||||||
|
.snapshot();
|
||||||
|
// We specifically do not want `ZQZQ2` here, since
|
||||||
|
// it is not part of `__all__`.
|
||||||
|
assert_snapshot!(snapshot, @r"
|
||||||
|
ZQZQ1 :: bar
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
// This test confirms current behavior (as of 2025-12-04), but
|
||||||
|
// it's not consistent with auto-import. That is, it doesn't
|
||||||
|
// strictly respect `__all__` on `bar`, but perhaps it should.
|
||||||
|
//
|
||||||
|
// See: https://github.com/astral-sh/ty/issues/1757
|
||||||
|
#[test]
|
||||||
|
fn object_attr_ignores_all() {
|
||||||
|
let snapshot = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
import bar
|
||||||
|
bar.ZQ<CURSOR>
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"bar.py",
|
||||||
|
r#"
|
||||||
|
ZQZQ1 = 1
|
||||||
|
ZQZQ2 = 1
|
||||||
|
__all__ = ['ZQZQ1']
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.completion_test_builder()
|
||||||
|
.auto_import()
|
||||||
|
.module_names()
|
||||||
|
.build()
|
||||||
|
.snapshot();
|
||||||
|
// We specifically do not want `ZQZQ2` here, since
|
||||||
|
// it is not part of `__all__`.
|
||||||
|
assert_snapshot!(snapshot, @r"
|
||||||
|
ZQZQ1 :: <no import required>
|
||||||
|
ZQZQ2 :: <no import required>
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_import_ignores_modules_with_leading_underscore() {
|
||||||
|
let snapshot = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
Quitter<CURSOR>
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.completion_test_builder()
|
||||||
|
.auto_import()
|
||||||
|
.module_names()
|
||||||
|
.build()
|
||||||
|
.snapshot();
|
||||||
|
// There is a `Quitter` in `_sitebuiltins` in the standard
|
||||||
|
// library. But this is skipped by auto-import because it's
|
||||||
|
// 1) not first party and 2) starts with an `_`.
|
||||||
|
assert_snapshot!(snapshot, @"<No completions found>");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_import_includes_modules_with_leading_underscore_in_first_party() {
|
||||||
|
let snapshot = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
ZQ<CURSOR>
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"bar.py",
|
||||||
|
r#"
|
||||||
|
ZQZQ1 = 1
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"_foo.py",
|
||||||
|
r#"
|
||||||
|
ZQZQ1 = 1
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.completion_test_builder()
|
||||||
|
.auto_import()
|
||||||
|
.module_names()
|
||||||
|
.build()
|
||||||
|
.snapshot();
|
||||||
|
assert_snapshot!(snapshot, @r"
|
||||||
|
ZQZQ1 :: _foo
|
||||||
|
ZQZQ1 :: bar
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_import_includes_stdlib_modules_as_suggestions() {
|
||||||
|
let snapshot = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
multiprocess<CURSOR>
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.completion_test_builder()
|
||||||
|
.auto_import()
|
||||||
|
.build()
|
||||||
|
.snapshot();
|
||||||
|
assert_snapshot!(snapshot, @r"
|
||||||
|
multiprocessing
|
||||||
|
multiprocessing.connection
|
||||||
|
multiprocessing.context
|
||||||
|
multiprocessing.dummy
|
||||||
|
multiprocessing.dummy.connection
|
||||||
|
multiprocessing.forkserver
|
||||||
|
multiprocessing.heap
|
||||||
|
multiprocessing.managers
|
||||||
|
multiprocessing.pool
|
||||||
|
multiprocessing.popen_fork
|
||||||
|
multiprocessing.popen_forkserver
|
||||||
|
multiprocessing.popen_spawn_posix
|
||||||
|
multiprocessing.popen_spawn_win32
|
||||||
|
multiprocessing.process
|
||||||
|
multiprocessing.queues
|
||||||
|
multiprocessing.reduction
|
||||||
|
multiprocessing.resource_sharer
|
||||||
|
multiprocessing.resource_tracker
|
||||||
|
multiprocessing.shared_memory
|
||||||
|
multiprocessing.sharedctypes
|
||||||
|
multiprocessing.spawn
|
||||||
|
multiprocessing.synchronize
|
||||||
|
multiprocessing.util
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_import_includes_first_party_modules_as_suggestions() {
|
||||||
|
let snapshot = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
zqzqzq<CURSOR>
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("zqzqzqzqzq.py", "")
|
||||||
|
.completion_test_builder()
|
||||||
|
.auto_import()
|
||||||
|
.build()
|
||||||
|
.snapshot();
|
||||||
|
assert_snapshot!(snapshot, @"zqzqzqzqzq");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_import_includes_sub_modules_as_suggestions() {
|
||||||
|
let snapshot = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
collabc<CURSOR>
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.completion_test_builder()
|
||||||
|
.auto_import()
|
||||||
|
.build()
|
||||||
|
.snapshot();
|
||||||
|
assert_snapshot!(snapshot, @"collections.abc");
|
||||||
|
}
|
||||||
|
|
||||||
/// A way to create a simple single-file (named `main.py`) completion test
|
/// A way to create a simple single-file (named `main.py`) completion test
|
||||||
/// builder.
|
/// builder.
|
||||||
///
|
///
|
||||||
|
|
@ -6055,7 +6332,7 @@ from .imp<CURSOR>
                 let module_name = c
                     .module_name
                     .map(ModuleName::as_str)
-                    .unwrap_or("Current module");
+                    .unwrap_or("<no import required>");
                 snapshot = format!("{snapshot} :: {module_name}");
             }
             snapshot
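Note (added commentary, not part of the change): this is where the snapshot column used throughout the tests comes from. A completion that needs no import has no `module_name`, which now renders as `<no import required>`. A small Python sketch of the formatting:

    # Illustrative only; mirrors `format!("{snapshot} :: {module_name}")` above.
    def snapshot_line(name: str, module_name: str | None) -> str:
        return f"{name} :: {module_name or '<no import required>'}"

    assert snapshot_line("typing", None) == "typing :: <no import required>"
    assert snapshot_line("deprecated", "warnings") == "deprecated :: warnings"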
@ -898,6 +898,42 @@ cls = MyClass
|
||||||
assert_snapshot!(test.references(), @"No references found");
|
assert_snapshot!(test.references(), @"No references found");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn references_string_annotation_recursive() {
|
||||||
|
let test = cursor_test(
|
||||||
|
r#"
|
||||||
|
ab: "a<CURSOR>b"
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.references(), @r#"
|
||||||
|
info[references]: Reference 1
|
||||||
|
--> main.py:2:1
|
||||||
|
|
|
||||||
|
2 | ab: "ab"
|
||||||
|
| ^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[references]: Reference 2
|
||||||
|
--> main.py:2:6
|
||||||
|
|
|
||||||
|
2 | ab: "ab"
|
||||||
|
| ^^
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn references_string_annotation_unknown() {
|
||||||
|
let test = cursor_test(
|
||||||
|
r#"
|
||||||
|
x: "foo<CURSOR>bar"
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.references(), @"No references found");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn references_match_name_stmt() {
|
fn references_match_name_stmt() {
|
||||||
let test = cursor_test(
|
let test = cursor_test(
|
||||||
|
|
@ -1870,4 +1906,211 @@ func<CURSOR>_alias()
|
||||||
|
|
|
|
||||||
");
|
");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn references_submodule_import_from_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.submod import val
|
||||||
|
|
||||||
|
x = sub<CURSOR>pkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// TODO(submodule-imports): this should light up both instances of `subpkg`
|
||||||
|
assert_snapshot!(test.references(), @r"
|
||||||
|
info[references]: Reference 1
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn references_submodule_import_from_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .sub<CURSOR>pkg.submod import val
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// TODO(submodule-imports): this should light up both instances of `subpkg`
|
||||||
|
assert_snapshot!(test.references(), @"No references found");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn references_submodule_import_from_wrong_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.submod import val
|
||||||
|
|
||||||
|
x = sub<CURSOR>mod
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// No references is actually correct (or it should only see itself)
|
||||||
|
assert_snapshot!(test.references(), @"No references found");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn references_submodule_import_from_wrong_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.sub<CURSOR>mod import val
|
||||||
|
|
||||||
|
x = submod
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// No references is actually correct (or it should only see itself)
|
||||||
|
assert_snapshot!(test.references(), @"No references found");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn references_submodule_import_from_confusing_shadowed_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .sub<CURSOR>pkg import subpkg
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// No references is actually correct (or it should only see itself)
|
||||||
|
assert_snapshot!(test.references(), @"No references found");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn references_submodule_import_from_confusing_real_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg import sub<CURSOR>pkg
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.references(), @r"
|
||||||
|
info[references]: Reference 1
|
||||||
|
--> mypackage/__init__.py:2:21
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
|
||||||
|
info[references]: Reference 2
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[references]: Reference 3
|
||||||
|
--> mypackage/subpkg/__init__.py:2:1
|
||||||
|
|
|
||||||
|
2 | subpkg: int = 10
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn references_submodule_import_from_confusing_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg import subpkg
|
||||||
|
|
||||||
|
x = sub<CURSOR>pkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// TODO: this should also highlight the RHS subpkg in the import
|
||||||
|
assert_snapshot!(test.references(), @r"
|
||||||
|
info[references]: Reference 1
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1073,6 +1073,41 @@ def another_helper(path):
|
||||||
assert_snapshot!(test.goto_declaration(), @"No goto target found");
|
assert_snapshot!(test.goto_declaration(), @"No goto target found");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_declaration_string_annotation_recursive() {
|
||||||
|
let test = cursor_test(
|
||||||
|
r#"
|
||||||
|
ab: "a<CURSOR>b"
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.goto_declaration(), @r#"
|
||||||
|
info[goto-declaration]: Declaration
|
||||||
|
--> main.py:2:1
|
||||||
|
|
|
||||||
|
2 | ab: "ab"
|
||||||
|
| ^^
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main.py:2:6
|
||||||
|
|
|
||||||
|
2 | ab: "ab"
|
||||||
|
| ^^
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_declaration_string_annotation_unknown() {
|
||||||
|
let test = cursor_test(
|
||||||
|
r#"
|
||||||
|
x: "foo<CURSOR>bar"
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.goto_declaration(), @"No goto target found");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn goto_declaration_nested_instance_attribute() {
|
fn goto_declaration_nested_instance_attribute() {
|
||||||
let test = cursor_test(
|
let test = cursor_test(
|
||||||
|
|
@ -2567,6 +2602,298 @@ def ab(a: int, *, c: int): ...
|
||||||
");
|
");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_declaration_submodule_import_from_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.submod import val
|
||||||
|
|
||||||
|
x = sub<CURSOR>pkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// TODO(submodule-imports): this should only highlight `subpkg` in the import statement
|
||||||
|
// This happens because DefinitionKind::ImportFromSubmodule claims the entire ImportFrom node,
|
||||||
|
// which is correct but unhelpful. Unfortunately even if it only claimed the LHS identifier it
|
||||||
|
// would highlight `subpkg.submod` which is strictly better but still isn't what we want.
|
||||||
|
assert_snapshot!(test.goto_declaration(), @r"
|
||||||
|
info[goto-declaration]: Declaration
|
||||||
|
--> mypackage/__init__.py:2:1
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_declaration_submodule_import_from_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .sub<CURSOR>pkg.submod import val
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// TODO(submodule-imports): I don't *think* this is what we want..?
|
||||||
|
// It's a bit confusing because this symbol is essentially the LHS *and* RHS of
|
||||||
|
// `subpkg = mypackage.subpkg`. As in, it's both defining a local `subpkg` and
|
||||||
|
// loading the module `mypackage.subpkg`, so, it's understandable to get confused!
|
||||||
|
assert_snapshot!(test.goto_declaration(), @r"
|
||||||
|
info[goto-declaration]: Declaration
|
||||||
|
--> mypackage/subpkg/__init__.py:1:1
|
||||||
|
|
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:2:7
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
| ^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_declaration_submodule_import_from_wrong_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.submod import val
|
||||||
|
|
||||||
|
x = sub<CURSOR>mod
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// No result is correct!
|
||||||
|
assert_snapshot!(test.goto_declaration(), @"No goto target found");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_declaration_submodule_import_from_wrong_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.sub<CURSOR>mod import val
|
||||||
|
|
||||||
|
x = submod
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// Going to the submod module is correct!
|
||||||
|
assert_snapshot!(test.goto_declaration(), @r"
|
||||||
|
info[goto-declaration]: Declaration
|
||||||
|
--> mypackage/subpkg/submod.py:1:1
|
||||||
|
|
|
||||||
|
1 |
|
||||||
|
| ^
|
||||||
|
2 | val: int = 0
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:2:14
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
| ^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = submod
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_declaration_submodule_import_from_confusing_shadowed_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .sub<CURSOR>pkg import subpkg
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// Going to the subpkg module is correct!
|
||||||
|
assert_snapshot!(test.goto_declaration(), @r"
|
||||||
|
info[goto-declaration]: Declaration
|
||||||
|
--> mypackage/subpkg/__init__.py:1:1
|
||||||
|
|
|
||||||
|
1 |
|
||||||
|
| ^
|
||||||
|
2 | subpkg: int = 10
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:2:7
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_declaration_submodule_import_from_confusing_real_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg import sub<CURSOR>pkg
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// Going to the subpkg `int` is correct!
|
||||||
|
assert_snapshot!(test.goto_declaration(), @r"
|
||||||
|
info[goto-declaration]: Declaration
|
||||||
|
--> mypackage/subpkg/__init__.py:2:1
|
||||||
|
|
|
||||||
|
2 | subpkg: int = 10
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:2:21
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_declaration_submodule_import_from_confusing_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg import subpkg
|
||||||
|
|
||||||
|
x = sub<CURSOR>pkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// TODO(submodule-imports): Ok this one is FASCINATING and it's kinda right but confusing!
|
||||||
|
//
|
||||||
|
// So there's 3 relevant definitions here:
|
||||||
|
//
|
||||||
|
// * `subpkg: int = 10` in the other file is in fact the original definition
|
||||||
|
//
|
||||||
|
// * the LHS `subpkg` in the import is an instance of `subpkg = ...`
|
||||||
|
// because it's a `DefinitionKind::ImportFromSubmodle`.
|
||||||
|
// This is the span that covers the entire import.
|
||||||
|
//
|
||||||
|
// * `the RHS `subpkg` in the import is a second instance of `subpkg = ...`
|
||||||
|
// that *immediately* overwrites the `ImportFromSubmodule`'s definition
|
||||||
|
// This span seemingly doesn't appear at all!? Is it getting hidden by the LHS span?
|
||||||
|
assert_snapshot!(test.goto_declaration(), @r"
|
||||||
|
info[goto-declaration]: Declaration
|
||||||
|
--> mypackage/__init__.py:2:1
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[goto-declaration]: Declaration
|
||||||
|
--> mypackage/subpkg/__init__.py:2:1
|
||||||
|
|
|
||||||
|
2 | subpkg: int = 10
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
impl CursorTest {
|
impl CursorTest {
|
||||||
fn goto_declaration(&self) -> String {
|
fn goto_declaration(&self) -> String {
|
||||||
let Some(targets) = goto_declaration(&self.db, self.cursor.file, self.cursor.offset)
|
let Some(targets) = goto_declaration(&self.db, self.cursor.file, self.cursor.offset)
|
||||||
|
|
|
||||||
|
|
@ -145,14 +145,14 @@ mod tests {
 
         assert_snapshot!(test.goto_type_definition(), @r"
         info[goto-type-definition]: Type definition
-        --> stdlib/typing.pyi:770:1
+        --> stdlib/typing.pyi:781:1
          |
-        768 | def __class_getitem__(cls, args: TypeVar | tuple[TypeVar, ...]) -> _Final: ...
-        769 |
-        770 | Generic: type[_Generic]
+        779 | def __class_getitem__(cls, args: TypeVar | tuple[TypeVar, ...]) -> _Final: ...
+        780 |
+        781 | Generic: type[_Generic]
             | ^^^^^^^
-        771 |
-        772 | class _ProtocolMeta(ABCMeta):
+        782 |
+        783 | class _ProtocolMeta(ABCMeta):
          |
         info: Source
         --> main.py:4:1
@ -964,6 +964,60 @@ mod tests {
|
||||||
assert_snapshot!(test.goto_type_definition(), @"No goto target found");
|
assert_snapshot!(test.goto_type_definition(), @"No goto target found");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_type_string_annotation_recursive() {
|
||||||
|
let test = cursor_test(
|
||||||
|
r#"
|
||||||
|
ab: "a<CURSOR>b"
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.goto_type_definition(), @r#"
|
||||||
|
info[goto-type-definition]: Type definition
|
||||||
|
--> stdlib/ty_extensions.pyi:20:1
|
||||||
|
|
|
||||||
|
19 | # Types
|
||||||
|
20 | Unknown = object()
|
||||||
|
| ^^^^^^^
|
||||||
|
21 | AlwaysTruthy = object()
|
||||||
|
22 | AlwaysFalsy = object()
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main.py:2:6
|
||||||
|
|
|
||||||
|
2 | ab: "ab"
|
||||||
|
| ^^
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_type_string_annotation_unknown() {
|
||||||
|
let test = cursor_test(
|
||||||
|
r#"
|
||||||
|
x: "foo<CURSOR>bar"
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.goto_type_definition(), @r#"
|
||||||
|
info[goto-type-definition]: Type definition
|
||||||
|
--> stdlib/ty_extensions.pyi:20:1
|
||||||
|
|
|
||||||
|
19 | # Types
|
||||||
|
20 | Unknown = object()
|
||||||
|
| ^^^^^^^
|
||||||
|
21 | AlwaysTruthy = object()
|
||||||
|
22 | AlwaysFalsy = object()
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main.py:2:5
|
||||||
|
|
|
||||||
|
2 | x: "foobar"
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn goto_type_match_name_stmt() {
|
fn goto_type_match_name_stmt() {
|
||||||
let test = cursor_test(
|
let test = cursor_test(
|
||||||
|
|
@ -1618,6 +1672,283 @@ def function():
|
||||||
"#);
|
"#);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_type_submodule_import_from_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.submod import val
|
||||||
|
|
||||||
|
x = sub<CURSOR>pkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// The module is the correct type definition
|
||||||
|
assert_snapshot!(test.goto_type_definition(), @r"
|
||||||
|
info[goto-type-definition]: Type definition
|
||||||
|
--> mypackage/subpkg/__init__.py:1:1
|
||||||
|
|
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_type_submodule_import_from_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .sub<CURSOR>pkg.submod import val
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// The module is the correct type definition
|
||||||
|
assert_snapshot!(test.goto_type_definition(), @r"
|
||||||
|
info[goto-type-definition]: Type definition
|
||||||
|
--> mypackage/subpkg/__init__.py:1:1
|
||||||
|
|
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:2:7
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
| ^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_type_submodule_import_from_wrong_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.submod import val
|
||||||
|
|
||||||
|
x = sub<CURSOR>mod
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// Unknown is correct, `submod` is not in scope
|
||||||
|
assert_snapshot!(test.goto_type_definition(), @r"
|
||||||
|
info[goto-type-definition]: Type definition
|
||||||
|
--> stdlib/ty_extensions.pyi:20:1
|
||||||
|
|
|
||||||
|
19 | # Types
|
||||||
|
20 | Unknown = object()
|
||||||
|
| ^^^^^^^
|
||||||
|
21 | AlwaysTruthy = object()
|
||||||
|
22 | AlwaysFalsy = object()
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
3 |
|
||||||
|
4 | x = submod
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_type_submodule_import_from_wrong_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.sub<CURSOR>mod import val
|
||||||
|
|
||||||
|
x = submod
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// The module is correct
|
||||||
|
assert_snapshot!(test.goto_type_definition(), @r"
|
||||||
|
info[goto-type-definition]: Type definition
|
||||||
|
--> mypackage/subpkg/submod.py:1:1
|
||||||
|
|
|
||||||
|
1 | /
|
||||||
|
2 | | val: int = 0
|
||||||
|
| |_____________^
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:2:14
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
| ^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = submod
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_type_submodule_import_from_confusing_shadowed_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .sub<CURSOR>pkg import subpkg
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// The module is correct
|
||||||
|
assert_snapshot!(test.goto_type_definition(), @r"
|
||||||
|
info[goto-type-definition]: Type definition
|
||||||
|
--> mypackage/subpkg/__init__.py:1:1
|
||||||
|
|
|
||||||
|
1 | /
|
||||||
|
2 | | subpkg: int = 10
|
||||||
|
| |_________________^
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:2:7
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_type_submodule_import_from_confusing_real_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg import sub<CURSOR>pkg
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// `int` is correct
|
||||||
|
assert_snapshot!(test.goto_type_definition(), @r#"
|
||||||
|
info[goto-type-definition]: Type definition
|
||||||
|
--> stdlib/builtins.pyi:348:7
|
||||||
|
|
|
||||||
|
347 | @disjoint_base
|
||||||
|
348 | class int:
|
||||||
|
| ^^^
|
||||||
|
349 | """int([x]) -> integer
|
||||||
|
350 | int(x, base=10) -> integer
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:2:21
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn goto_type_submodule_import_from_confusing_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg import subpkg
|
||||||
|
|
||||||
|
x = sub<CURSOR>pkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// `int` is correct
|
||||||
|
assert_snapshot!(test.goto_type_definition(), @r#"
|
||||||
|
info[goto-type-definition]: Type definition
|
||||||
|
--> stdlib/builtins.pyi:348:7
|
||||||
|
|
|
||||||
|
347 | @disjoint_base
|
||||||
|
348 | class int:
|
||||||
|
| ^^^
|
||||||
|
349 | """int([x]) -> integer
|
||||||
|
350 | int(x, base=10) -> integer
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
impl CursorTest {
|
impl CursorTest {
|
||||||
fn goto_type_definition(&self) -> String {
|
fn goto_type_definition(&self) -> String {
|
||||||
let Some(targets) =
|
let Some(targets) =
|
||||||
|
|
|
||||||
|
|
@ -1089,6 +1089,60 @@ mod tests {
|
||||||
assert_snapshot!(test.hover(), @"Hover provided no content");
|
assert_snapshot!(test.hover(), @"Hover provided no content");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hover_string_annotation_recursive() {
|
||||||
|
let test = cursor_test(
|
||||||
|
r#"
|
||||||
|
ab: "a<CURSOR>b"
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.hover(), @r#"
|
||||||
|
Unknown
|
||||||
|
---------------------------------------------
|
||||||
|
```python
|
||||||
|
Unknown
|
||||||
|
```
|
||||||
|
---------------------------------------------
|
||||||
|
info[hover]: Hovered content is
|
||||||
|
--> main.py:2:6
|
||||||
|
|
|
||||||
|
2 | ab: "ab"
|
||||||
|
| ^-
|
||||||
|
| ||
|
||||||
|
| |Cursor offset
|
||||||
|
| source
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hover_string_annotation_unknown() {
|
||||||
|
let test = cursor_test(
|
||||||
|
r#"
|
||||||
|
x: "foo<CURSOR>bar"
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.hover(), @r#"
|
||||||
|
Unknown
|
||||||
|
---------------------------------------------
|
||||||
|
```python
|
||||||
|
Unknown
|
||||||
|
```
|
||||||
|
---------------------------------------------
|
||||||
|
info[hover]: Hovered content is
|
||||||
|
--> main.py:2:5
|
||||||
|
|
|
||||||
|
2 | x: "foobar"
|
||||||
|
| ^^^-^^
|
||||||
|
| | |
|
||||||
|
| | Cursor offset
|
||||||
|
| source
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn hover_overload_type_disambiguated1() {
|
fn hover_overload_type_disambiguated1() {
|
||||||
let test = CursorTest::builder()
|
let test = CursorTest::builder()
|
||||||
|
|
@ -3267,6 +3321,297 @@ def function():
|
||||||
");
|
");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hover_submodule_import_from_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.submod import val
|
||||||
|
|
||||||
|
x = sub<CURSOR>pkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// The module is correct
|
||||||
|
assert_snapshot!(test.hover(), @r"
|
||||||
|
<module 'mypackage.subpkg'>
|
||||||
|
---------------------------------------------
|
||||||
|
```python
|
||||||
|
<module 'mypackage.subpkg'>
|
||||||
|
```
|
||||||
|
---------------------------------------------
|
||||||
|
info[hover]: Hovered content is
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ^^^-^^
|
||||||
|
| | |
|
||||||
|
| | Cursor offset
|
||||||
|
| source
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hover_submodule_import_from_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .sub<CURSOR>pkg.submod import val
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// The module is correct
|
||||||
|
assert_snapshot!(test.hover(), @r"
|
||||||
|
<module 'mypackage.subpkg'>
|
||||||
|
---------------------------------------------
|
||||||
|
```python
|
||||||
|
<module 'mypackage.subpkg'>
|
||||||
|
```
|
||||||
|
---------------------------------------------
|
||||||
|
info[hover]: Hovered content is
|
||||||
|
--> mypackage/__init__.py:2:7
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
| ^^^-^^
|
||||||
|
| | |
|
||||||
|
| | Cursor offset
|
||||||
|
| source
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hover_submodule_import_from_wrong_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.submod import val
|
||||||
|
|
||||||
|
x = sub<CURSOR>mod
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// Unknown is correct
|
||||||
|
assert_snapshot!(test.hover(), @r"
|
||||||
|
Unknown
|
||||||
|
---------------------------------------------
|
||||||
|
```python
|
||||||
|
Unknown
|
||||||
|
```
|
||||||
|
---------------------------------------------
|
||||||
|
info[hover]: Hovered content is
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
3 |
|
||||||
|
4 | x = submod
|
||||||
|
| ^^^-^^
|
||||||
|
| | |
|
||||||
|
| | Cursor offset
|
||||||
|
| source
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hover_submodule_import_from_wrong_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.sub<CURSOR>mod import val
|
||||||
|
|
||||||
|
x = submod
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// The submodule is correct
|
||||||
|
assert_snapshot!(test.hover(), @r"
|
||||||
|
<module 'mypackage.subpkg.submod'>
|
||||||
|
---------------------------------------------
|
||||||
|
```python
|
||||||
|
<module 'mypackage.subpkg.submod'>
|
||||||
|
```
|
||||||
|
---------------------------------------------
|
||||||
|
info[hover]: Hovered content is
|
||||||
|
--> mypackage/__init__.py:2:14
|
||||||
|
|
|
||||||
|
2 | from .subpkg.submod import val
|
||||||
|
| ^^^-^^
|
||||||
|
| | |
|
||||||
|
| | Cursor offset
|
||||||
|
| source
|
||||||
|
3 |
|
||||||
|
4 | x = submod
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hover_submodule_import_from_confusing_shadowed_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .sub<CURSOR>pkg import subpkg
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// The module is correct
|
||||||
|
assert_snapshot!(test.hover(), @r"
|
||||||
|
<module 'mypackage.subpkg'>
|
||||||
|
---------------------------------------------
|
||||||
|
```python
|
||||||
|
<module 'mypackage.subpkg'>
|
||||||
|
```
|
||||||
|
---------------------------------------------
|
||||||
|
info[hover]: Hovered content is
|
||||||
|
--> mypackage/__init__.py:2:7
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
| ^^^-^^
|
||||||
|
| | |
|
||||||
|
| | Cursor offset
|
||||||
|
| source
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hover_submodule_import_from_confusing_real_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg import sub<CURSOR>pkg
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// int is correct
|
||||||
|
assert_snapshot!(test.hover(), @r"
|
||||||
|
int
|
||||||
|
---------------------------------------------
|
||||||
|
```python
|
||||||
|
int
|
||||||
|
```
|
||||||
|
---------------------------------------------
|
||||||
|
info[hover]: Hovered content is
|
||||||
|
--> mypackage/__init__.py:2:21
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
| ^^^-^^
|
||||||
|
| | |
|
||||||
|
| | Cursor offset
|
||||||
|
| source
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hover_submodule_import_from_confusing_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg import subpkg
|
||||||
|
|
||||||
|
x = sub<CURSOR>pkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// int is correct
|
||||||
|
assert_snapshot!(test.hover(), @r"
|
||||||
|
int
|
||||||
|
---------------------------------------------
|
||||||
|
```python
|
||||||
|
int
|
||||||
|
```
|
||||||
|
---------------------------------------------
|
||||||
|
info[hover]: Hovered content is
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ^^^-^^
|
||||||
|
| | |
|
||||||
|
| | Cursor offset
|
||||||
|
| source
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
impl CursorTest {
|
impl CursorTest {
|
||||||
fn hover(&self) -> String {
|
fn hover(&self) -> String {
|
||||||
use std::fmt::Write;
|
use std::fmt::Write;
|
||||||
|
|
|
||||||
|
|
@@ -145,7 +145,7 @@ impl<'a> Importer<'a> {
         members: &MembersInScope,
     ) -> ImportAction {
         let request = request.avoid_conflicts(self.db, self.file, members);
-        let mut symbol_text: Box<str> = request.member.into();
+        let mut symbol_text: Box<str> = request.member.unwrap_or(request.module).into();
         let Some(response) = self.find(&request, members.at) else {
             let insertion = if let Some(future) = self.find_last_future_import(members.at) {
                 Insertion::end_of_statement(future.stmt, self.source, self.stylist)
@@ -157,14 +157,27 @@ impl<'a> Importer<'a> {
                 Insertion::start_of_file(self.parsed.suite(), self.source, self.stylist, range)
             };
             let import = insertion.into_edit(&request.to_string());
-            if matches!(request.style, ImportStyle::Import) {
-                symbol_text = format!("{}.{}", request.module, request.member).into();
+            if let Some(member) = request.member
+                && matches!(request.style, ImportStyle::Import)
+            {
+                symbol_text = format!("{}.{}", request.module, member).into();
             }
             return ImportAction {
                 import: Some(import),
                 symbol_text,
             };
         };
+
+        // When we just have a request to import a module (and not
+        // any members from that module), then the only way we can be
+        // here is if we found a pre-existing import that definitively
+        // satisfies the request. So we're done.
+        let Some(member) = request.member else {
+            return ImportAction {
+                import: None,
+                symbol_text,
+            };
+        };
         match response.kind {
             ImportResponseKind::Unqualified { ast, alias } => {
                 let member = alias.asname.as_ref().unwrap_or(&alias.name).as_str();
@@ -189,13 +202,10 @@ impl<'a> Importer<'a> {
                 let import = if let Some(insertion) =
                     Insertion::existing_import(response.import.stmt, self.tokens)
                 {
-                    insertion.into_edit(request.member)
+                    insertion.into_edit(member)
                 } else {
                     Insertion::end_of_statement(response.import.stmt, self.source, self.stylist)
-                        .into_edit(&format!(
-                            "from {} import {}",
-                            request.module, request.member
-                        ))
+                        .into_edit(&format!("from {} import {member}", request.module))
                 };
                 ImportAction {
                     import: Some(import),
@@ -481,6 +491,17 @@ impl<'ast> AstImportKind<'ast> {
                 Some(ImportResponseKind::Qualified { ast, alias })
             }
             AstImportKind::ImportFrom(ast) => {
+                // If the request is for a module itself, then we
+                // assume that it can never be satisfied by a
+                // `from ... import ...` statement. For example, a
+                // request for `collections.abc` needs an
+                // `import collections.abc`. Now, there could be a
+                // `from collections import abc`, and we could
+                // plausibly consider that a match and return a
+                // symbol text of `abc`. But it's not clear if that's
+                // the right choice or not.
+                let member = request.member?;
+
                 if request.force_style && !matches!(request.style, ImportStyle::ImportFrom) {
                     return None;
                 }
@@ -492,9 +513,7 @@ impl<'ast> AstImportKind<'ast> {
                 let kind = ast
                     .names
                     .iter()
-                    .find(|alias| {
-                        alias.name.as_str() == "*" || alias.name.as_str() == request.member
-                    })
+                    .find(|alias| alias.name.as_str() == "*" || alias.name.as_str() == member)
                     .map(|alias| ImportResponseKind::Unqualified { ast, alias })
                     .unwrap_or_else(|| ImportResponseKind::Partial(ast));
                 Some(kind)
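To make the comment in the hunks above concrete, here is a small illustrative Python sketch (the `collections.abc` example comes from the comment; the rest is not part of the diff). It shows why a request for a module is treated as needing a plain `import` statement rather than being matched against an existing `from` import.

```py
# A request for the module `collections.abc` is satisfied by a plain import,
# and the symbol text stays fully qualified:
import collections.abc

collections.abc.Sequence

# An existing `from` import binds only the final name `abc`; reusing it would
# force the shorter symbol text `abc`, which the logic above chooses not to do:
from collections import abc

abc.Sequence
```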
@@ -510,7 +529,10 @@ pub(crate) struct ImportRequest<'a> {
     /// `foo`, in `from foo import bar`).
     module: &'a str,
     /// The member to import (e.g., `bar`, in `from foo import bar`).
-    member: &'a str,
+    ///
+    /// When `member` is absent, then this request reflects an import
+    /// of the module itself. i.e., `import module`.
+    member: Option<&'a str>,
     /// The preferred style to use when importing the symbol (e.g.,
     /// `import foo` or `from foo import bar`).
     ///
@@ -532,7 +554,7 @@ impl<'a> ImportRequest<'a> {
     pub(crate) fn import(module: &'a str, member: &'a str) -> Self {
         Self {
             module,
-            member,
+            member: Some(member),
             style: ImportStyle::Import,
             force_style: false,
         }
@@ -545,12 +567,26 @@ impl<'a> ImportRequest<'a> {
     pub(crate) fn import_from(module: &'a str, member: &'a str) -> Self {
         Self {
             module,
-            member,
+            member: Some(member),
             style: ImportStyle::ImportFrom,
             force_style: false,
         }
     }
 
+    /// Create a new [`ImportRequest`] for bringing the given module
+    /// into scope.
+    ///
+    /// This is for just importing the module itself, always via an
+    /// `import` statement.
+    pub(crate) fn module(module: &'a str) -> Self {
+        Self {
+            module,
+            member: None,
+            style: ImportStyle::Import,
+            force_style: false,
+        }
+    }
+
     /// Causes this request to become a command. This will force the
     /// requested import style, even if another style would be more
     /// appropriate generally.
@@ -565,7 +601,13 @@ impl<'a> ImportRequest<'a> {
     /// of an import conflict are minimized (although not always reduced
     /// to zero).
     fn avoid_conflicts(self, db: &dyn Db, importing_file: File, members: &MembersInScope) -> Self {
-        match (members.map.get(self.module), members.map.get(self.member)) {
+        let Some(member) = self.member else {
+            return Self {
+                style: ImportStyle::Import,
+                ..self
+            };
+        };
+        match (members.map.get(self.module), members.map.get(member)) {
             // Neither symbol exists, so we can just proceed as
             // normal.
             (None, None) => self,
@@ -630,7 +672,10 @@ impl std::fmt::Display for ImportRequest<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
         match self.style {
             ImportStyle::Import => write!(f, "import {}", self.module),
-            ImportStyle::ImportFrom => write!(f, "from {} import {}", self.module, self.member),
+            ImportStyle::ImportFrom => match self.member {
+                None => write!(f, "import {}", self.module),
+                Some(member) => write!(f, "from {} import {member}", self.module),
+            },
         }
     }
 }
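As a reference point, here is a hedged sketch of the Python statements each request shape now renders to, based on the `Display` implementation above together with the `symbol_text` handling earlier in this file; the `collections`/`defaultdict` names are only placeholders, not taken from the diff.

```py
# ImportStyle::Import with a member: insert `import <module>` and qualify the symbol.
import collections

collections.defaultdict

# ImportStyle::ImportFrom with a member: insert `from <module> import <member>`.
from collections import defaultdict

defaultdict

# A module-only request (`member` is `None`) always renders as a plain import.
import collections

collections
```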
@@ -843,6 +888,10 @@ mod tests {
         self.add(ImportRequest::import_from(module, member))
     }
 
+    fn module(&self, module: &str) -> String {
+        self.add(ImportRequest::module(module))
+    }
+
     fn add(&self, request: ImportRequest<'_>) -> String {
         let node = covering_node(
             self.cursor.parsed.syntax().into(),
@@ -2156,4 +2205,73 @@ except ImportError:
         (bar.MAGIC)
         ");
     }
+
+    #[test]
+    fn import_module_blank() {
+        let test = cursor_test(
+            "\
+<CURSOR>
+",
+        );
+        assert_snapshot!(
+            test.module("collections"), @r"
+        import collections
+        collections
+        ");
+    }
+
+    #[test]
+    fn import_module_exists() {
+        let test = cursor_test(
+            "\
+import collections
+<CURSOR>
+",
+        );
+        assert_snapshot!(
+            test.module("collections"), @r"
+        import collections
+        collections
+        ");
+    }
+
+    #[test]
+    fn import_module_from_exists() {
+        let test = cursor_test(
+            "\
+from collections import defaultdict
+<CURSOR>
+",
+        );
+        assert_snapshot!(
+            test.module("collections"), @r"
+        import collections
+        from collections import defaultdict
+        collections
+        ");
+    }
+
+    // This test is working as intended. That is,
+    // `abc` is already in scope, so requesting an
+    // import for `collections.abc` could feasibly
+    // reuse the import and rewrite the symbol text
+    // to just `abc`. But for now it seems better
+    // to respect what has been written and add the
+    // `import collections.abc`. This behavior could
+    // plausibly be changed.
+    #[test]
+    fn import_module_from_via_member_exists() {
+        let test = cursor_test(
+            "\
+from collections import abc
+<CURSOR>
+",
+        );
+        assert_snapshot!(
+            test.module("collections.abc"), @r"
+        import collections.abc
+        from collections import abc
+        collections.abc
+        ");
+    }
 }
@@ -6428,11 +6428,11 @@ mod tests {
 a = Literal['a', 'b', 'c']",
         );
 
-        assert_snapshot!(test.inlay_hints(), @r"
+        assert_snapshot!(test.inlay_hints(), @r#"
         from typing import Literal
 
-        a[: <typing.Literal special form>] = Literal['a', 'b', 'c']
-        ");
+        a[: <special form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
+        "#);
     }
 
     struct InlayHintLocationDiagnostic {
@@ -110,6 +110,10 @@ mod tests {
     }
 
     fn rename(&self, new_name: &str) -> String {
+        let Some(_) = can_rename(&self.db, self.cursor.file, self.cursor.offset) else {
+            return "Cannot rename".to_string();
+        };
+
         let Some(rename_results) =
             rename(&self.db, self.cursor.file, self.cursor.offset, new_name)
         else {
@@ -1182,6 +1186,7 @@ result = func(10, y=20)
         ");
     }
 
+    // TODO Should rename the alias
     #[test]
     fn import_alias() {
         let test = CursorTest::builder()
@@ -1197,21 +1202,10 @@ result = func(10, y=20)
         )
         .build();
 
-        assert_snapshot!(test.rename("z"), @r"
-        info[rename]: Rename symbol (found 2 locations)
-         --> main.py:3:20
-          |
-        2 | import warnings
-        3 | import warnings as abc
-          |                    ^^^
-        4 |
-        5 | x = abc
-          |     ---
-        6 | y = warnings
-          |
-        ");
+        assert_snapshot!(test.rename("z"), @"Cannot rename");
     }
 
+    // TODO Should rename the alias
     #[test]
     fn import_alias_use() {
         let test = CursorTest::builder()
@ -1227,17 +1221,816 @@ result = func(10, y=20)
|
||||||
)
|
)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assert_snapshot!(test.rename("z"), @r"
|
assert_snapshot!(test.rename("z"), @"Cannot rename");
|
||||||
info[rename]: Rename symbol (found 2 locations)
|
}
|
||||||
--> main.py:3:20
|
|
||||||
|
#[test]
|
||||||
|
fn rename_submodule_import_from_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.submod import val
|
||||||
|
|
||||||
|
x = sub<CURSOR>pkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// TODO(submodule-imports): we should refuse to rename this (it's the name of a module)
|
||||||
|
assert_snapshot!(test.rename("mypkg"), @r"
|
||||||
|
info[rename]: Rename symbol (found 1 locations)
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
|
||||||
2 | import warnings
|
2 | from .subpkg.submod import val
|
||||||
3 | import warnings as abc
|
3 |
|
||||||
| ^^^
|
4 | x = subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_submodule_import_from_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .sub<CURSOR>pkg.submod import val
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// Refusing to rename is correct
|
||||||
|
assert_snapshot!(test.rename("mypkg"), @"Cannot rename");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_submodule_import_from_wrong_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.submod import val
|
||||||
|
|
||||||
|
x = sub<CURSOR>mod
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// Refusing to rename is good/fine here, it's an undefined reference
|
||||||
|
assert_snapshot!(test.rename("mypkg"), @"Cannot rename");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_submodule_import_from_wrong_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg.sub<CURSOR>mod import val
|
||||||
|
|
||||||
|
x = submod
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source("mypackage/subpkg/__init__.py", r#""#)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/submod.py",
|
||||||
|
r#"
|
||||||
|
val: int = 0
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// Refusing to rename is good here, it's a module name
|
||||||
|
assert_snapshot!(test.rename("mypkg"), @"Cannot rename");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_submodule_import_from_confusing_shadowed_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .sub<CURSOR>pkg import subpkg
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// Refusing to rename is good here, it's the name of a module
|
||||||
|
assert_snapshot!(test.rename("mypkg"), @"Cannot rename");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_submodule_import_from_confusing_real_def() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg import sub<CURSOR>pkg
|
||||||
|
|
||||||
|
x = subpkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// Renaming the integer is correct
|
||||||
|
assert_snapshot!(test.rename("mypkg"), @r"
|
||||||
|
info[rename]: Rename symbol (found 3 locations)
|
||||||
|
--> mypackage/__init__.py:2:21
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ------
|
||||||
|
|
|
||||||
|
::: mypackage/subpkg/__init__.py:2:1
|
||||||
|
|
|
||||||
|
2 | subpkg: int = 10
|
||||||
|
| ------
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_submodule_import_from_confusing_use() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mypackage/__init__.py",
|
||||||
|
r#"
|
||||||
|
from .subpkg import subpkg
|
||||||
|
|
||||||
|
x = sub<CURSOR>pkg
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"mypackage/subpkg/__init__.py",
|
||||||
|
r#"
|
||||||
|
subpkg: int = 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
// TODO(submodule-imports): this is incorrect, we should rename the `subpkg` int
|
||||||
|
// and the RHS of the import statement (but *not* rename the LHS).
|
||||||
|
//
|
||||||
|
// However us being cautious here *would* be good as the rename will actually
|
||||||
|
// result in a `subpkg` variable still existing in this code, as the import's LHS
|
||||||
|
// `DefinitionKind::ImportFromSubmodule` would stop being overwritten by the RHS!
|
||||||
|
assert_snapshot!(test.rename("mypkg"), @r"
|
||||||
|
info[rename]: Rename symbol (found 1 locations)
|
||||||
|
--> mypackage/__init__.py:4:5
|
||||||
|
|
|
||||||
|
2 | from .subpkg import subpkg
|
||||||
|
3 |
|
||||||
|
4 | x = subpkg
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: This should rename all overloads
|
||||||
|
#[test]
|
||||||
|
fn rename_overloaded_function() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"lib1.py",
|
||||||
|
r#"
|
||||||
|
from typing import overload, Any
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def test<CURSOR>() -> None: ...
|
||||||
|
@overload
|
||||||
|
def test(a: str) -> str: ...
|
||||||
|
@overload
|
||||||
|
def test(a: int) -> int: ...
|
||||||
|
|
||||||
|
def test(a: Any) -> Any:
|
||||||
|
return a
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
from lib2 import test
|
||||||
|
|
||||||
|
test("test")
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r"
|
||||||
|
info[rename]: Rename symbol (found 1 locations)
|
||||||
|
--> lib1.py:5:5
|
||||||
|
|
|
||||||
|
4 | @overload
|
||||||
|
5 | def test() -> None: ...
|
||||||
|
| ^^^^
|
||||||
|
6 | @overload
|
||||||
|
7 | def test(a: str) -> str: ...
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_property() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"lib.py",
|
||||||
|
r#"
|
||||||
|
class Foo:
|
||||||
|
@property
|
||||||
|
def my_property<CURSOR>(self) -> int:
|
||||||
|
return 42
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
from lib import Foo
|
||||||
|
|
||||||
|
print(Foo().my_property)
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r"
|
||||||
|
info[rename]: Rename symbol (found 2 locations)
|
||||||
|
--> lib.py:4:9
|
||||||
|
|
|
||||||
|
2 | class Foo:
|
||||||
|
3 | @property
|
||||||
|
4 | def my_property(self) -> int:
|
||||||
|
| ^^^^^^^^^^^
|
||||||
|
5 | return 42
|
||||||
|
|
|
||||||
|
::: main.py:4:13
|
||||||
|
|
|
||||||
|
2 | from lib import Foo
|
||||||
|
3 |
|
||||||
|
4 | print(Foo().my_property)
|
||||||
|
| -----------
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: this should rename the name of the function decorated with
|
||||||
|
// `@my_property.setter` as well as the getter function name
|
||||||
|
#[test]
|
||||||
|
fn rename_property_with_setter() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"lib.py",
|
||||||
|
r#"
|
||||||
|
class Foo:
|
||||||
|
@property
|
||||||
|
def my_property<CURSOR>(self) -> int:
|
||||||
|
return 42
|
||||||
|
|
||||||
|
@my_property.setter
|
||||||
|
def my_property(self, value: int) -> None:
|
||||||
|
pass
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
from lib import Foo
|
||||||
|
|
||||||
|
print(Foo().my_property)
|
||||||
|
Foo().my_property = 56
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r"
|
||||||
|
info[rename]: Rename symbol (found 4 locations)
|
||||||
|
--> lib.py:4:9
|
||||||
|
|
|
||||||
|
2 | class Foo:
|
||||||
|
3 | @property
|
||||||
|
4 | def my_property(self) -> int:
|
||||||
|
| ^^^^^^^^^^^
|
||||||
|
5 | return 42
|
||||||
|
6 |
|
||||||
|
7 | @my_property.setter
|
||||||
|
| -----------
|
||||||
|
8 | def my_property(self, value: int) -> None:
|
||||||
|
9 | pass
|
||||||
|
|
|
||||||
|
::: main.py:4:13
|
||||||
|
|
|
||||||
|
2 | from lib import Foo
|
||||||
|
3 |
|
||||||
|
4 | print(Foo().my_property)
|
||||||
|
| -----------
|
||||||
|
5 | Foo().my_property = 56
|
||||||
|
| -----------
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: this should rename the name of the function decorated with
|
||||||
|
// `@my_property.deleter` as well as the getter function name
|
||||||
|
#[test]
|
||||||
|
fn rename_property_with_deleter() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"lib.py",
|
||||||
|
r#"
|
||||||
|
class Foo:
|
||||||
|
@property
|
||||||
|
def my_property<CURSOR>(self) -> int:
|
||||||
|
return 42
|
||||||
|
|
||||||
|
@my_property.deleter
|
||||||
|
def my_property(self) -> None:
|
||||||
|
pass
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
from lib import Foo
|
||||||
|
|
||||||
|
print(Foo().my_property)
|
||||||
|
del Foo().my_property
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r"
|
||||||
|
info[rename]: Rename symbol (found 4 locations)
|
||||||
|
--> lib.py:4:9
|
||||||
|
|
|
||||||
|
2 | class Foo:
|
||||||
|
3 | @property
|
||||||
|
4 | def my_property(self) -> int:
|
||||||
|
| ^^^^^^^^^^^
|
||||||
|
5 | return 42
|
||||||
|
6 |
|
||||||
|
7 | @my_property.deleter
|
||||||
|
| -----------
|
||||||
|
8 | def my_property(self) -> None:
|
||||||
|
9 | pass
|
||||||
|
|
|
||||||
|
::: main.py:4:13
|
||||||
|
|
|
||||||
|
2 | from lib import Foo
|
||||||
|
3 |
|
||||||
|
4 | print(Foo().my_property)
|
||||||
|
| -----------
|
||||||
|
5 | del Foo().my_property
|
||||||
|
| -----------
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: this should rename the name of the functions decorated with
|
||||||
|
// `@my_property.deleter` and `@my_property.deleter` as well as the
|
||||||
|
// getter function name
|
||||||
|
#[test]
|
||||||
|
fn rename_property_with_setter_and_deleter() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"lib.py",
|
||||||
|
r#"
|
||||||
|
class Foo:
|
||||||
|
@property
|
||||||
|
def my_property<CURSOR>(self) -> int:
|
||||||
|
return 42
|
||||||
|
|
||||||
|
@my_property.setter
|
||||||
|
def my_property(self, value: int) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@my_property.deleter
|
||||||
|
def my_property(self) -> None:
|
||||||
|
pass
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
from lib import Foo
|
||||||
|
|
||||||
|
print(Foo().my_property)
|
||||||
|
Foo().my_property = 56
|
||||||
|
del Foo().my_property
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r"
|
||||||
|
info[rename]: Rename symbol (found 6 locations)
|
||||||
|
--> lib.py:4:9
|
||||||
|
|
|
||||||
|
2 | class Foo:
|
||||||
|
3 | @property
|
||||||
|
4 | def my_property(self) -> int:
|
||||||
|
| ^^^^^^^^^^^
|
||||||
|
5 | return 42
|
||||||
|
6 |
|
||||||
|
7 | @my_property.setter
|
||||||
|
| -----------
|
||||||
|
8 | def my_property(self, value: int) -> None:
|
||||||
|
9 | pass
|
||||||
|
10 |
|
||||||
|
11 | @my_property.deleter
|
||||||
|
| -----------
|
||||||
|
12 | def my_property(self) -> None:
|
||||||
|
13 | pass
|
||||||
|
|
|
||||||
|
::: main.py:4:13
|
||||||
|
|
|
||||||
|
2 | from lib import Foo
|
||||||
|
3 |
|
||||||
|
4 | print(Foo().my_property)
|
||||||
|
| -----------
|
||||||
|
5 | Foo().my_property = 56
|
||||||
|
| -----------
|
||||||
|
6 | del Foo().my_property
|
||||||
|
| -----------
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_single_dispatch_function() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"foo.py",
|
||||||
|
r#"
|
||||||
|
from functools import singledispatch
|
||||||
|
|
||||||
|
@singledispatch
|
||||||
|
def f<CURSOR>(x: object):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@f.register
|
||||||
|
def _(x: int) -> str:
|
||||||
|
return "int"
|
||||||
|
|
||||||
|
@f.register
|
||||||
|
def _(x: str) -> int:
|
||||||
|
return int(x)
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r#"
|
||||||
|
info[rename]: Rename symbol (found 3 locations)
|
||||||
|
--> foo.py:5:5
|
||||||
|
|
|
||||||
|
4 | @singledispatch
|
||||||
|
5 | def f(x: object):
|
||||||
|
| ^
|
||||||
|
6 | raise NotImplementedError
|
||||||
|
7 |
|
||||||
|
8 | @f.register
|
||||||
|
| -
|
||||||
|
9 | def _(x: int) -> str:
|
||||||
|
10 | return "int"
|
||||||
|
11 |
|
||||||
|
12 | @f.register
|
||||||
|
| -
|
||||||
|
13 | def _(x: str) -> int:
|
||||||
|
14 | return int(x)
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_single_dispatch_function_stacked_register() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"foo.py",
|
||||||
|
r#"
|
||||||
|
from functools import singledispatch
|
||||||
|
|
||||||
|
@singledispatch
|
||||||
|
def f<CURSOR>(x):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@f.register(int)
|
||||||
|
@f.register(float)
|
||||||
|
def _(x) -> float:
|
||||||
|
return "int"
|
||||||
|
|
||||||
|
@f.register(str)
|
||||||
|
def _(x) -> int:
|
||||||
|
return int(x)
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r#"
|
||||||
|
info[rename]: Rename symbol (found 4 locations)
|
||||||
|
--> foo.py:5:5
|
||||||
|
|
|
||||||
|
4 | @singledispatch
|
||||||
|
5 | def f(x):
|
||||||
|
| ^
|
||||||
|
6 | raise NotImplementedError
|
||||||
|
7 |
|
||||||
|
8 | @f.register(int)
|
||||||
|
| -
|
||||||
|
9 | @f.register(float)
|
||||||
|
| -
|
||||||
|
10 | def _(x) -> float:
|
||||||
|
11 | return "int"
|
||||||
|
12 |
|
||||||
|
13 | @f.register(str)
|
||||||
|
| -
|
||||||
|
14 | def _(x) -> int:
|
||||||
|
15 | return int(x)
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_single_dispatchmethod() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"foo.py",
|
||||||
|
r#"
|
||||||
|
from functools import singledispatchmethod
|
||||||
|
|
||||||
|
class Foo:
|
||||||
|
@singledispatchmethod
|
||||||
|
def f<CURSOR>(self, x: object):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@f.register
|
||||||
|
def _(self, x: str) -> float:
|
||||||
|
return "int"
|
||||||
|
|
||||||
|
@f.register
|
||||||
|
def _(self, x: str) -> int:
|
||||||
|
return int(x)
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r#"
|
||||||
|
info[rename]: Rename symbol (found 3 locations)
|
||||||
|
--> foo.py:6:9
|
||||||
|
|
|
||||||
|
4 | class Foo:
|
||||||
|
5 | @singledispatchmethod
|
||||||
|
6 | def f(self, x: object):
|
||||||
|
| ^
|
||||||
|
7 | raise NotImplementedError
|
||||||
|
8 |
|
||||||
|
9 | @f.register
|
||||||
|
| -
|
||||||
|
10 | def _(self, x: str) -> float:
|
||||||
|
11 | return "int"
|
||||||
|
12 |
|
||||||
|
13 | @f.register
|
||||||
|
| -
|
||||||
|
14 | def _(self, x: str) -> int:
|
||||||
|
15 | return int(x)
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_single_dispatchmethod_staticmethod() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"foo.py",
|
||||||
|
r#"
|
||||||
|
from functools import singledispatchmethod
|
||||||
|
|
||||||
|
class Foo:
|
||||||
|
@singledispatchmethod
|
||||||
|
@staticmethod
|
||||||
|
def f<CURSOR>(self, x):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@f.register(str)
|
||||||
|
@staticmethod
|
||||||
|
def _(x: int) -> str:
|
||||||
|
return "int"
|
||||||
|
|
||||||
|
@f.register
|
||||||
|
@staticmethod
|
||||||
|
def _(x: str) -> int:
|
||||||
|
return int(x)
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r#"
|
||||||
|
info[rename]: Rename symbol (found 3 locations)
|
||||||
|
--> foo.py:7:9
|
||||||
|
|
|
||||||
|
5 | @singledispatchmethod
|
||||||
|
6 | @staticmethod
|
||||||
|
7 | def f(self, x):
|
||||||
|
| ^
|
||||||
|
8 | raise NotImplementedError
|
||||||
|
9 |
|
||||||
|
10 | @f.register(str)
|
||||||
|
| -
|
||||||
|
11 | @staticmethod
|
||||||
|
12 | def _(x: int) -> str:
|
||||||
|
13 | return "int"
|
||||||
|
14 |
|
||||||
|
15 | @f.register
|
||||||
|
| -
|
||||||
|
16 | @staticmethod
|
||||||
|
17 | def _(x: str) -> int:
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_single_dispatchmethod_classmethod() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"foo.py",
|
||||||
|
r#"
|
||||||
|
from functools import singledispatchmethod
|
||||||
|
|
||||||
|
class Foo:
|
||||||
|
@singledispatchmethod
|
||||||
|
@classmethod
|
||||||
|
def f<CURSOR>(cls, x):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@f.register(str)
|
||||||
|
@classmethod
|
||||||
|
def _(cls, x) -> str:
|
||||||
|
return "int"
|
||||||
|
|
||||||
|
@f.register(int)
|
||||||
|
@f.register(float)
|
||||||
|
@staticmethod
|
||||||
|
def _(cls, x) -> int:
|
||||||
|
return int(x)
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r#"
|
||||||
|
info[rename]: Rename symbol (found 4 locations)
|
||||||
|
--> foo.py:7:9
|
||||||
|
|
|
||||||
|
5 | @singledispatchmethod
|
||||||
|
6 | @classmethod
|
||||||
|
7 | def f(cls, x):
|
||||||
|
| ^
|
||||||
|
8 | raise NotImplementedError
|
||||||
|
9 |
|
||||||
|
10 | @f.register(str)
|
||||||
|
| -
|
||||||
|
11 | @classmethod
|
||||||
|
12 | def _(cls, x) -> str:
|
||||||
|
13 | return "int"
|
||||||
|
14 |
|
||||||
|
15 | @f.register(int)
|
||||||
|
| -
|
||||||
|
16 | @f.register(float)
|
||||||
|
| -
|
||||||
|
17 | @staticmethod
|
||||||
|
18 | def _(cls, x) -> int:
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn rename_attribute() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"foo.py",
|
||||||
|
r#"
|
||||||
|
class Test:
|
||||||
|
attribute<CURSOR>: str
|
||||||
|
|
||||||
|
def __init__(self, value: str):
|
||||||
|
self.attribute = value
|
||||||
|
|
||||||
|
class Child(Test):
|
||||||
|
def test(self):
|
||||||
|
return self.attribute
|
||||||
|
|
||||||
|
|
||||||
|
c = Child("test")
|
||||||
|
|
||||||
|
print(c.attribute)
|
||||||
|
c.attribute = "new_value"
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r#"
|
||||||
|
info[rename]: Rename symbol (found 5 locations)
|
||||||
|
--> foo.py:3:5
|
||||||
|
|
|
||||||
|
2 | class Test:
|
||||||
|
3 | attribute: str
|
||||||
|
| ^^^^^^^^^
|
||||||
4 |
|
4 |
|
||||||
5 | x = abc
|
5 | def __init__(self, value: str):
|
||||||
| ---
|
6 | self.attribute = value
|
||||||
6 | y = warnings
|
| ---------
|
||||||
|
7 |
|
||||||
|
8 | class Child(Test):
|
||||||
|
9 | def test(self):
|
||||||
|
10 | return self.attribute
|
||||||
|
| ---------
|
||||||
|
|
|
||||||
|
::: foo.py:15:9
|
||||||
|
|
|
||||||
|
13 | c = Child("test")
|
||||||
|
14 |
|
||||||
|
15 | print(c.attribute)
|
||||||
|
| ---------
|
||||||
|
16 | c.attribute = "new_value"
|
||||||
|
| ---------
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: This should rename all attribute usages
|
||||||
|
// Note: Pylance only renames the assignment in `__init__`.
|
||||||
|
#[test]
|
||||||
|
fn rename_implicit_attribute() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"main.py",
|
||||||
|
r#"
|
||||||
|
class Test:
|
||||||
|
def __init__(self, value: str):
|
||||||
|
self.<CURSOR>attribute = value
|
||||||
|
|
||||||
|
class Child(Test):
|
||||||
|
def __init__(self, value: str):
|
||||||
|
super().__init__(value)
|
||||||
|
self.attribute = value + "child"
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
return self.attribute
|
||||||
|
|
||||||
|
|
||||||
|
c = Child("test")
|
||||||
|
|
||||||
|
print(c.attribute)
|
||||||
|
c.attribute = "new_value"
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.rename("better_name"), @r"
|
||||||
|
info[rename]: Rename symbol (found 1 locations)
|
||||||
|
--> main.py:4:14
|
||||||
|
|
|
||||||
|
2 | class Test:
|
||||||
|
3 | def __init__(self, value: str):
|
||||||
|
4 | self.attribute = value
|
||||||
|
| ^^^^^^^^^
|
||||||
|
5 |
|
||||||
|
6 | class Child(Test):
|
||||||
|
|
|
|
||||||
");
|
");
|
||||||
}
|
}
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
@@ -0,0 +1,14 @@
+from typing import Protocol
+
+class A(Protocol):
+    @property
+    def f(self): ...
+
+type Recursive = int | tuple[Recursive, ...]
+
+class B[T: A]: ...
+
+class C[T: A](A):
+    x: tuple[Recursive, ...]
+
+class D(B[C]): ...
@@ -0,0 +1,6 @@
+# This is a regression test for `store_expression_type`.
+# ref: https://github.com/astral-sh/ty/issues/1688
+
+x: int
+
+type x[T] = x[T, U]
@@ -0,0 +1,6 @@
+class C[T: (A, B)]:
+    def f(foo: T):
+        try:
+            pass
+        except foo:
+            pass
@@ -169,13 +169,13 @@ def f(x: Any[int]):
 `Any` cannot be called (this leads to a `TypeError` at runtime):
 
 ```py
-Any() # error: [call-non-callable] "Object of type `typing.Any` is not callable"
+Any() # error: [call-non-callable] "Object of type `<special form 'typing.Any'>` is not callable"
 ```
 
 `Any` also cannot be used as a metaclass (under the hood, this leads to an implicit call to `Any`):
 
 ```py
-class F(metaclass=Any): ... # error: [invalid-metaclass] "Metaclass type `typing.Any` is not callable"
+class F(metaclass=Any): ... # error: [invalid-metaclass] "Metaclass type `<special form 'typing.Any'>` is not callable"
 ```
 
 And `Any` cannot be used in `isinstance()` checks:
@@ -59,7 +59,7 @@ python-version = "3.11"
 ```py
 from typing import Never
 
-reveal_type(Never) # revealed: typing.Never
+reveal_type(Never) # revealed: <special form 'typing.Never'>
 ```
 
 ### Python 3.10
@@ -13,7 +13,7 @@ python-version = "3.10"
 class A: ...
 class B: ...
 
-reveal_type(A | B) # revealed: types.UnionType
+reveal_type(A | B) # revealed: <types.UnionType special form 'A | B'>
 ```
 
 ## Union of two classes (prior to 3.10)
@@ -43,14 +43,14 @@ class A: ...
 class B: ...
 
 def _(sub_a: type[A], sub_b: type[B]):
-    reveal_type(A | sub_b) # revealed: types.UnionType
-    reveal_type(sub_a | B) # revealed: types.UnionType
-    reveal_type(sub_a | sub_b) # revealed: types.UnionType
+    reveal_type(A | sub_b) # revealed: <types.UnionType special form>
+    reveal_type(sub_a | B) # revealed: <types.UnionType special form>
+    reveal_type(sub_a | sub_b) # revealed: <types.UnionType special form>
 
 class C[T]: ...
 class D[T]: ...
 
-reveal_type(C | D) # revealed: types.UnionType
+reveal_type(C | D) # revealed: <types.UnionType special form 'C[Unknown] | D[Unknown]'>
 
-reveal_type(C[int] | D[str]) # revealed: types.UnionType
+reveal_type(C[int] | D[str]) # revealed: <types.UnionType special form 'C[int] | D[str]'>
 ```
@@ -227,17 +227,22 @@ def _(literals_2: Literal[0, 1], b: bool, flag: bool):
     literals_16 = 4 * literals_4 + literals_4 # Literal[0, 1, .., 15]
     literals_64 = 4 * literals_16 + literals_4 # Literal[0, 1, .., 63]
     literals_128 = 2 * literals_64 + literals_2 # Literal[0, 1, .., 127]
+    literals_256 = 2 * literals_128 + literals_2 # Literal[0, 1, .., 255]
 
-    # Going beyond the MAX_UNION_LITERALS limit (currently 200):
-    literals_256 = 16 * literals_16 + literals_16
-    reveal_type(literals_256) # revealed: int
+    # Going beyond the MAX_UNION_LITERALS limit (currently 512):
+    literals_512 = 2 * literals_256 + literals_2 # Literal[0, 1, .., 511]
+    reveal_type(literals_512 if flag else 512) # revealed: int
 
     # Going beyond the limit when another type is already part of the union
     bool_and_literals_128 = b if flag else literals_128 # bool | Literal[0, 1, ..., 127]
     literals_128_shifted = literals_128 + 128 # Literal[128, 129, ..., 255]
+    literals_256_shifted = literals_256 + 256 # Literal[256, 257, ..., 511]
 
     # Now union the two:
-    reveal_type(bool_and_literals_128 if flag else literals_128_shifted) # revealed: int
+    two = bool_and_literals_128 if flag else literals_128_shifted
+    # revealed: bool | Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255]
+    reveal_type(two)
+    reveal_type(two if flag else literals_256_shifted) # revealed: int
 ```
 
 ## Simplifying gradually-equivalent types
@@ -603,12 +603,14 @@ super(object, object()).__class__
 # Not all objects valid in a class's bases list are valid as the first argument to `super()`.
 # For example, it's valid to inherit from `typing.ChainMap`, but it's not valid as the first argument to `super()`.
 #
-# error: [invalid-super-argument] "`typing.ChainMap` is not a valid class"
+# error: [invalid-super-argument] "`<special form 'typing.ChainMap'>` is not a valid class"
 reveal_type(super(typing.ChainMap, collections.ChainMap())) # revealed: Unknown
 
 # Meanwhile, it's not valid to inherit from unsubscripted `typing.Generic`,
 # but it *is* valid as the first argument to `super()`.
-reveal_type(super(typing.Generic, typing.SupportsInt)) # revealed: <super: typing.Generic, <class 'SupportsInt'>>
+#
+# revealed: <super: <special form 'typing.Generic'>, <class 'SupportsInt'>>
+reveal_type(super(typing.Generic, typing.SupportsInt))
 
 def _(x: type[typing.Any], y: typing.Any):
     reveal_type(super(x, y)) # revealed: <super: Any, Any>
@@ -0,0 +1,26 @@
+# Diagnostics for invalid attribute access on special forms
+
+<!-- snapshot-diagnostics -->
+
+```py
+from typing_extensions import Any, Final, LiteralString, Self
+
+X = Any
+
+class Foo:
+    X: Final = LiteralString
+    a: int
+    b: Self
+
+    class Bar:
+        def __init__(self):
+            self.y: Final = LiteralString
+
+X.foo # error: [unresolved-attribute]
+X.aaaaooooooo # error: [unresolved-attribute]
+Foo.X.startswith # error: [unresolved-attribute]
+Foo.Bar().y.startswith # error: [unresolved-attribute]
+
+# TODO: false positive (just testing the diagnostic in the meantime)
+Foo().b.a # error: [unresolved-attribute]
+```
@@ -7,10 +7,11 @@
 ```py
 from typing_extensions import assert_type
 
-def _(x: int):
+def _(x: int, y: bool):
     assert_type(x, int) # fine
     assert_type(x, str) # error: [type-assertion-failure]
     assert_type(assert_type(x, int), int)
+    assert_type(y, int) # error: [type-assertion-failure]
 ```
 
 ## Narrowing
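The new `assert_type(y, int)` assertion in the hunk above is an error because `assert_type` requires the inferred type to match the given type exactly, not merely be assignable to it. A minimal standalone illustration (not part of the test suite):

```py
from typing_extensions import assert_type

def check(flag: bool) -> None:
    assert_type(flag, bool)  # ok: the inferred type is exactly `bool`
    x: int = flag            # ok: `bool` is assignable to `int`
    assert_type(flag, int)   # error at type-check time: `bool` is not exactly `int`
```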
@@ -80,7 +80,7 @@ class Foo(Protocol):
     def f[T](self, v: T) -> T: ...
 
 t = (Protocol, int)
-reveal_type(t[0]) # revealed: typing.Protocol
+reveal_type(t[0]) # revealed: <special form 'typing.Protocol'>
 
 class Lorem(t[0]):
     def f(self) -> int: ...
@ -301,6 +301,7 @@ consistent with each other.
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
|
|
||||||
|
|
@ -308,6 +309,11 @@ class C(Generic[T]):
|
||||||
def __new__(cls, x: T) -> "C[T]":
|
def __new__(cls, x: T) -> "C[T]":
|
||||||
return object.__new__(cls)
|
return object.__new__(cls)
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C(1)) # revealed: C[int]
|
reveal_type(C(1)) # revealed: C[int]
|
||||||
|
|
||||||
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
||||||
|
|
@ -318,12 +324,18 @@ wrong_innards: C[int] = C("five")
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
|
|
||||||
class C(Generic[T]):
|
class C(Generic[T]):
|
||||||
def __init__(self, x: T) -> None: ...
|
def __init__(self, x: T) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C(1)) # revealed: C[int]
|
reveal_type(C(1)) # revealed: C[int]
|
||||||
|
|
||||||
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
||||||
|
|
@ -334,6 +346,7 @@ wrong_innards: C[int] = C("five")
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
|
|
||||||
|
|
@ -343,6 +356,11 @@ class C(Generic[T]):
|
||||||
|
|
||||||
def __init__(self, x: T) -> None: ...
|
def __init__(self, x: T) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C(1)) # revealed: C[int]
|
reveal_type(C(1)) # revealed: C[int]
|
||||||
|
|
||||||
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
||||||
|
|
@ -353,6 +371,7 @@ wrong_innards: C[int] = C("five")
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
|
|
||||||
|
|
@ -362,6 +381,11 @@ class C(Generic[T]):
|
||||||
|
|
||||||
def __init__(self, x: T) -> None: ...
|
def __init__(self, x: T) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C(1)) # revealed: C[int]
|
reveal_type(C(1)) # revealed: C[int]
|
||||||
|
|
||||||
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
||||||
|
|
@ -373,6 +397,11 @@ class D(Generic[T]):
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs) -> None: ...
|
def __init__(self, *args, **kwargs) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(D(1)) # revealed: D[int]
|
reveal_type(D(1)) # revealed: D[int]
|
||||||
|
|
||||||
# error: [invalid-assignment] "Object of type `D[int | str]` is not assignable to `D[int]`"
|
# error: [invalid-assignment] "Object of type `D[int | str]` is not assignable to `D[int]`"
|
||||||
|
|
@ -386,6 +415,7 @@ to specialize the class.
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
U = TypeVar("U")
|
U = TypeVar("U")
|
||||||
|
|
@ -398,6 +428,11 @@ class C(Generic[T, U]):
|
||||||
class D(C[V, int]):
|
class D(C[V, int]):
|
||||||
def __init__(self, x: V) -> None: ...
|
def __init__(self, x: V) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[V@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[V@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(D(1)) # revealed: D[int]
|
reveal_type(D(1)) # revealed: D[int]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -405,6 +440,7 @@ reveal_type(D(1)) # revealed: D[int]
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
U = TypeVar("U")
|
U = TypeVar("U")
|
||||||
|
|
@ -415,6 +451,11 @@ class C(Generic[T, U]):
|
||||||
class D(C[T, U]):
|
class D(C[T, U]):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(C(1, "str")) # revealed: C[int, str]
|
reveal_type(C(1, "str")) # revealed: C[int, str]
|
||||||
reveal_type(D(1, "str")) # revealed: D[int, str]
|
reveal_type(D(1, "str")) # revealed: D[int, str]
|
||||||
```
|
```
|
||||||
|
|
@ -425,6 +466,7 @@ This is a specific example of the above, since it was reported specifically by a
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
U = TypeVar("U")
|
U = TypeVar("U")
|
||||||
|
|
@ -432,6 +474,11 @@ U = TypeVar("U")
|
||||||
class D(dict[T, U]):
|
class D(dict[T, U]):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(D(key=1)) # revealed: D[str, int]
|
reveal_type(D(key=1)) # revealed: D[str, int]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -443,12 +490,18 @@ context. But from the user's point of view, this is another example of the above
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
U = TypeVar("U")
|
U = TypeVar("U")
|
||||||
|
|
||||||
class C(tuple[T, U]): ...
|
class C(tuple[T, U]): ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C, U@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C, U@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C((1, 2))) # revealed: C[int, int]
|
reveal_type(C((1, 2))) # revealed: C[int, int]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -480,6 +533,7 @@ def func8(t1: tuple[complex, list[int]], t2: tuple[int, *tuple[str, ...]], t3: t
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
S = TypeVar("S")
|
S = TypeVar("S")
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
|
|
@ -487,6 +541,11 @@ T = TypeVar("T")
|
||||||
class C(Generic[T]):
|
class C(Generic[T]):
|
||||||
def __init__(self, x: T, y: S) -> None: ...
|
def __init__(self, x: T, y: S) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C, S@__init__]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C(1, 1)) # revealed: C[int]
|
reveal_type(C(1, 1)) # revealed: C[int]
|
||||||
reveal_type(C(1, "string")) # revealed: C[int]
|
reveal_type(C(1, "string")) # revealed: C[int]
|
||||||
reveal_type(C(1, True)) # revealed: C[int]
|
reveal_type(C(1, True)) # revealed: C[int]
|
||||||
|
|
@ -499,6 +558,7 @@ wrong_innards: C[int] = C("five", 1)
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import overload, Generic, TypeVar
|
from typing_extensions import overload, Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
U = TypeVar("U")
|
U = TypeVar("U")
|
||||||
|
|
@ -514,6 +574,11 @@ class C(Generic[T]):
|
||||||
def __init__(self, x: int) -> None: ...
|
def __init__(self, x: int) -> None: ...
|
||||||
def __init__(self, x: str | bytes | int) -> None: ...
|
def __init__(self, x: str | bytes | int) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C("string")) # revealed: C[str]
|
reveal_type(C("string")) # revealed: C[str]
|
||||||
reveal_type(C(b"bytes")) # revealed: C[bytes]
|
reveal_type(C(b"bytes")) # revealed: C[bytes]
|
||||||
reveal_type(C(12)) # revealed: C[Unknown]
|
reveal_type(C(12)) # revealed: C[Unknown]
|
||||||
|
|
@ -541,6 +606,11 @@ class D(Generic[T, U]):
|
||||||
def __init__(self, t: T, u: U) -> None: ...
|
def __init__(self, t: T, u: U) -> None: ...
|
||||||
def __init__(self, *args) -> None: ...
|
def __init__(self, *args) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(D("string")) # revealed: D[str, str]
|
reveal_type(D("string")) # revealed: D[str, str]
|
||||||
reveal_type(D(1)) # revealed: D[str, int]
|
reveal_type(D(1)) # revealed: D[str, int]
|
||||||
reveal_type(D(1, "string")) # revealed: D[int, str]
|
reveal_type(D(1, "string")) # revealed: D[int, str]
|
||||||
|
|
@ -551,6 +621,7 @@ reveal_type(D(1, "string")) # revealed: D[int, str]
|
||||||
```py
|
```py
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing_extensions import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
|
|
||||||
|
|
@ -558,6 +629,11 @@ T = TypeVar("T")
|
||||||
class A(Generic[T]):
|
class A(Generic[T]):
|
||||||
x: T
|
x: T
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@A]
|
||||||
|
reveal_type(generic_context(A))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@A]
|
||||||
|
reveal_type(generic_context(into_callable(A)))
|
||||||
|
|
||||||
reveal_type(A(x=1)) # revealed: A[int]
|
reveal_type(A(x=1)) # revealed: A[int]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -565,17 +641,28 @@ reveal_type(A(x=1)) # revealed: A[int]
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
U = TypeVar("U", default=T)
|
U = TypeVar("U", default=T)
|
||||||
|
|
||||||
class C(Generic[T, U]): ...
|
class C(Generic[T, U]): ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C, U@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C, U@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C()) # revealed: C[Unknown, Unknown]
|
reveal_type(C()) # revealed: C[Unknown, Unknown]
|
||||||
|
|
||||||
class D(Generic[T, U]):
|
class D(Generic[T, U]):
|
||||||
def __init__(self) -> None: ...
|
def __init__(self) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(D()) # revealed: D[Unknown, Unknown]
|
reveal_type(D()) # revealed: D[Unknown, Unknown]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||
|
@@ -62,7 +62,7 @@ The specialization must match the generic types:

 ```py
 # error: [invalid-type-arguments] "Too many type arguments: expected 1, got 2"
-reveal_type(C[int, int]) # revealed: C[Unknown]
+reveal_type(C[int, int]) # revealed: <type alias 'C[Unknown]'>
 ```

 And non-generic types cannot be specialized:

@@ -85,19 +85,19 @@ type BoundedByUnion[T: int | str] = ...

 class IntSubclass(int): ...

-reveal_type(Bounded[int]) # revealed: Bounded[int]
-reveal_type(Bounded[IntSubclass]) # revealed: Bounded[IntSubclass]
+reveal_type(Bounded[int]) # revealed: <type alias 'Bounded[int]'>
+reveal_type(Bounded[IntSubclass]) # revealed: <type alias 'Bounded[IntSubclass]'>

 # error: [invalid-type-arguments] "Type `str` is not assignable to upper bound `int` of type variable `T@Bounded`"
-reveal_type(Bounded[str]) # revealed: Bounded[Unknown]
+reveal_type(Bounded[str]) # revealed: <type alias 'Bounded[Unknown]'>

 # error: [invalid-type-arguments] "Type `int | str` is not assignable to upper bound `int` of type variable `T@Bounded`"
-reveal_type(Bounded[int | str]) # revealed: Bounded[Unknown]
+reveal_type(Bounded[int | str]) # revealed: <type alias 'Bounded[Unknown]'>

-reveal_type(BoundedByUnion[int]) # revealed: BoundedByUnion[int]
-reveal_type(BoundedByUnion[IntSubclass]) # revealed: BoundedByUnion[IntSubclass]
-reveal_type(BoundedByUnion[str]) # revealed: BoundedByUnion[str]
-reveal_type(BoundedByUnion[int | str]) # revealed: BoundedByUnion[int | str]
+reveal_type(BoundedByUnion[int]) # revealed: <type alias 'BoundedByUnion[int]'>
+reveal_type(BoundedByUnion[IntSubclass]) # revealed: <type alias 'BoundedByUnion[IntSubclass]'>
+reveal_type(BoundedByUnion[str]) # revealed: <type alias 'BoundedByUnion[str]'>
+reveal_type(BoundedByUnion[int | str]) # revealed: <type alias 'BoundedByUnion[int | str]'>
 ```

 If the type variable is constrained, the specialized type must satisfy those constraints:

@@ -105,20 +105,20 @@ If the type variable is constrained, the specialized type must satisfy those con

 ```py
 type Constrained[T: (int, str)] = ...

-reveal_type(Constrained[int]) # revealed: Constrained[int]
+reveal_type(Constrained[int]) # revealed: <type alias 'Constrained[int]'>

 # TODO: error: [invalid-argument-type]
 # TODO: revealed: Constrained[Unknown]
-reveal_type(Constrained[IntSubclass]) # revealed: Constrained[IntSubclass]
+reveal_type(Constrained[IntSubclass]) # revealed: <type alias 'Constrained[IntSubclass]'>

-reveal_type(Constrained[str]) # revealed: Constrained[str]
+reveal_type(Constrained[str]) # revealed: <type alias 'Constrained[str]'>

 # TODO: error: [invalid-argument-type]
 # TODO: revealed: Unknown
-reveal_type(Constrained[int | str]) # revealed: Constrained[int | str]
+reveal_type(Constrained[int | str]) # revealed: <type alias 'Constrained[int | str]'>

 # error: [invalid-type-arguments] "Type `object` does not satisfy constraints `int`, `str` of type variable `T@Constrained`"
-reveal_type(Constrained[object]) # revealed: Constrained[Unknown]
+reveal_type(Constrained[object]) # revealed: <type alias 'Constrained[Unknown]'>
 ```

 If the type variable has a default, it can be omitted:

@@ -126,8 +126,8 @@ If the type variable has a default, it can be omitted:

 ```py
 type WithDefault[T, U = int] = ...

-reveal_type(WithDefault[str, str]) # revealed: WithDefault[str, str]
-reveal_type(WithDefault[str]) # revealed: WithDefault[str, int]
+reveal_type(WithDefault[str, str]) # revealed: <type alias 'WithDefault[str, str]'>
+reveal_type(WithDefault[str]) # revealed: <type alias 'WithDefault[str, int]'>
 ```

 If the type alias is not specialized explicitly, it is implicitly specialized to `Unknown`:
||||||
|
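As a quick reminder of what these aliases look like at a use site: the alias object itself is what `reveal_type` reports above, while annotations written with it expand to the aliased type. A minimal sketch with a hypothetical alias (the `type` statement needs Python 3.12+):

```py
# Minimal sketch (hypothetical alias, Python 3.12+): specializing a bounded PEP 695
# type alias at a use site.
type Pair[T: int] = tuple[T, T]

def total(p: Pair[int]) -> int:  # `Pair[int]` expands to `tuple[int, int]`
    return p[0] + p[1]

print(total((1, 2)))
```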
@@ -264,12 +264,19 @@ signatures don't count towards variance).

 ### `__new__` only

 ```py
+from ty_extensions import generic_context, into_callable
+
 class C[T]:
     x: T

     def __new__(cls, x: T) -> "C[T]":
         return object.__new__(cls)

+# revealed: ty_extensions.GenericContext[T@C]
+reveal_type(generic_context(C))
+# revealed: ty_extensions.GenericContext[T@C]
+reveal_type(generic_context(into_callable(C)))
+
 reveal_type(C(1)) # revealed: C[int]

 # error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"

@@ -279,11 +286,18 @@ wrong_innards: C[int] = C("five")

 ### `__init__` only

 ```py
+from ty_extensions import generic_context, into_callable
+
 class C[T]:
     x: T

     def __init__(self, x: T) -> None: ...

+# revealed: ty_extensions.GenericContext[T@C]
+reveal_type(generic_context(C))
+# revealed: ty_extensions.GenericContext[T@C]
+reveal_type(generic_context(into_callable(C)))
+
 reveal_type(C(1)) # revealed: C[int]

 # error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
@ -293,6 +307,8 @@ wrong_innards: C[int] = C("five")
|
||||||
### Identical `__new__` and `__init__` signatures
|
### Identical `__new__` and `__init__` signatures
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
class C[T]:
|
class C[T]:
|
||||||
x: T
|
x: T
|
||||||
|
|
||||||
|
|
@ -301,6 +317,11 @@ class C[T]:
|
||||||
|
|
||||||
def __init__(self, x: T) -> None: ...
|
def __init__(self, x: T) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C(1)) # revealed: C[int]
|
reveal_type(C(1)) # revealed: C[int]
|
||||||
|
|
||||||
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
||||||
|
|
@ -310,6 +331,8 @@ wrong_innards: C[int] = C("five")
|
||||||
### Compatible `__new__` and `__init__` signatures
|
### Compatible `__new__` and `__init__` signatures
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
class C[T]:
|
class C[T]:
|
||||||
x: T
|
x: T
|
||||||
|
|
||||||
|
|
@ -318,6 +341,11 @@ class C[T]:
|
||||||
|
|
||||||
def __init__(self, x: T) -> None: ...
|
def __init__(self, x: T) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C(1)) # revealed: C[int]
|
reveal_type(C(1)) # revealed: C[int]
|
||||||
|
|
||||||
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
|
||||||
|
|
@ -331,6 +359,11 @@ class D[T]:
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs) -> None: ...
|
def __init__(self, *args, **kwargs) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(D(1)) # revealed: D[int]
|
reveal_type(D(1)) # revealed: D[int]
|
||||||
|
|
||||||
# error: [invalid-assignment] "Object of type `D[int | str]` is not assignable to `D[int]`"
|
# error: [invalid-assignment] "Object of type `D[int | str]` is not assignable to `D[int]`"
|
||||||
|
|
@ -343,6 +376,8 @@ If either method comes from a generic base class, we don't currently use its inf
|
||||||
to specialize the class.
|
to specialize the class.
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
class C[T, U]:
|
class C[T, U]:
|
||||||
def __new__(cls, *args, **kwargs) -> "C[T, U]":
|
def __new__(cls, *args, **kwargs) -> "C[T, U]":
|
||||||
return object.__new__(cls)
|
return object.__new__(cls)
|
||||||
|
|
@ -350,18 +385,30 @@ class C[T, U]:
|
||||||
class D[V](C[V, int]):
|
class D[V](C[V, int]):
|
||||||
def __init__(self, x: V) -> None: ...
|
def __init__(self, x: V) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[V@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[V@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(D(1)) # revealed: D[Literal[1]]
|
reveal_type(D(1)) # revealed: D[Literal[1]]
|
||||||
```
|
```
|
||||||
|
|
||||||
### Generic class inherits `__init__` from generic base class
|
### Generic class inherits `__init__` from generic base class
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
class C[T, U]:
|
class C[T, U]:
|
||||||
def __init__(self, t: T, u: U) -> None: ...
|
def __init__(self, t: T, u: U) -> None: ...
|
||||||
|
|
||||||
class D[T, U](C[T, U]):
|
class D[T, U](C[T, U]):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(C(1, "str")) # revealed: C[Literal[1], Literal["str"]]
|
reveal_type(C(1, "str")) # revealed: C[Literal[1], Literal["str"]]
|
||||||
reveal_type(D(1, "str")) # revealed: D[Literal[1], Literal["str"]]
|
reveal_type(D(1, "str")) # revealed: D[Literal[1], Literal["str"]]
|
||||||
```
|
```
|
||||||
|
|
@ -371,9 +418,16 @@ reveal_type(D(1, "str")) # revealed: D[Literal[1], Literal["str"]]
|
||||||
This is a specific example of the above, since it was reported specifically by a user.
|
This is a specific example of the above, since it was reported specifically by a user.
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
class D[T, U](dict[T, U]):
|
class D[T, U](dict[T, U]):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(D(key=1)) # revealed: D[str, int]
|
reveal_type(D(key=1)) # revealed: D[str, int]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -384,8 +438,15 @@ for `tuple`, so we use a different mechanism to make sure it has the right inher
|
||||||
context. But from the user's point of view, this is another example of the above.)
|
context. But from the user's point of view, this is another example of the above.)
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
class C[T, U](tuple[T, U]): ...
|
class C[T, U](tuple[T, U]): ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C, U@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C, U@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C((1, 2))) # revealed: C[Literal[1], Literal[2]]
|
reveal_type(C((1, 2))) # revealed: C[Literal[1], Literal[2]]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -409,11 +470,18 @@ def func8(t1: tuple[complex, list[int]], t2: tuple[int, *tuple[str, ...]], t3: t
|
||||||
### `__init__` is itself generic
|
### `__init__` is itself generic
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
class C[T]:
|
class C[T]:
|
||||||
x: T
|
x: T
|
||||||
|
|
||||||
def __init__[S](self, x: T, y: S) -> None: ...
|
def __init__[S](self, x: T, y: S) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C, S@__init__]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C(1, 1)) # revealed: C[int]
|
reveal_type(C(1, 1)) # revealed: C[int]
|
||||||
reveal_type(C(1, "string")) # revealed: C[int]
|
reveal_type(C(1, "string")) # revealed: C[int]
|
||||||
reveal_type(C(1, True)) # revealed: C[int]
|
reveal_type(C(1, True)) # revealed: C[int]
|
||||||
|
|
@ -427,6 +495,7 @@ wrong_innards: C[int] = C("five", 1)
|
||||||
```py
|
```py
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
from typing import overload
|
from typing import overload
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
class C[T]:
|
class C[T]:
|
||||||
# we need to use the type variable or else the class is bivariant in T, and
|
# we need to use the type variable or else the class is bivariant in T, and
|
||||||
|
|
@ -443,6 +512,11 @@ class C[T]:
|
||||||
def __init__(self, x: int) -> None: ...
|
def __init__(self, x: int) -> None: ...
|
||||||
def __init__(self, x: str | bytes | int) -> None: ...
|
def __init__(self, x: str | bytes | int) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C("string")) # revealed: C[str]
|
reveal_type(C("string")) # revealed: C[str]
|
||||||
reveal_type(C(b"bytes")) # revealed: C[bytes]
|
reveal_type(C(b"bytes")) # revealed: C[bytes]
|
||||||
reveal_type(C(12)) # revealed: C[Unknown]
|
reveal_type(C(12)) # revealed: C[Unknown]
|
||||||
|
|
@ -470,6 +544,11 @@ class D[T, U]:
|
||||||
def __init__(self, t: T, u: U) -> None: ...
|
def __init__(self, t: T, u: U) -> None: ...
|
||||||
def __init__(self, *args) -> None: ...
|
def __init__(self, *args) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(D("string")) # revealed: D[str, Literal["string"]]
|
reveal_type(D("string")) # revealed: D[str, Literal["string"]]
|
||||||
reveal_type(D(1)) # revealed: D[str, Literal[1]]
|
reveal_type(D(1)) # revealed: D[str, Literal[1]]
|
||||||
reveal_type(D(1, "string")) # revealed: D[Literal[1], Literal["string"]]
|
reveal_type(D(1, "string")) # revealed: D[Literal[1], Literal["string"]]
|
||||||
|
|
@ -479,24 +558,42 @@ reveal_type(D(1, "string")) # revealed: D[Literal[1], Literal["string"]]
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class A[T]:
|
class A[T]:
|
||||||
x: T
|
x: T
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@A]
|
||||||
|
reveal_type(generic_context(A))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@A]
|
||||||
|
reveal_type(generic_context(into_callable(A)))
|
||||||
|
|
||||||
reveal_type(A(x=1)) # revealed: A[int]
|
reveal_type(A(x=1)) # revealed: A[int]
|
||||||
```
|
```
|
||||||
|
|
||||||
### Class typevar has another typevar as a default
|
### Class typevar has another typevar as a default
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
from ty_extensions import generic_context, into_callable
|
||||||
|
|
||||||
class C[T, U = T]: ...
|
class C[T, U = T]: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C, U@C]
|
||||||
|
reveal_type(generic_context(C))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@C, U@C]
|
||||||
|
reveal_type(generic_context(into_callable(C)))
|
||||||
|
|
||||||
reveal_type(C()) # revealed: C[Unknown, Unknown]
|
reveal_type(C()) # revealed: C[Unknown, Unknown]
|
||||||
|
|
||||||
class D[T, U = T]:
|
class D[T, U = T]:
|
||||||
def __init__(self) -> None: ...
|
def __init__(self) -> None: ...
|
||||||
|
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(D))
|
||||||
|
# revealed: ty_extensions.GenericContext[T@D, U@D]
|
||||||
|
reveal_type(generic_context(into_callable(D)))
|
||||||
|
|
||||||
reveal_type(D()) # revealed: D[Unknown, Unknown]
|
reveal_type(D()) # revealed: D[Unknown, Unknown]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -77,44 +77,44 @@ IntOrTypeVar = int | T
|
||||||
TypeVarOrNone = T | None
|
TypeVarOrNone = T | None
|
||||||
NoneOrTypeVar = None | T
|
NoneOrTypeVar = None | T
|
||||||
|
|
||||||
reveal_type(IntOrStr) # revealed: types.UnionType
|
reveal_type(IntOrStr) # revealed: <types.UnionType special form 'int | str'>
|
||||||
reveal_type(IntOrStrOrBytes1) # revealed: types.UnionType
|
reveal_type(IntOrStrOrBytes1) # revealed: <types.UnionType special form 'int | str | bytes'>
|
||||||
reveal_type(IntOrStrOrBytes2) # revealed: types.UnionType
|
reveal_type(IntOrStrOrBytes2) # revealed: <types.UnionType special form 'int | str | bytes'>
|
||||||
reveal_type(IntOrStrOrBytes3) # revealed: types.UnionType
|
reveal_type(IntOrStrOrBytes3) # revealed: <types.UnionType special form 'int | str | bytes'>
|
||||||
reveal_type(IntOrStrOrBytes4) # revealed: types.UnionType
|
reveal_type(IntOrStrOrBytes4) # revealed: <types.UnionType special form 'int | str | bytes'>
|
||||||
reveal_type(IntOrStrOrBytes5) # revealed: types.UnionType
|
reveal_type(IntOrStrOrBytes5) # revealed: <types.UnionType special form 'int | str | bytes'>
|
||||||
reveal_type(IntOrStrOrBytes6) # revealed: types.UnionType
|
reveal_type(IntOrStrOrBytes6) # revealed: <types.UnionType special form 'int | str | bytes'>
|
||||||
reveal_type(BytesOrIntOrStr) # revealed: types.UnionType
|
reveal_type(BytesOrIntOrStr) # revealed: <types.UnionType special form 'bytes | int | str'>
|
||||||
reveal_type(IntOrNone) # revealed: types.UnionType
|
reveal_type(IntOrNone) # revealed: <types.UnionType special form 'int | None'>
|
||||||
reveal_type(NoneOrInt) # revealed: types.UnionType
|
reveal_type(NoneOrInt) # revealed: <types.UnionType special form 'None | int'>
|
||||||
reveal_type(IntOrStrOrNone) # revealed: types.UnionType
|
reveal_type(IntOrStrOrNone) # revealed: <types.UnionType special form 'int | str | None'>
|
||||||
reveal_type(NoneOrIntOrStr) # revealed: types.UnionType
|
reveal_type(NoneOrIntOrStr) # revealed: <types.UnionType special form 'None | int | str'>
|
||||||
reveal_type(IntOrAny) # revealed: types.UnionType
|
reveal_type(IntOrAny) # revealed: <types.UnionType special form 'int | Any'>
|
||||||
reveal_type(AnyOrInt) # revealed: types.UnionType
|
reveal_type(AnyOrInt) # revealed: <types.UnionType special form 'Any | int'>
|
||||||
reveal_type(NoneOrAny) # revealed: types.UnionType
|
reveal_type(NoneOrAny) # revealed: <types.UnionType special form 'None | Any'>
|
||||||
reveal_type(AnyOrNone) # revealed: types.UnionType
|
reveal_type(AnyOrNone) # revealed: <types.UnionType special form 'Any | None'>
|
||||||
reveal_type(NeverOrAny) # revealed: types.UnionType
|
reveal_type(NeverOrAny) # revealed: <types.UnionType special form 'Any'>
|
||||||
reveal_type(AnyOrNever) # revealed: types.UnionType
|
reveal_type(AnyOrNever) # revealed: <types.UnionType special form 'Any'>
|
||||||
reveal_type(UnknownOrInt) # revealed: types.UnionType
|
reveal_type(UnknownOrInt) # revealed: <types.UnionType special form 'Unknown | int'>
|
||||||
reveal_type(IntOrUnknown) # revealed: types.UnionType
|
reveal_type(IntOrUnknown) # revealed: <types.UnionType special form 'int | Unknown'>
|
||||||
reveal_type(StrOrZero) # revealed: types.UnionType
|
reveal_type(StrOrZero) # revealed: <types.UnionType special form 'str | Literal[0]'>
|
||||||
reveal_type(ZeroOrStr) # revealed: types.UnionType
|
reveal_type(ZeroOrStr) # revealed: <types.UnionType special form 'Literal[0] | str'>
|
||||||
reveal_type(IntOrLiteralString) # revealed: types.UnionType
|
reveal_type(IntOrLiteralString) # revealed: <types.UnionType special form 'int | LiteralString'>
|
||||||
reveal_type(LiteralStringOrInt) # revealed: types.UnionType
|
reveal_type(LiteralStringOrInt) # revealed: <types.UnionType special form 'LiteralString | int'>
|
||||||
reveal_type(NoneOrTuple) # revealed: types.UnionType
|
reveal_type(NoneOrTuple) # revealed: <types.UnionType special form 'None | tuple[int, str]'>
|
||||||
reveal_type(TupleOrNone) # revealed: types.UnionType
|
reveal_type(TupleOrNone) # revealed: <types.UnionType special form 'tuple[int, str] | None'>
|
||||||
reveal_type(IntOrAnnotated) # revealed: types.UnionType
|
reveal_type(IntOrAnnotated) # revealed: <types.UnionType special form 'int | str'>
|
||||||
reveal_type(AnnotatedOrInt) # revealed: types.UnionType
|
reveal_type(AnnotatedOrInt) # revealed: <types.UnionType special form 'str | int'>
|
||||||
reveal_type(IntOrOptional) # revealed: types.UnionType
|
reveal_type(IntOrOptional) # revealed: <types.UnionType special form 'int | str | None'>
|
||||||
reveal_type(OptionalOrInt) # revealed: types.UnionType
|
reveal_type(OptionalOrInt) # revealed: <types.UnionType special form 'str | None | int'>
|
||||||
reveal_type(IntOrTypeOfStr) # revealed: types.UnionType
|
reveal_type(IntOrTypeOfStr) # revealed: <types.UnionType special form 'int | type[str]'>
|
||||||
reveal_type(TypeOfStrOrInt) # revealed: types.UnionType
|
reveal_type(TypeOfStrOrInt) # revealed: <types.UnionType special form 'type[str] | int'>
|
||||||
reveal_type(IntOrCallable) # revealed: types.UnionType
|
reveal_type(IntOrCallable) # revealed: <types.UnionType special form 'int | ((str, /) -> bytes)'>
|
||||||
reveal_type(CallableOrInt) # revealed: types.UnionType
|
reveal_type(CallableOrInt) # revealed: <types.UnionType special form '((str, /) -> bytes) | int'>
|
||||||
reveal_type(TypeVarOrInt) # revealed: types.UnionType
|
reveal_type(TypeVarOrInt) # revealed: <types.UnionType special form 'T@TypeVarOrInt | int'>
|
||||||
reveal_type(IntOrTypeVar) # revealed: types.UnionType
|
reveal_type(IntOrTypeVar) # revealed: <types.UnionType special form 'int | T@IntOrTypeVar'>
|
||||||
reveal_type(TypeVarOrNone) # revealed: types.UnionType
|
reveal_type(TypeVarOrNone) # revealed: <types.UnionType special form 'T@TypeVarOrNone | None'>
|
||||||
reveal_type(NoneOrTypeVar) # revealed: types.UnionType
|
reveal_type(NoneOrTypeVar) # revealed: <types.UnionType special form 'None | T@NoneOrTypeVar'>
|
||||||
|
|
||||||
def _(
|
def _(
|
||||||
int_or_str: IntOrStr,
|
int_or_str: IntOrStr,
|
||||||
|
@@ -295,7 +295,7 @@ X = Foo | Bar

 # In an ideal world, perhaps we would respect `Meta.__or__` here and reveal `str`?
 # But we still need to record what the elements are, since (according to the typing spec)
 # `X` is still a valid type alias
-reveal_type(X) # revealed: types.UnionType
+reveal_type(X) # revealed: <types.UnionType special form 'Foo | Bar'>

 def f(obj: X):
     reveal_type(obj) # revealed: Foo | Bar
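For context on the hunk above, here is a minimal runtime sketch of the situation its comments describe, where a metaclass overrides `__or__` so `Foo | Bar` does not evaluate to a `types.UnionType` instance at runtime, even though the typing spec still treats `X` as a valid type alias. The body of `Meta.__or__` below is hypothetical.

```py
# Minimal runtime sketch: `Foo | Bar` dispatches to the metaclass's `__or__`,
# so the runtime value here is a plain str, not a `types.UnionType` instance.
class Meta(type):
    def __or__(cls, other):
        return "surprise"

class Foo(metaclass=Meta): ...
class Bar(metaclass=Meta): ...

X = Foo | Bar
print(X)  # surprise
```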
@@ -391,16 +391,17 @@ MyOptional = T | None

 reveal_type(MyList) # revealed: <class 'list[T@MyList]'>
 reveal_type(MyDict) # revealed: <class 'dict[T@MyDict, U@MyDict]'>
-reveal_type(MyType) # revealed: GenericAlias
+reveal_type(MyType) # revealed: <special form 'type[T@MyType]'>
 reveal_type(IntAndType) # revealed: <class 'tuple[int, T@IntAndType]'>
 reveal_type(Pair) # revealed: <class 'tuple[T@Pair, T@Pair]'>
 reveal_type(Sum) # revealed: <class 'tuple[T@Sum, U@Sum]'>
-reveal_type(ListOrTuple) # revealed: types.UnionType
-reveal_type(ListOrTupleLegacy) # revealed: types.UnionType
+reveal_type(ListOrTuple) # revealed: <types.UnionType special form 'list[T@ListOrTuple] | tuple[T@ListOrTuple, ...]'>
+# revealed: <types.UnionType special form 'list[T@ListOrTupleLegacy] | tuple[T@ListOrTupleLegacy, ...]'>
+reveal_type(ListOrTupleLegacy)
 reveal_type(MyCallable) # revealed: @Todo(Callable[..] specialized with ParamSpec)
-reveal_type(AnnotatedType) # revealed: <typing.Annotated special form>
+reveal_type(AnnotatedType) # revealed: <special form 'typing.Annotated[T@AnnotatedType, <metadata>]'>
 reveal_type(TransparentAlias) # revealed: typing.TypeVar
-reveal_type(MyOptional) # revealed: types.UnionType
+reveal_type(MyOptional) # revealed: <types.UnionType special form 'T@MyOptional | None'>

 def _(
     list_of_ints: MyList[int],
@ -456,12 +457,12 @@ AnnotatedInt = AnnotatedType[int]
|
||||||
SubclassOfInt = MyType[int]
|
SubclassOfInt = MyType[int]
|
||||||
CallableIntToStr = MyCallable[[int], str]
|
CallableIntToStr = MyCallable[[int], str]
|
||||||
|
|
||||||
reveal_type(IntsOrNone) # revealed: types.UnionType
|
reveal_type(IntsOrNone) # revealed: <types.UnionType special form 'list[int] | None'>
|
||||||
reveal_type(IntsOrStrs) # revealed: types.UnionType
|
reveal_type(IntsOrStrs) # revealed: <types.UnionType special form 'tuple[int, int] | tuple[str, str]'>
|
||||||
reveal_type(ListOfPairs) # revealed: <class 'list[tuple[str, str]]'>
|
reveal_type(ListOfPairs) # revealed: <class 'list[tuple[str, str]]'>
|
||||||
reveal_type(ListOrTupleOfInts) # revealed: types.UnionType
|
reveal_type(ListOrTupleOfInts) # revealed: <types.UnionType special form 'list[int] | tuple[int, ...]'>
|
||||||
reveal_type(AnnotatedInt) # revealed: <typing.Annotated special form>
|
reveal_type(AnnotatedInt) # revealed: <special form 'typing.Annotated[int, <metadata>]'>
|
||||||
reveal_type(SubclassOfInt) # revealed: GenericAlias
|
reveal_type(SubclassOfInt) # revealed: <special form 'type[int]'>
|
||||||
reveal_type(CallableIntToStr) # revealed: @Todo(Callable[..] specialized with ParamSpec)
|
reveal_type(CallableIntToStr) # revealed: @Todo(Callable[..] specialized with ParamSpec)
|
||||||
|
|
||||||
def _(
|
def _(
|
||||||
|
|
@ -495,8 +496,8 @@ MyOtherType = MyType[T]
|
||||||
TypeOrList = MyType[B] | MyList[B]
|
TypeOrList = MyType[B] | MyList[B]
|
||||||
|
|
||||||
reveal_type(MyOtherList) # revealed: <class 'list[T@MyOtherList]'>
|
reveal_type(MyOtherList) # revealed: <class 'list[T@MyOtherList]'>
|
||||||
reveal_type(MyOtherType) # revealed: GenericAlias
|
reveal_type(MyOtherType) # revealed: <special form 'type[T@MyOtherType]'>
|
||||||
reveal_type(TypeOrList) # revealed: types.UnionType
|
reveal_type(TypeOrList) # revealed: <types.UnionType special form 'type[B@TypeOrList] | list[B@TypeOrList]'>
|
||||||
|
|
||||||
def _(
|
def _(
|
||||||
list_of_ints: MyOtherList[int],
|
list_of_ints: MyOtherList[int],
|
||||||
|
|
@ -898,7 +899,7 @@ from typing import Optional
|
||||||
|
|
||||||
MyOptionalInt = Optional[int]
|
MyOptionalInt = Optional[int]
|
||||||
|
|
||||||
reveal_type(MyOptionalInt) # revealed: types.UnionType
|
reveal_type(MyOptionalInt) # revealed: <types.UnionType special form 'int | None'>
|
||||||
|
|
||||||
def _(optional_int: MyOptionalInt):
|
def _(optional_int: MyOptionalInt):
|
||||||
reveal_type(optional_int) # revealed: int | None
|
reveal_type(optional_int) # revealed: int | None
|
||||||
|
@@ -931,9 +932,9 @@ MyLiteralString = LiteralString
 MyNoReturn = NoReturn
 MyNever = Never

-reveal_type(MyLiteralString) # revealed: typing.LiteralString
-reveal_type(MyNoReturn) # revealed: typing.NoReturn
-reveal_type(MyNever) # revealed: typing.Never
+reveal_type(MyLiteralString) # revealed: <special form 'typing.LiteralString'>
+reveal_type(MyNoReturn) # revealed: <special form 'typing.NoReturn'>
+reveal_type(MyNever) # revealed: <special form 'typing.Never'>

 def _(
     ls: MyLiteralString,
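A brief usage sketch of aliases like these (hypothetical function, standard `typing` only), since the hunk above only shows how the alias object itself is displayed:

```py
# Minimal sketch: an alias of `NoReturn` behaves like the special form in annotations.
from typing import NoReturn

MyNoReturn = NoReturn

def fail(msg: str) -> MyNoReturn:  # callers can assume this never returns normally
    raise RuntimeError(msg)
```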
||||||
|
|
@ -986,8 +987,8 @@ from typing import Union
|
||||||
IntOrStr = Union[int, str]
|
IntOrStr = Union[int, str]
|
||||||
IntOrStrOrBytes = Union[int, Union[str, bytes]]
|
IntOrStrOrBytes = Union[int, Union[str, bytes]]
|
||||||
|
|
||||||
reveal_type(IntOrStr) # revealed: types.UnionType
|
reveal_type(IntOrStr) # revealed: <types.UnionType special form 'int | str'>
|
||||||
reveal_type(IntOrStrOrBytes) # revealed: types.UnionType
|
reveal_type(IntOrStrOrBytes) # revealed: <types.UnionType special form 'int | str | bytes'>
|
||||||
|
|
||||||
def _(
|
def _(
|
||||||
int_or_str: IntOrStr,
|
int_or_str: IntOrStr,
|
||||||
|
|
@ -1015,7 +1016,7 @@ An empty `typing.Union` leads to a `TypeError` at runtime, so we emit an error.
|
||||||
# error: [invalid-type-form] "`typing.Union` requires at least one type argument"
|
# error: [invalid-type-form] "`typing.Union` requires at least one type argument"
|
||||||
EmptyUnion = Union[()]
|
EmptyUnion = Union[()]
|
||||||
|
|
||||||
reveal_type(EmptyUnion) # revealed: types.UnionType
|
reveal_type(EmptyUnion) # revealed: <types.UnionType special form 'Never'>
|
||||||
|
|
||||||
def _(empty: EmptyUnion):
|
def _(empty: EmptyUnion):
|
||||||
reveal_type(empty) # revealed: Never
|
reveal_type(empty) # revealed: Never
|
||||||
|
|
@ -1060,14 +1061,14 @@ SubclassOfG = type[G]
|
||||||
SubclassOfGInt = type[G[int]]
|
SubclassOfGInt = type[G[int]]
|
||||||
SubclassOfP = type[P]
|
SubclassOfP = type[P]
|
||||||
|
|
||||||
reveal_type(SubclassOfA) # revealed: GenericAlias
|
reveal_type(SubclassOfA) # revealed: <special form 'type[A]'>
|
||||||
reveal_type(SubclassOfAny) # revealed: GenericAlias
|
reveal_type(SubclassOfAny) # revealed: <special form 'type[Any]'>
|
||||||
reveal_type(SubclassOfAOrB1) # revealed: GenericAlias
|
reveal_type(SubclassOfAOrB1) # revealed: <special form 'type[A | B]'>
|
||||||
reveal_type(SubclassOfAOrB2) # revealed: types.UnionType
|
reveal_type(SubclassOfAOrB2) # revealed: <types.UnionType special form 'type[A] | type[B]'>
|
||||||
reveal_type(SubclassOfAOrB3) # revealed: types.UnionType
|
reveal_type(SubclassOfAOrB3) # revealed: <types.UnionType special form 'type[A] | type[B]'>
|
||||||
reveal_type(SubclassOfG) # revealed: GenericAlias
|
reveal_type(SubclassOfG) # revealed: <special form 'type[G[Unknown]]'>
|
||||||
reveal_type(SubclassOfGInt) # revealed: GenericAlias
|
reveal_type(SubclassOfGInt) # revealed: <special form 'type[G[int]]'>
|
||||||
reveal_type(SubclassOfP) # revealed: GenericAlias
|
reveal_type(SubclassOfP) # revealed: <special form 'type[P]'>
|
||||||
|
|
||||||
def _(
|
def _(
|
||||||
subclass_of_a: SubclassOfA,
|
subclass_of_a: SubclassOfA,
|
||||||
|
|
@ -1148,14 +1149,14 @@ SubclassOfG = Type[G]
|
||||||
SubclassOfGInt = Type[G[int]]
|
SubclassOfGInt = Type[G[int]]
|
||||||
SubclassOfP = Type[P]
|
SubclassOfP = Type[P]
|
||||||
|
|
||||||
reveal_type(SubclassOfA) # revealed: GenericAlias
|
reveal_type(SubclassOfA) # revealed: <special form 'type[A]'>
|
||||||
reveal_type(SubclassOfAny) # revealed: GenericAlias
|
reveal_type(SubclassOfAny) # revealed: <special form 'type[Any]'>
|
||||||
reveal_type(SubclassOfAOrB1) # revealed: GenericAlias
|
reveal_type(SubclassOfAOrB1) # revealed: <special form 'type[A | B]'>
|
||||||
reveal_type(SubclassOfAOrB2) # revealed: types.UnionType
|
reveal_type(SubclassOfAOrB2) # revealed: <types.UnionType special form 'type[A] | type[B]'>
|
||||||
reveal_type(SubclassOfAOrB3) # revealed: types.UnionType
|
reveal_type(SubclassOfAOrB3) # revealed: <types.UnionType special form 'type[A] | type[B]'>
|
||||||
reveal_type(SubclassOfG) # revealed: GenericAlias
|
reveal_type(SubclassOfG) # revealed: <special form 'type[G[Unknown]]'>
|
||||||
reveal_type(SubclassOfGInt) # revealed: GenericAlias
|
reveal_type(SubclassOfGInt) # revealed: <special form 'type[G[int]]'>
|
||||||
reveal_type(SubclassOfP) # revealed: GenericAlias
|
reveal_type(SubclassOfP) # revealed: <special form 'type[P]'>
|
||||||
|
|
||||||
def _(
|
def _(
|
||||||
subclass_of_a: SubclassOfA,
|
subclass_of_a: SubclassOfA,
|
||||||
|
|
@ -1270,25 +1271,25 @@ DefaultDictOrNone = DefaultDict[str, int] | None
|
||||||
DequeOrNone = Deque[str] | None
|
DequeOrNone = Deque[str] | None
|
||||||
OrderedDictOrNone = OrderedDict[str, int] | None
|
OrderedDictOrNone = OrderedDict[str, int] | None
|
||||||
|
|
||||||
reveal_type(NoneOrList) # revealed: types.UnionType
|
reveal_type(NoneOrList) # revealed: <types.UnionType special form 'None | list[str]'>
|
||||||
reveal_type(NoneOrSet) # revealed: types.UnionType
|
reveal_type(NoneOrSet) # revealed: <types.UnionType special form 'None | set[str]'>
|
||||||
reveal_type(NoneOrDict) # revealed: types.UnionType
|
reveal_type(NoneOrDict) # revealed: <types.UnionType special form 'None | dict[str, int]'>
|
||||||
reveal_type(NoneOrFrozenSet) # revealed: types.UnionType
|
reveal_type(NoneOrFrozenSet) # revealed: <types.UnionType special form 'None | frozenset[str]'>
|
||||||
reveal_type(NoneOrChainMap) # revealed: types.UnionType
|
reveal_type(NoneOrChainMap) # revealed: <types.UnionType special form 'None | ChainMap[str, int]'>
|
||||||
reveal_type(NoneOrCounter) # revealed: types.UnionType
|
reveal_type(NoneOrCounter) # revealed: <types.UnionType special form 'None | Counter[str]'>
|
||||||
reveal_type(NoneOrDefaultDict) # revealed: types.UnionType
|
reveal_type(NoneOrDefaultDict) # revealed: <types.UnionType special form 'None | defaultdict[str, int]'>
|
||||||
reveal_type(NoneOrDeque) # revealed: types.UnionType
|
reveal_type(NoneOrDeque) # revealed: <types.UnionType special form 'None | deque[str]'>
|
||||||
reveal_type(NoneOrOrderedDict) # revealed: types.UnionType
|
reveal_type(NoneOrOrderedDict) # revealed: <types.UnionType special form 'None | OrderedDict[str, int]'>
|
||||||
|
|
||||||
reveal_type(ListOrNone) # revealed: types.UnionType
|
reveal_type(ListOrNone) # revealed: <types.UnionType special form 'list[int] | None'>
|
||||||
reveal_type(SetOrNone) # revealed: types.UnionType
|
reveal_type(SetOrNone) # revealed: <types.UnionType special form 'set[int] | None'>
|
||||||
reveal_type(DictOrNone) # revealed: types.UnionType
|
reveal_type(DictOrNone) # revealed: <types.UnionType special form 'dict[str, int] | None'>
|
||||||
reveal_type(FrozenSetOrNone) # revealed: types.UnionType
|
reveal_type(FrozenSetOrNone) # revealed: <types.UnionType special form 'frozenset[int] | None'>
|
||||||
reveal_type(ChainMapOrNone) # revealed: types.UnionType
|
reveal_type(ChainMapOrNone) # revealed: <types.UnionType special form 'ChainMap[str, int] | None'>
|
||||||
reveal_type(CounterOrNone) # revealed: types.UnionType
|
reveal_type(CounterOrNone) # revealed: <types.UnionType special form 'Counter[str] | None'>
|
||||||
reveal_type(DefaultDictOrNone) # revealed: types.UnionType
|
reveal_type(DefaultDictOrNone) # revealed: <types.UnionType special form 'defaultdict[str, int] | None'>
|
||||||
reveal_type(DequeOrNone) # revealed: types.UnionType
|
reveal_type(DequeOrNone) # revealed: <types.UnionType special form 'deque[str] | None'>
|
||||||
reveal_type(OrderedDictOrNone) # revealed: types.UnionType
|
reveal_type(OrderedDictOrNone) # revealed: <types.UnionType special form 'OrderedDict[str, int] | None'>
|
||||||
|
|
||||||
def _(
|
def _(
|
||||||
none_or_list: NoneOrList,
|
none_or_list: NoneOrList,
|
||||||
|
|
@ -1381,9 +1382,9 @@ CallableNoArgs = Callable[[], None]
|
||||||
BasicCallable = Callable[[int, str], bytes]
|
BasicCallable = Callable[[int, str], bytes]
|
||||||
GradualCallable = Callable[..., str]
|
GradualCallable = Callable[..., str]
|
||||||
|
|
||||||
reveal_type(CallableNoArgs) # revealed: GenericAlias
|
reveal_type(CallableNoArgs) # revealed: <typing.Callable special form '() -> None'>
|
||||||
reveal_type(BasicCallable) # revealed: GenericAlias
|
reveal_type(BasicCallable) # revealed: <typing.Callable special form '(int, str, /) -> bytes'>
|
||||||
reveal_type(GradualCallable) # revealed: GenericAlias
|
reveal_type(GradualCallable) # revealed: <typing.Callable special form '(...) -> str'>
|
||||||
|
|
||||||
def _(
|
def _(
|
||||||
callable_no_args: CallableNoArgs,
|
callable_no_args: CallableNoArgs,
|
||||||
|
|
@ -1415,8 +1416,8 @@ InvalidCallable1 = Callable[[int]]
|
||||||
# error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
|
# error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
|
||||||
InvalidCallable2 = Callable[int, str]
|
InvalidCallable2 = Callable[int, str]
|
||||||
|
|
||||||
reveal_type(InvalidCallable1) # revealed: GenericAlias
|
reveal_type(InvalidCallable1) # revealed: <typing.Callable special form '(...) -> Unknown'>
|
||||||
reveal_type(InvalidCallable2) # revealed: GenericAlias
|
reveal_type(InvalidCallable2) # revealed: <typing.Callable special form '(...) -> Unknown'>
|
||||||
|
|
||||||
def _(invalid_callable1: InvalidCallable1, invalid_callable2: InvalidCallable2):
|
def _(invalid_callable1: InvalidCallable1, invalid_callable2: InvalidCallable2):
|
||||||
reveal_type(invalid_callable1) # revealed: (...) -> Unknown
|
reveal_type(invalid_callable1) # revealed: (...) -> Unknown
|
||||||
|
|
|
||||||
|
@@ -53,8 +53,8 @@ in `import os.path as os.path` the `os.path` is not a valid identifier.

 ```py
 from b import Any, Literal, foo

-reveal_type(Any) # revealed: typing.Any
-reveal_type(Literal) # revealed: typing.Literal
+reveal_type(Any) # revealed: <special form 'typing.Any'>
+reveal_type(Literal) # revealed: <special form 'typing.Literal'>
 reveal_type(foo) # revealed: <module 'foo'>
 ```
@ -132,7 +132,7 @@ reveal_type(Any) # revealed: Unknown
|
||||||
```pyi
|
```pyi
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
reveal_type(Any) # revealed: typing.Any
|
reveal_type(Any) # revealed: <special form 'typing.Any'>
|
||||||
```
|
```
|
||||||
|
|
||||||
## Nested mixed re-export and not
|
## Nested mixed re-export and not
|
||||||
|
|
@ -169,7 +169,7 @@ reveal_type(Any) # revealed: Unknown
|
||||||
```pyi
|
```pyi
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
reveal_type(Any) # revealed: typing.Any
|
reveal_type(Any) # revealed: <special form 'typing.Any'>
|
||||||
```
|
```
|
||||||
|
|
||||||
## Exported as different name
|
## Exported as different name
|
||||||
|
|
|
||||||
|
@@ -22,10 +22,10 @@ python = "/.venv"

 `/.venv/pyvenv.cfg`:

 ```cfg
-home = /doo/doo/wop/cpython-3.13.2-macos-aarch64-none/bin
+home = /do/re/mi//cpython-3.13.2-macos-aarch64-none/bin
 ```

-`/doo/doo/wop/cpython-3.13.2-macos-aarch64-none/bin/python`:
+`/do/re/mi//cpython-3.13.2-macos-aarch64-none/bin/python`:

 ```text
 ```
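For context on why the `home` key matters in these fixtures: `pyvenv.cfg` is not INI (it has no section header), so a resolver typically reads its `key = value` lines by hand to locate the base interpreter directory. A minimal sketch, with a hypothetical helper name:

```py
# Minimal sketch (hypothetical helper): extract the `home` value from a pyvenv.cfg.
from pathlib import Path

def read_home(pyvenv_cfg: Path) -> str | None:
    for line in pyvenv_cfg.read_text().splitlines():
        key, sep, value = line.partition("=")
        if sep and key.strip() == "home":
            return value.strip()  # e.g. "/do/re/mi//cpython-3.13.2-macos-aarch64-none/bin"
    return None
```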
@ -54,11 +54,11 @@ python = "/.venv"
|
||||||
`/.venv/pyvenv.cfg`:
|
`/.venv/pyvenv.cfg`:
|
||||||
|
|
||||||
```cfg
|
```cfg
|
||||||
home = /doo/doo/wop/cpython-3.13.2-macos-aarch64-none/bin
|
home = /do/re/mi//cpython-3.13.2-macos-aarch64-none/bin
|
||||||
version = wut
|
version = wut
|
||||||
```
|
```
|
||||||
|
|
||||||
`/doo/doo/wop/cpython-3.13.2-macos-aarch64-none/bin/python`:
|
`/do/re/mi//cpython-3.13.2-macos-aarch64-none/bin/python`:
|
||||||
|
|
||||||
```text
|
```text
|
||||||
```
|
```
|
||||||
|
|
@ -87,11 +87,11 @@ python = "/.venv"
|
||||||
`/.venv/pyvenv.cfg`:
|
`/.venv/pyvenv.cfg`:
|
||||||
|
|
||||||
```cfg
|
```cfg
|
||||||
home = /doo/doo/wop/cpython-3.13.2-macos-aarch64-none/bin
|
home = /do/re/mi//cpython-3.13.2-macos-aarch64-none/bin
|
||||||
version_info = no-really-wut
|
version_info = no-really-wut
|
||||||
```
|
```
|
||||||
|
|
||||||
`/doo/doo/wop/cpython-3.13.2-macos-aarch64-none/bin/python`:
|
`/do/re/mi//cpython-3.13.2-macos-aarch64-none/bin/python`:
|
||||||
|
|
||||||
```text
|
```text
|
||||||
```
|
```
|
||||||
|
|
@ -132,7 +132,7 @@ python = "/.venv"
|
||||||
`/.venv/pyvenv.cfg`:
|
`/.venv/pyvenv.cfg`:
|
||||||
|
|
||||||
```cfg
|
```cfg
|
||||||
home = /doo/doo/wop/cpython-3.13.2-macos-aarch64-none/bin
|
home = /do/re/mi//cpython-3.13.2-macos-aarch64-none/bin
|
||||||
implementation = CPython
|
implementation = CPython
|
||||||
uv = 0.7.6
|
uv = 0.7.6
|
||||||
version_info = 3.13.2
|
version_info = 3.13.2
|
||||||
|
|
@ -141,7 +141,7 @@ prompt = ruff
|
||||||
extends-environment = /.other-environment
|
extends-environment = /.other-environment
|
||||||
```
|
```
|
||||||
|
|
||||||
`/doo/doo/wop/cpython-3.13.2-macos-aarch64-none/bin/python`:
|
`/do/re/mi//cpython-3.13.2-macos-aarch64-none/bin/python`:
|
||||||
|
|
||||||
```text
|
```text
|
||||||
```
|
```
|
||||||
|
|
@ -182,12 +182,12 @@ python = "/.venv"
|
||||||
`/.venv/pyvenv.cfg`:
|
`/.venv/pyvenv.cfg`:
|
||||||
|
|
||||||
```cfg
|
```cfg
|
||||||
home = /doo/doo/wop/cpython-3.13.2-macos-aarch64-none/bin
|
home = /do/re/mi//cpython-3.13.2-macos-aarch64-none/bin
|
||||||
version_info = 3.13
|
version_info = 3.13
|
||||||
command = /.pyenv/versions/3.13.3/bin/python3.13 -m venv --without-pip --prompt="python-default/3.13.3" /somewhere-else/python/virtualenvs/python-default/3.13.3
|
command = /.pyenv/versions/3.13.3/bin/python3.13 -m venv --without-pip --prompt="python-default/3.13.3" /somewhere-else/python/virtualenvs/python-default/3.13.3
|
||||||
```
|
```
|
||||||
|
|
||||||
`/doo/doo/wop/cpython-3.13.2-macos-aarch64-none/bin/python`:
|
`/do/re/mi//cpython-3.13.2-macos-aarch64-none/bin/python`:
|
||||||
|
|
||||||
```text
|
```text
|
||||||
```
|
```
|
||||||
|
|
|
||||||
|
@@ -1336,6 +1336,69 @@ reveal_type(g) # revealed: Unknown
 reveal_type(h) # revealed: Unknown
 ```

+## Star-imports can affect member states
+
+If a star-import pulls in a symbol that was previously defined in the importing module (e.g. `obj`),
+it can affect the state of associated member expressions (e.g. `obj.attr` or `obj[0]`). In the test
+below, note how the types of the corresponding attribute expressions change after the star import
+affects the object:
+
+`common.py`:
+
+```py
+class C:
+    attr: int | None
+```
+
+`exporter.py`:
+
+```py
+from common import C
+
+def flag() -> bool:
+    return True
+
+should_be_imported: C = C()
+
+if flag():
+    might_be_imported: C = C()
+
+if False:
+    should_not_be_imported: C = C()
+```
+
+`main.py`:
+
+```py
+from common import C
+
+should_be_imported = C()
+might_be_imported = C()
+should_not_be_imported = C()
+
+# We start with the plain attribute types:
+reveal_type(should_be_imported.attr) # revealed: int | None
+reveal_type(might_be_imported.attr) # revealed: int | None
+reveal_type(should_not_be_imported.attr) # revealed: int | None
+
+# Now we narrow the types by assignment:
+should_be_imported.attr = 1
+might_be_imported.attr = 1
+should_not_be_imported.attr = 1
+
+reveal_type(should_be_imported.attr) # revealed: Literal[1]
+reveal_type(might_be_imported.attr) # revealed: Literal[1]
+reveal_type(should_not_be_imported.attr) # revealed: Literal[1]
+
+# This star import adds bindings for `should_be_imported` and `might_be_imported`:
+from exporter import *
+
+# As expected, narrowing is "reset" for the first two variables, but not for the third:
+reveal_type(should_be_imported.attr) # revealed: int | None
+reveal_type(might_be_imported.attr) # revealed: int | None
+reveal_type(should_not_be_imported.attr) # revealed: Literal[1]
+```
||||||
## Cyclic star imports
|
## Cyclic star imports
|
||||||
|
|
||||||
Believe it or not, this code does *not* raise an exception at runtime!
|
Believe it or not, this code does *not* raise an exception at runtime!
|
||||||
|
|
@ -1374,7 +1437,7 @@ are present due to `*` imports.
|
||||||
import collections.abc
|
import collections.abc
|
||||||
|
|
||||||
reveal_type(collections.abc.Sequence) # revealed: <class 'Sequence'>
|
reveal_type(collections.abc.Sequence) # revealed: <class 'Sequence'>
|
||||||
reveal_type(collections.abc.Callable) # revealed: typing.Callable
|
reveal_type(collections.abc.Callable) # revealed: <special form 'typing.Callable'>
|
||||||
reveal_type(collections.abc.Set) # revealed: <class 'AbstractSet'>
|
reveal_type(collections.abc.Set) # revealed: <class 'AbstractSet'>
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -6,6 +6,15 @@ python file in some random workspace, and so we need to be more tolerant of situ
|
||||||
fly in a published package, cases where we're not configured as well as we'd like, or cases where
|
fly in a published package, cases where we're not configured as well as we'd like, or cases where
|
||||||
two projects in a monorepo have conflicting definitions (but we want to analyze both at once).
|
two projects in a monorepo have conflicting definitions (but we want to analyze both at once).
|
||||||
|
|
||||||
|
In practice these tests cover what we call "desperate module resolution" which, when an import
|
||||||
|
fails, results in us walking up the ancestor directories of the importing file and trying those as
|
||||||
|
"desperate search-paths".
|
||||||
|
|
||||||
|
Currently desperate search-paths are restricted to subdirectories of the first-party search-path
|
||||||
|
(the directory you're running `ty` in). For now we only consider one desperate search-path: the
|
||||||
|
closest ancestor directory containing a `pyproject.toml`. In the future we may want to try every
|
||||||
|
ancestor `pyproject.toml` or every ancestor directory.
|
||||||
|
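To make the lookup concrete, here is a minimal sketch of the "closest ancestor with a `pyproject.toml`" rule described above. The function name and signature are hypothetical and purely illustrative; this is not ty's actual implementation, just the shape of the walk.

```py
from pathlib import Path

def desperate_search_path(importing_file: Path, first_party_root: Path) -> Path | None:
    """Hypothetical sketch: walk up from the importing file and return the
    closest ancestor directory containing a `pyproject.toml`, staying inside
    the first-party search-path (the directory `ty` runs in)."""
    for ancestor in importing_file.resolve().parents:
        if not ancestor.is_relative_to(first_party_root.resolve()):
            return None  # never escape the first-party search-path
        if (ancestor / "pyproject.toml").is_file():
            return ancestor
    return None
```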
|
||||||
## Invalid Names
|
## Invalid Names
|
||||||
|
|
||||||
While you can't syntactically refer to a module with an invalid name (i.e. one with a `-`, or that
|
While you can't syntactically refer to a module with an invalid name (i.e. one with a `-`, or that
|
||||||
|
|
@ -18,9 +27,10 @@ strings and does in fact allow syntactically invalid module names.
|
||||||
|
|
||||||
### Current File Is Invalid Module Name
|
### Current File Is Invalid Module Name
|
||||||
|
|
||||||
Relative and absolute imports should resolve fine in a file that isn't a valid module name.
|
Relative and absolute imports should resolve fine in a file whose name isn't a valid module name (in this
|
||||||
|
case, it could be imported via `importlib.import_module`).
|
||||||
|
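As a side note, the `importlib.import_module` escape hatch mentioned above can be sketched as follows. This is a standalone illustration, not part of the test fixture below, and it assumes a file named `my-mod.py` sits on `sys.path`:

```py
import importlib

# `import my-mod` is a SyntaxError, but the string-based API doesn't care
# about identifier rules, so a file named `my-mod.py` on `sys.path` loads fine.
mod = importlib.import_module("my-mod")
print(mod)
```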
|
||||||
`my-main.py`:
|
`tests/my-mod.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# TODO: there should be no errors in this file
|
# TODO: there should be no errors in this file
|
||||||
|
|
@ -37,13 +47,13 @@ reveal_type(mod2.y) # revealed: Unknown
|
||||||
reveal_type(mod3.z) # revealed: int
|
reveal_type(mod3.z) # revealed: int
|
||||||
```
|
```
|
||||||
|
|
||||||
`mod1.py`:
|
`tests/mod1.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
x: int = 1
|
x: int = 1
|
||||||
```
|
```
|
||||||
|
|
||||||
`mod2.py`:
|
`tests/mod2.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
y: int = 2
|
y: int = 2
|
||||||
|
|
@ -57,13 +67,16 @@ z: int = 2
|
||||||
|
|
||||||
### Current Directory Is Invalid Module Name
|
### Current Directory Is Invalid Module Name
|
||||||
|
|
||||||
Relative and absolute imports should resolve fine in a dir that isn't a valid module name.
|
If python files are rooted in a directory with an invalid module name and they relatively import
|
||||||
|
each other, there's probably no coherent explanation for what's going on and it's fine that the
|
||||||
|
relative imports don't resolve (but maybe we could provide some good diagnostics).
|
||||||
|
|
||||||
`my-tests/main.py`:
|
This is a case that sufficient desperation might "accidentally" make work, so it's included here as
|
||||||
|
a canary in the coal mine.
|
||||||
|
|
||||||
|
`my-tests/mymod.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# TODO: there should be no errors in this file
|
|
||||||
|
|
||||||
# error: [unresolved-import]
|
# error: [unresolved-import]
|
||||||
from .mod1 import x
|
from .mod1 import x
|
||||||
|
|
||||||
|
|
@ -94,46 +107,97 @@ y: int = 2
|
||||||
z: int = 2
|
z: int = 2
|
||||||
```
|
```
|
||||||
|
|
||||||
### Current Directory Is Invalid Package Name
|
### Ancestor Directory Is Invalid Module Name
|
||||||
|
|
||||||
Relative and absolute imports should resolve fine in a dir that isn't a valid package name, even if
|
Relative and absolute imports *could* resolve fine in the first-party search-path, even if one of
|
||||||
it contains an `__init__.py`:
|
the ancestor dirs is an invalid module name. That is, in this case we will be inclined to compute module
|
||||||
|
names like `my-proj.tests.mymod`, but it could be that in practice the user always runs this code
|
||||||
|
rooted in the `my-proj` directory.
|
||||||
|
|
||||||
`my-tests/__init__.py`:
|
This case is hard for us to detect and handle in a principled way, but two more extreme kinds of
|
||||||
|
desperation could handle this:
|
||||||
|
|
||||||
|
- try every ancestor as a desperate search-path
|
||||||
|
- try the closest ancestor with an invalid module name as a desperate search-path
|
||||||
|
|
||||||
|
The second one is a bit messed up because it could result in situations where someone can get a
|
||||||
|
worse experience because a directory happened to *not* be invalid as a module name (`myproj` or
|
||||||
|
`my_proj`).
|
||||||
|
|
||||||
|
`my-proj/tests/mymod.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
```
|
# TODO: it would be *nice* if there were no errors in this file
|
||||||
|
|
||||||
`my-tests/main.py`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
# TODO: there should be no errors in this file
|
|
||||||
|
|
||||||
# error: [unresolved-import]
|
# error: [unresolved-import]
|
||||||
from .mod1 import x
|
from .mod1 import x
|
||||||
|
|
||||||
# error: [unresolved-import]
|
# error: [unresolved-import]
|
||||||
from . import mod2
|
from . import mod2
|
||||||
|
|
||||||
|
# error: [unresolved-import]
|
||||||
import mod3
|
import mod3
|
||||||
|
|
||||||
reveal_type(x) # revealed: Unknown
|
reveal_type(x) # revealed: Unknown
|
||||||
reveal_type(mod2.y) # revealed: Unknown
|
reveal_type(mod2.y) # revealed: Unknown
|
||||||
reveal_type(mod3.z) # revealed: int
|
reveal_type(mod3.z) # revealed: Unknown
|
||||||
```
|
```
|
||||||
|
|
||||||
`my-tests/mod1.py`:
|
`my-proj/tests/mod1.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
x: int = 1
|
x: int = 1
|
||||||
```
|
```
|
||||||
|
|
||||||
`my-tests/mod2.py`:
|
`my-proj/tests/mod2.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
y: int = 2
|
y: int = 2
|
||||||
```
|
```
|
||||||
|
|
||||||
`mod3.py`:
|
`my-proj/mod3.py`:
|
||||||
|
|
||||||
|
```py
|
||||||
|
z: int = 2
|
||||||
|
```
|
||||||
|
|
||||||
|
### Ancestor Directory Above `pyproject.toml` is invalid
|
||||||
|
|
||||||
|
Like the previous tests but with a `pyproject.toml` existing between the invalid name and the python
|
||||||
|
files. This is an "easier" case if we use the `pyproject.toml` as a hint about what's going on.
|
||||||
|
|
||||||
|
`my-proj/pyproject.toml`:
|
||||||
|
|
||||||
|
```text
|
||||||
|
name = "my_proj"
|
||||||
|
version = "0.1.0"
|
||||||
|
```
|
||||||
|
|
||||||
|
`my-proj/tests/main.py`:
|
||||||
|
|
||||||
|
```py
|
||||||
|
from .mod1 import x
|
||||||
|
from . import mod2
|
||||||
|
import mod3
|
||||||
|
|
||||||
|
reveal_type(x) # revealed: int
|
||||||
|
reveal_type(mod2.y) # revealed: int
|
||||||
|
reveal_type(mod3.z) # revealed: int
|
||||||
|
```
|
||||||
|
|
||||||
|
`my-proj/tests/mod1.py`:
|
||||||
|
|
||||||
|
```py
|
||||||
|
x: int = 1
|
||||||
|
```
|
||||||
|
|
||||||
|
`my-proj/tests/mod2.py`:
|
||||||
|
|
||||||
|
```py
|
||||||
|
y: int = 2
|
||||||
|
```
|
||||||
|
|
||||||
|
`my-proj/mod3.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
z: int = 2
|
z: int = 2
|
||||||
|
|
@ -141,7 +205,7 @@ z: int = 2
|
||||||
|
|
||||||
## Multiple Projects
|
## Multiple Projects
|
||||||
|
|
||||||
It's common for a monorepo to define many separate projects that may or may not depend on eachother
|
It's common for a monorepo to define many separate projects that may or may not depend on each other
|
||||||
and are stitched together with a package manager like `uv` or `poetry`, often as editables. In this
|
and are stitched together with a package manager like `uv` or `poetry`, often as editables. In this
|
||||||
case, especially when running as an LSP, we want to be able to analyze all of the projects at once,
|
case, especially when running as an LSP, we want to be able to analyze all of the projects at once,
|
||||||
allowing us to reuse results between projects, without getting confused about things that only make
|
allowing us to reuse results between projects, without getting confused about things that only make
|
||||||
|
|
@ -150,7 +214,7 @@ sense when analyzing the project separately.
|
||||||
The following tests will feature two projects, `a` and `b` where the "real" packages are found under
|
The following tests will feature two projects, `a` and `b` where the "real" packages are found under
|
||||||
`src/` subdirectories (and we've been configured to understand that), but each project also contains
|
`src/` subdirectories (and we've been configured to understand that), but each project also contains
|
||||||
other python files, in their roots or subdirectories, which relatively
|
other python files, in their roots or subdirectories, which relatively
|
||||||
import eachother and also absolutely import the main package of the project. All of these imports
|
import each other and also absolutely import the main package of the project. All of these imports
|
||||||
*should* resolve.
|
*should* resolve.
|
||||||
|
|
||||||
Often the fact that there is both an `a` and `b` project seemingly won't matter, but many possible
|
Often the fact that there is both an `a` and `b` project seemingly won't matter, but many possible
|
||||||
|
|
@ -164,13 +228,36 @@ following examples include them in case they help.
|
||||||
|
|
||||||
Here we have a fairly typical situation where there are two projects `aproj` and `bproj` where the
|
Here we have a fairly typical situation where there are two projects `aproj` and `bproj` where the
|
||||||
"real" packages are found under `src/` subdirectories, but each project also contains a `tests/`
|
"real" packages are found under `src/` subdirectories, but each project also contains a `tests/`
|
||||||
directory that contains python files which relatively import eachother and also absolutely import
|
directory that contains python files which relatively import each other and also absolutely import
|
||||||
the package they test. All of these imports *should* resolve.
|
the package they test. All of these imports *should* resolve.
|
||||||
|
|
||||||
```toml
|
```toml
|
||||||
[environment]
|
[environment]
|
||||||
# This is similar to what we would compute for installed editables
|
# Setup a venv with editables for aproj/src/ and bproj/src/
|
||||||
extra-paths = ["aproj/src/", "bproj/src/"]
|
python = "/.venv"
|
||||||
|
```
|
||||||
|
|
||||||
|
`/.venv/pyvenv.cfg`:
|
||||||
|
|
||||||
|
```cfg
|
||||||
|
home = /do/re/mi//cpython-3.13.2-macos-aarch64-none/bin
|
||||||
|
```
|
||||||
|
|
||||||
|
`/do/re/mi//cpython-3.13.2-macos-aarch64-none/bin/python`:
|
||||||
|
|
||||||
|
```text
|
||||||
|
```
|
||||||
|
|
||||||
|
`/.venv/<path-to-site-packages>/a.pth`:
|
||||||
|
|
||||||
|
```pth
|
||||||
|
aproj/src/
|
||||||
|
```
|
||||||
|
|
||||||
|
`/.venv/<path-to-site-packages>/b.pth`:
|
||||||
|
|
||||||
|
```pth
|
||||||
|
bproj/src/
|
||||||
```
|
```
|
||||||
|
|
||||||
`aproj/tests/test1.py`:
|
`aproj/tests/test1.py`:
|
||||||
|
|
@ -239,16 +326,60 @@ version = "0.1.0"
|
||||||
y: str = "20"
|
y: str = "20"
|
||||||
```
|
```
|
||||||
|
|
||||||
### Tests Directory With Ambiguous Project Directories
|
### Tests Directory With Ambiguous Project Directories Via Editables
|
||||||
|
|
||||||
The same situation as the previous test but instead of the project `a` being in a directory `aproj`
|
The same situation as the previous test but instead of the project `a` being in a directory `aproj`
|
||||||
to disambiguate, we now need to avoid getting confused about whether `a/` or `a/src/a/` is the
|
to disambiguate, we now need to avoid getting confused about whether `a/` or `a/src/a/` is the
|
||||||
package `a` while still resolving imports.
|
package `a` while still resolving imports.
|
||||||
|
|
||||||
|
Unfortunately this is quite a difficult square to circle, as `a/` is a namespace package of `a` and
|
||||||
|
`a/src/a/` is a regular package of `a`. **This is a very bad situation you're not supposed to ever
|
||||||
|
create, and we are now very sensitive to precise search-path ordering.**
|
||||||
|
|
||||||
|
Here the use of editables means that `a/` has higher priority than `a/src/a/`.
|
||||||
|
|
||||||
|
Somehow this results in `a/tests/test1.py` being able to resolve `.setup` but not `.`.
|
||||||
|
|
||||||
|
My best guess is that in this state we can resolve regular modules in `a/tests/` but not namespace
|
||||||
|
packages because we have some extra validation for namespace packages conflicted by regular
|
||||||
|
packages, but that validation isn't applied when we successfully resolve a submodule of the
|
||||||
|
namespace package.
|
||||||
|
|
||||||
|
In this case, we find that `a/tests/test1.py` matches on the first-party path as `a.tests.test1`
|
||||||
|
and is syntactically valid. We then resolve `a.tests.test1`, and because the namespace package
|
||||||
|
(`/a/`) comes first, we succeed. We then syntactically compute `.` to be `a.tests`.
|
||||||
|
|
||||||
|
When we go to look up `a.tests.setup`, whatever grace allowed `a.tests.test1` to resolve still
|
||||||
|
works, so it resolves too. However, when we try to resolve `a.tests` on its own, some additional
|
||||||
|
validation rejects the namespace package conflicting with the regular package.
|
||||||
|
|
||||||
```toml
|
```toml
|
||||||
[environment]
|
[environment]
|
||||||
# This is similar to what we would compute for installed editables
|
# Setup a venv with editables for a/src/ and b/src/
|
||||||
extra-paths = ["a/src/", "b/src/"]
|
python = "/.venv"
|
||||||
|
```
|
||||||
|
|
||||||
|
`/.venv/pyvenv.cfg`:
|
||||||
|
|
||||||
|
```cfg
|
||||||
|
home = /do/re/mi//cpython-3.13.2-macos-aarch64-none/bin
|
||||||
|
```
|
||||||
|
|
||||||
|
`/do/re/mi//cpython-3.13.2-macos-aarch64-none/bin/python`:
|
||||||
|
|
||||||
|
```text
|
||||||
|
```
|
||||||
|
|
||||||
|
`/.venv/<path-to-site-packages>/a.pth`:
|
||||||
|
|
||||||
|
```pth
|
||||||
|
a/src/
|
||||||
|
```
|
||||||
|
|
||||||
|
`/.venv/<path-to-site-packages>/b.pth`:
|
||||||
|
|
||||||
|
```pth
|
||||||
|
b/src/
|
||||||
```
|
```
|
||||||
|
|
||||||
`a/tests/test1.py`:
|
`a/tests/test1.py`:
|
||||||
|
|
@ -256,7 +387,6 @@ extra-paths = ["a/src/", "b/src/"]
|
||||||
```py
|
```py
|
||||||
# TODO: there should be no errors in this file.
|
# TODO: there should be no errors in this file.
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from .setup import x
|
from .setup import x
|
||||||
|
|
||||||
# error: [unresolved-import]
|
# error: [unresolved-import]
|
||||||
|
|
@ -264,7 +394,7 @@ from . import setup
|
||||||
from a import y
|
from a import y
|
||||||
import a
|
import a
|
||||||
|
|
||||||
reveal_type(x) # revealed: Unknown
|
reveal_type(x) # revealed: int
|
||||||
reveal_type(setup.x) # revealed: Unknown
|
reveal_type(setup.x) # revealed: Unknown
|
||||||
reveal_type(y) # revealed: int
|
reveal_type(y) # revealed: int
|
||||||
reveal_type(a.y) # revealed: int
|
reveal_type(a.y) # revealed: int
|
||||||
|
|
@ -294,7 +424,6 @@ y: int = 10
|
||||||
```py
|
```py
|
||||||
# TODO: there should be no errors in this file
|
# TODO: there should be no errors in this file
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from .setup import x
|
from .setup import x
|
||||||
|
|
||||||
# error: [unresolved-import]
|
# error: [unresolved-import]
|
||||||
|
|
@ -302,7 +431,7 @@ from . import setup
|
||||||
from b import y
|
from b import y
|
||||||
import b
|
import b
|
||||||
|
|
||||||
reveal_type(x) # revealed: Unknown
|
reveal_type(x) # revealed: str
|
||||||
reveal_type(setup.x) # revealed: Unknown
|
reveal_type(setup.x) # revealed: Unknown
|
||||||
reveal_type(y) # revealed: str
|
reveal_type(y) # revealed: str
|
||||||
reveal_type(b.y) # revealed: str
|
reveal_type(b.y) # revealed: str
|
||||||
|
|
@ -327,10 +456,15 @@ version = "0.1.0"
|
||||||
y: str = "20"
|
y: str = "20"
|
||||||
```
|
```
|
||||||
|
|
||||||
### Tests Package With Ambiguous Project Directories
|
### Tests Directory With Ambiguous Project Directories Via `extra-paths`
|
||||||
|
|
||||||
The same situation as the previous test but `tests/__init__.py` is also defined, in case that
|
The same situation as the previous test, but instead of using editables we use `extra-paths`, which
|
||||||
complicates the situation.
|
have higher priority than the first-party search-path. Thus, `/a/src/a/` is always seen before
|
||||||
|
`/a/`.
|
||||||
|
|
||||||
|
In this case everything works well because the namespace package `a.tests` (`a/tests/`) is
|
||||||
|
completely hidden by the regular package `a` (`a/src/a/`) and so we immediately enter desperate
|
||||||
|
resolution and use the now-unambiguous namespace package `tests`.
|
||||||
|
|
||||||
```toml
|
```toml
|
||||||
[environment]
|
[environment]
|
||||||
|
|
@ -340,27 +474,17 @@ extra-paths = ["a/src/", "b/src/"]
|
||||||
`a/tests/test1.py`:
|
`a/tests/test1.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# TODO: there should be no errors in this file.
|
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from .setup import x
|
from .setup import x
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from . import setup
|
from . import setup
|
||||||
from a import y
|
from a import y
|
||||||
import a
|
import a
|
||||||
|
|
||||||
reveal_type(x) # revealed: Unknown
|
reveal_type(x) # revealed: int
|
||||||
reveal_type(setup.x) # revealed: Unknown
|
reveal_type(setup.x) # revealed: int
|
||||||
reveal_type(y) # revealed: int
|
reveal_type(y) # revealed: int
|
||||||
reveal_type(a.y) # revealed: int
|
reveal_type(a.y) # revealed: int
|
||||||
```
|
```
|
||||||
|
|
||||||
`a/tests/__init__.py`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
```
|
|
||||||
|
|
||||||
`a/tests/setup.py`:
|
`a/tests/setup.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
|
@ -383,27 +507,17 @@ y: int = 10
|
||||||
`b/tests/test1.py`:
|
`b/tests/test1.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# TODO: there should be no errors in this file
|
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from .setup import x
|
from .setup import x
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from . import setup
|
from . import setup
|
||||||
from b import y
|
from b import y
|
||||||
import b
|
import b
|
||||||
|
|
||||||
reveal_type(x) # revealed: Unknown
|
reveal_type(x) # revealed: str
|
||||||
reveal_type(setup.x) # revealed: Unknown
|
reveal_type(setup.x) # revealed: str
|
||||||
reveal_type(y) # revealed: str
|
reveal_type(y) # revealed: str
|
||||||
reveal_type(b.y) # revealed: str
|
reveal_type(b.y) # revealed: str
|
||||||
```
|
```
|
||||||
|
|
||||||
`b/tests/__init__.py`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
```
|
|
||||||
|
|
||||||
`b/tests/setup.py`:
|
`b/tests/setup.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
|
@ -431,21 +545,16 @@ that `import main` and expect that to work.
|
||||||
`a/tests/test1.py`:
|
`a/tests/test1.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# TODO: there should be no errors in this file.
|
|
||||||
|
|
||||||
from .setup import x
|
from .setup import x
|
||||||
from . import setup
|
from . import setup
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from main import y
|
from main import y
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
import main
|
import main
|
||||||
|
|
||||||
reveal_type(x) # revealed: int
|
reveal_type(x) # revealed: int
|
||||||
reveal_type(setup.x) # revealed: int
|
reveal_type(setup.x) # revealed: int
|
||||||
reveal_type(y) # revealed: Unknown
|
reveal_type(y) # revealed: int
|
||||||
reveal_type(main.y) # revealed: Unknown
|
reveal_type(main.y) # revealed: int
|
||||||
```
|
```
|
||||||
|
|
||||||
`a/tests/setup.py`:
|
`a/tests/setup.py`:
|
||||||
|
|
@ -470,113 +579,16 @@ y: int = 10
|
||||||
`b/tests/test1.py`:
|
`b/tests/test1.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# TODO: there should be no errors in this file
|
|
||||||
|
|
||||||
from .setup import x
|
from .setup import x
|
||||||
from . import setup
|
from . import setup
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from main import y
|
from main import y
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
import main
|
import main
|
||||||
|
|
||||||
reveal_type(x) # revealed: str
|
reveal_type(x) # revealed: str
|
||||||
reveal_type(setup.x) # revealed: str
|
reveal_type(setup.x) # revealed: str
|
||||||
reveal_type(y) # revealed: Unknown
|
reveal_type(y) # revealed: str
|
||||||
reveal_type(main.y) # revealed: Unknown
|
reveal_type(main.y) # revealed: str
|
||||||
```
|
|
||||||
|
|
||||||
`b/tests/setup.py`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
x: str = "2"
|
|
||||||
```
|
|
||||||
|
|
||||||
`b/pyproject.toml`:
|
|
||||||
|
|
||||||
```text
|
|
||||||
name = "a"
|
|
||||||
version = "0.1.0"
|
|
||||||
```
|
|
||||||
|
|
||||||
`b/main.py`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
y: str = "20"
|
|
||||||
```
|
|
||||||
|
|
||||||
### Tests Package Absolute Importing `main.py`
|
|
||||||
|
|
||||||
The same as the previous case but `tests/__init__.py` exists in case that causes different issues.
|
|
||||||
|
|
||||||
`a/tests/test1.py`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
# TODO: there should be no errors in this file.
|
|
||||||
|
|
||||||
from .setup import x
|
|
||||||
from . import setup
|
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from main import y
|
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
import main
|
|
||||||
|
|
||||||
reveal_type(x) # revealed: int
|
|
||||||
reveal_type(setup.x) # revealed: int
|
|
||||||
reveal_type(y) # revealed: Unknown
|
|
||||||
reveal_type(main.y) # revealed: Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
`a/tests/__init__.py`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
```
|
|
||||||
|
|
||||||
`a/tests/setup.py`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
x: int = 1
|
|
||||||
```
|
|
||||||
|
|
||||||
`a/pyproject.toml`:
|
|
||||||
|
|
||||||
```text
|
|
||||||
name = "a"
|
|
||||||
version = "0.1.0"
|
|
||||||
```
|
|
||||||
|
|
||||||
`a/main.py`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
y: int = 10
|
|
||||||
```
|
|
||||||
|
|
||||||
`b/tests/test1.py`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
# TODO: there should be no errors in this file
|
|
||||||
|
|
||||||
from .setup import x
|
|
||||||
from . import setup
|
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from main import y
|
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
import main
|
|
||||||
|
|
||||||
reveal_type(x) # revealed: str
|
|
||||||
reveal_type(setup.x) # revealed: str
|
|
||||||
reveal_type(y) # revealed: Unknown
|
|
||||||
reveal_type(main.y) # revealed: Unknown
|
|
||||||
```
|
|
||||||
|
|
||||||
`b/tests/__init__.py`:
|
|
||||||
|
|
||||||
```py
|
|
||||||
```
|
```
|
||||||
|
|
||||||
`b/tests/setup.py`:
|
`b/tests/setup.py`:
|
||||||
|
|
@ -606,16 +618,11 @@ imports it.
|
||||||
`a/main.py`:
|
`a/main.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# TODO: there should be no errors in this file.
|
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from utils import x
|
from utils import x
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
import utils
|
import utils
|
||||||
|
|
||||||
reveal_type(x) # revealed: Unknown
|
reveal_type(x) # revealed: int
|
||||||
reveal_type(utils.x) # revealed: Unknown
|
reveal_type(utils.x) # revealed: int
|
||||||
```
|
```
|
||||||
|
|
||||||
`a/utils/__init__.py`:
|
`a/utils/__init__.py`:
|
||||||
|
|
@ -634,16 +641,11 @@ version = "0.1.0"
|
||||||
`b/main.py`:
|
`b/main.py`:
|
||||||
|
|
||||||
```py
|
```py
|
||||||
# TODO: there should be no errors in this file.
|
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
from utils import x
|
from utils import x
|
||||||
|
|
||||||
# error: [unresolved-import]
|
|
||||||
import utils
|
import utils
|
||||||
|
|
||||||
reveal_type(x) # revealed: Unknown
|
reveal_type(x) # revealed: str
|
||||||
reveal_type(utils.x) # revealed: Unknown
|
reveal_type(utils.x) # revealed: str
|
||||||
```
|
```
|
||||||
|
|
||||||
`b/utils/__init__.py`:
|
`b/utils/__init__.py`:
|
||||||
|
|
|
||||||
|
|
@ -218,8 +218,8 @@ class E(A[int]):
|
||||||
def method(self, x: object) -> None: ... # fine
|
def method(self, x: object) -> None: ... # fine
|
||||||
|
|
||||||
class F[T](A[T]):
|
class F[T](A[T]):
|
||||||
# TODO: we should emit `invalid-method-override` on this:
|
|
||||||
# `str` is not necessarily a supertype of `T`!
|
# `str` is not necessarily a supertype of `T`!
|
||||||
|
# error: [invalid-method-override]
|
||||||
def method(self, x: str) -> None: ...
|
def method(self, x: str) -> None: ...
|
||||||
|
|
||||||
class G(A[int]):
|
class G(A[int]):
|
||||||
|
|
|
||||||
|
|
@ -301,7 +301,7 @@ class B: ...
|
||||||
|
|
||||||
EitherOr = A | B
|
EitherOr = A | B
|
||||||
|
|
||||||
# error: [invalid-base] "Invalid class base with type `types.UnionType`"
|
# error: [invalid-base] "Invalid class base with type `<types.UnionType special form 'A | B'>`"
|
||||||
class Foo(EitherOr): ...
|
class Foo(EitherOr): ...
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -156,7 +156,7 @@ from typing import Union
|
||||||
|
|
||||||
IntOrStr = Union[int, str]
|
IntOrStr = Union[int, str]
|
||||||
|
|
||||||
reveal_type(IntOrStr) # revealed: types.UnionType
|
reveal_type(IntOrStr) # revealed: <types.UnionType special form 'int | str'>
|
||||||
|
|
||||||
def _(x: int | str | bytes | memoryview | range):
|
def _(x: int | str | bytes | memoryview | range):
|
||||||
if isinstance(x, IntOrStr):
|
if isinstance(x, IntOrStr):
|
||||||
|
|
|
||||||
|
|
@ -209,7 +209,7 @@ from typing import Union
|
||||||
|
|
||||||
IntOrStr = Union[int, str]
|
IntOrStr = Union[int, str]
|
||||||
|
|
||||||
reveal_type(IntOrStr) # revealed: types.UnionType
|
reveal_type(IntOrStr) # revealed: <types.UnionType special form 'int | str'>
|
||||||
|
|
||||||
def f(x: type[int | str | bytes | range]):
|
def f(x: type[int | str | bytes | range]):
|
||||||
if issubclass(x, IntOrStr):
|
if issubclass(x, IntOrStr):
|
||||||
|
|
|
||||||
|
|
@ -113,7 +113,7 @@ MyList: TypeAlias = list[T]
|
||||||
ListOrSet: TypeAlias = list[T] | set[T]
|
ListOrSet: TypeAlias = list[T] | set[T]
|
||||||
|
|
||||||
reveal_type(MyList) # revealed: <class 'list[T]'>
|
reveal_type(MyList) # revealed: <class 'list[T]'>
|
||||||
reveal_type(ListOrSet) # revealed: types.UnionType
|
reveal_type(ListOrSet) # revealed: <types.UnionType special form 'list[T] | set[T]'>
|
||||||
|
|
||||||
def _(list_of_int: MyList[int], list_or_set_of_str: ListOrSet[str]):
|
def _(list_of_int: MyList[int], list_or_set_of_str: ListOrSet[str]):
|
||||||
reveal_type(list_of_int) # revealed: list[int]
|
reveal_type(list_of_int) # revealed: list[int]
|
||||||
|
|
@ -293,7 +293,7 @@ def _(rec: RecursiveHomogeneousTuple):
|
||||||
reveal_type(rec) # revealed: tuple[Divergent, ...]
|
reveal_type(rec) # revealed: tuple[Divergent, ...]
|
||||||
|
|
||||||
ClassInfo: TypeAlias = type | UnionType | tuple["ClassInfo", ...]
|
ClassInfo: TypeAlias = type | UnionType | tuple["ClassInfo", ...]
|
||||||
reveal_type(ClassInfo) # revealed: types.UnionType
|
reveal_type(ClassInfo) # revealed: <types.UnionType special form 'type | UnionType | tuple[Divergent, ...]'>
|
||||||
|
|
||||||
def my_isinstance(obj: object, classinfo: ClassInfo) -> bool:
|
def my_isinstance(obj: object, classinfo: ClassInfo) -> bool:
|
||||||
# TODO should be `type | UnionType | tuple[ClassInfo, ...]`
|
# TODO should be `type | UnionType | tuple[ClassInfo, ...]`
|
||||||
|
|
|
||||||
|
|
@ -3184,14 +3184,9 @@ from ty_extensions import reveal_protocol_interface
|
||||||
reveal_protocol_interface(Foo)
|
reveal_protocol_interface(Foo)
|
||||||
```
|
```
|
||||||
|
|
||||||
## Known panics
|
## Protocols generic over TypeVars bound to forward references
|
||||||
|
|
||||||
### Protocols generic over TypeVars bound to forward references
|
Protocols can have TypeVars with forward reference bounds that form cycles.
|
||||||
|
|
||||||
This test currently panics because the `ClassLiteral::explicit_bases` query fails to converge. See
|
|
||||||
issue <https://github.com/astral-sh/ty/issues/1587>.
|
|
||||||
|
|
||||||
<!-- expect-panic: execute: too many cycle iterations -->
|
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing import Any, Protocol, TypeVar
|
from typing import Any, Protocol, TypeVar
|
||||||
|
|
@ -3209,6 +3204,19 @@ class A2(Protocol[T2]):
|
||||||
|
|
||||||
class B1(A1[T3], Protocol[T3]): ...
|
class B1(A1[T3], Protocol[T3]): ...
|
||||||
class B2(A2[T4], Protocol[T4]): ...
|
class B2(A2[T4], Protocol[T4]): ...
|
||||||
|
|
||||||
|
# TODO should just be `B2[Any]`
|
||||||
|
reveal_type(T3.__bound__) # revealed: B2[Any] | @Todo(specialized non-generic class)
|
||||||
|
|
||||||
|
# TODO error: [invalid-type-arguments]
|
||||||
|
def f(x: B1[int]):
|
||||||
|
pass
|
||||||
|
|
||||||
|
reveal_type(T4.__bound__) # revealed: B1[Any]
|
||||||
|
|
||||||
|
# error: [invalid-type-arguments]
|
||||||
|
def g(x: B2[int]):
|
||||||
|
pass
|
||||||
```
|
```
|
||||||
|
|
||||||
## TODO
|
## TODO
|
||||||
|
|
|
||||||
|
|
@ -14,10 +14,11 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/directives/assert_type.m
|
||||||
```
|
```
|
||||||
1 | from typing_extensions import assert_type
|
1 | from typing_extensions import assert_type
|
||||||
2 |
|
2 |
|
||||||
3 | def _(x: int):
|
3 | def _(x: int, y: bool):
|
||||||
4 | assert_type(x, int) # fine
|
4 | assert_type(x, int) # fine
|
||||||
5 | assert_type(x, str) # error: [type-assertion-failure]
|
5 | assert_type(x, str) # error: [type-assertion-failure]
|
||||||
6 | assert_type(assert_type(x, int), int)
|
6 | assert_type(assert_type(x, int), int)
|
||||||
|
7 | assert_type(y, int) # error: [type-assertion-failure]
|
||||||
```
|
```
|
||||||
|
|
||||||
# Diagnostics
|
# Diagnostics
|
||||||
|
|
@ -26,15 +27,32 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/directives/assert_type.m
|
||||||
error[type-assertion-failure]: Argument does not have asserted type `str`
|
error[type-assertion-failure]: Argument does not have asserted type `str`
|
||||||
--> src/mdtest_snippet.py:5:5
|
--> src/mdtest_snippet.py:5:5
|
||||||
|
|
|
|
||||||
3 | def _(x: int):
|
3 | def _(x: int, y: bool):
|
||||||
4 | assert_type(x, int) # fine
|
4 | assert_type(x, int) # fine
|
||||||
5 | assert_type(x, str) # error: [type-assertion-failure]
|
5 | assert_type(x, str) # error: [type-assertion-failure]
|
||||||
| ^^^^^^^^^^^^-^^^^^^
|
| ^^^^^^^^^^^^-^^^^^^
|
||||||
| |
|
| |
|
||||||
| Inferred type of argument is `int`
|
| Inferred type is `int`
|
||||||
6 | assert_type(assert_type(x, int), int)
|
6 | assert_type(assert_type(x, int), int)
|
||||||
|
7 | assert_type(y, int) # error: [type-assertion-failure]
|
||||||
|
|
|
|
||||||
info: `str` and `int` are not equivalent types
|
info: `str` and `int` are not equivalent types
|
||||||
info: rule `type-assertion-failure` is enabled by default
|
info: rule `type-assertion-failure` is enabled by default
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
error[type-assertion-failure]: Argument does not have asserted type `int`
|
||||||
|
--> src/mdtest_snippet.py:7:5
|
||||||
|
|
|
||||||
|
5 | assert_type(x, str) # error: [type-assertion-failure]
|
||||||
|
6 | assert_type(assert_type(x, int), int)
|
||||||
|
7 | assert_type(y, int) # error: [type-assertion-failure]
|
||||||
|
| ^^^^^^^^^^^^-^^^^^^
|
||||||
|
| |
|
||||||
|
| Inferred type is `bool`
|
||||||
|
|
|
||||||
|
info: `bool` is a subtype of `int`, but they are not equivalent
|
||||||
|
info: rule `type-assertion-failure` is enabled by default
|
||||||
|
|
||||||
|
```
|
||||||
|
|
|
||||||
|
|
@ -91,14 +91,14 @@ error[missing-argument]: No argument provided for required parameter `arg` of bo
|
||||||
7 | from typing_extensions import deprecated
|
7 | from typing_extensions import deprecated
|
||||||
|
|
|
|
||||||
info: Parameter declared here
|
info: Parameter declared here
|
||||||
--> stdlib/typing_extensions.pyi:1000:28
|
--> stdlib/typing_extensions.pyi:1001:28
|
||||||
|
|
|
|
||||||
998 | stacklevel: int
|
999 | stacklevel: int
|
||||||
999 | def __init__(self, message: LiteralString, /, *, category: type[Warning] | None = ..., stacklevel: int = 1) -> None: ...
|
1000 | def __init__(self, message: LiteralString, /, *, category: type[Warning] | None = ..., stacklevel: int = 1) -> None: ...
|
||||||
1000 | def __call__(self, arg: _T, /) -> _T: ...
|
1001 | def __call__(self, arg: _T, /) -> _T: ...
|
||||||
| ^^^^^^^
|
| ^^^^^^^
|
||||||
1001 |
|
1002 |
|
||||||
1002 | @final
|
1003 | @final
|
||||||
|
|
|
|
||||||
info: rule `missing-argument` is enabled by default
|
info: rule `missing-argument` is enabled by default
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -63,7 +63,7 @@ error[invalid-argument-type]: Invalid second argument to `isinstance`
|
||||||
10 | # error: [invalid-argument-type]
|
10 | # error: [invalid-argument-type]
|
||||||
|
|
|
|
||||||
info: A `UnionType` instance can only be used as the second argument to `isinstance` if all elements are class objects
|
info: A `UnionType` instance can only be used as the second argument to `isinstance` if all elements are class objects
|
||||||
info: Elements `<typing.Literal special form>` and `<class 'list[int]'>` in the union are not class objects
|
info: Elements `<special form 'Literal[42]'>` and `<class 'list[int]'>` in the union are not class objects
|
||||||
info: rule `invalid-argument-type` is enabled by default
|
info: rule `invalid-argument-type` is enabled by default
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
@ -82,7 +82,7 @@ error[invalid-argument-type]: Invalid second argument to `isinstance`
|
||||||
13 | else:
|
13 | else:
|
||||||
|
|
|
|
||||||
info: A `UnionType` instance can only be used as the second argument to `isinstance` if all elements are class objects
|
info: A `UnionType` instance can only be used as the second argument to `isinstance` if all elements are class objects
|
||||||
info: Element `typing.Any` in the union, and 2 more elements, are not class objects
|
info: Element `<special form 'typing.Any'>` in the union, and 2 more elements, are not class objects
|
||||||
info: rule `invalid-argument-type` is enabled by default
|
info: rule `invalid-argument-type` is enabled by default
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
|
||||||
|
|
@ -24,7 +24,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/mro.md
|
||||||
# Diagnostics
|
# Diagnostics
|
||||||
|
|
||||||
```
|
```
|
||||||
error[inconsistent-mro]: Cannot create a consistent method resolution order (MRO) for class `Baz` with bases list `[typing.Protocol[T], <class 'Foo'>, <class 'Bar[T@Baz]'>]`
|
error[inconsistent-mro]: Cannot create a consistent method resolution order (MRO) for class `Baz` with bases list `[<special form 'typing.Protocol[T]'>, <class 'Foo'>, <class 'Bar[T@Baz]'>]`
|
||||||
--> src/mdtest_snippet.py:7:1
|
--> src/mdtest_snippet.py:7:1
|
||||||
|
|
|
|
||||||
5 | class Foo(Protocol): ...
|
5 | class Foo(Protocol): ...
|
||||||
|
|
|
||||||
|
|
@ -42,7 +42,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/protocols.md
|
||||||
# Diagnostics
|
# Diagnostics
|
||||||
|
|
||||||
```
|
```
|
||||||
error[call-non-callable]: Object of type `typing.Protocol` is not callable
|
error[call-non-callable]: Object of type `<special form 'typing.Protocol'>` is not callable
|
||||||
--> src/mdtest_snippet.py:4:13
|
--> src/mdtest_snippet.py:4:13
|
||||||
|
|
|
|
||||||
3 | # error: [call-non-callable]
|
3 | # error: [call-non-callable]
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,114 @@
|
||||||
|
---
|
||||||
|
source: crates/ty_test/src/lib.rs
|
||||||
|
expression: snapshot
|
||||||
|
---
|
||||||
|
---
|
||||||
|
mdtest name: special_form_attributes.md - Diagnostics for invalid attribute access on special forms
|
||||||
|
mdtest path: crates/ty_python_semantic/resources/mdtest/diagnostics/special_form_attributes.md
|
||||||
|
---
|
||||||
|
|
||||||
|
# Python source files
|
||||||
|
|
||||||
|
## mdtest_snippet.py
|
||||||
|
|
||||||
|
```
|
||||||
|
1 | from typing_extensions import Any, Final, LiteralString, Self
|
||||||
|
2 |
|
||||||
|
3 | X = Any
|
||||||
|
4 |
|
||||||
|
5 | class Foo:
|
||||||
|
6 | X: Final = LiteralString
|
||||||
|
7 | a: int
|
||||||
|
8 | b: Self
|
||||||
|
9 |
|
||||||
|
10 | class Bar:
|
||||||
|
11 | def __init__(self):
|
||||||
|
12 | self.y: Final = LiteralString
|
||||||
|
13 |
|
||||||
|
14 | X.foo # error: [unresolved-attribute]
|
||||||
|
15 | X.aaaaooooooo # error: [unresolved-attribute]
|
||||||
|
16 | Foo.X.startswith # error: [unresolved-attribute]
|
||||||
|
17 | Foo.Bar().y.startswith # error: [unresolved-attribute]
|
||||||
|
18 |
|
||||||
|
19 | # TODO: false positive (just testing the diagnostic in the meantime)
|
||||||
|
20 | Foo().b.a # error: [unresolved-attribute]
|
||||||
|
```
|
||||||
|
|
||||||
|
# Diagnostics
|
||||||
|
|
||||||
|
```
|
||||||
|
error[unresolved-attribute]: Special form `typing.Any` has no attribute `foo`
|
||||||
|
--> src/mdtest_snippet.py:14:1
|
||||||
|
|
|
||||||
|
12 | self.y: Final = LiteralString
|
||||||
|
13 |
|
||||||
|
14 | X.foo # error: [unresolved-attribute]
|
||||||
|
| ^^^^^
|
||||||
|
15 | X.aaaaooooooo # error: [unresolved-attribute]
|
||||||
|
16 | Foo.X.startswith # error: [unresolved-attribute]
|
||||||
|
|
|
||||||
|
help: Objects with type `Any` have a `foo` attribute, but the symbol `typing.Any` does not itself inhabit the type `Any`
|
||||||
|
help: This error may indicate that `X` was defined as `X = typing.Any` when `X: typing.Any` was intended
|
||||||
|
info: rule `unresolved-attribute` is enabled by default
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
error[unresolved-attribute]: Special form `typing.Any` has no attribute `aaaaooooooo`
|
||||||
|
--> src/mdtest_snippet.py:15:1
|
||||||
|
|
|
||||||
|
14 | X.foo # error: [unresolved-attribute]
|
||||||
|
15 | X.aaaaooooooo # error: [unresolved-attribute]
|
||||||
|
| ^^^^^^^^^^^^^
|
||||||
|
16 | Foo.X.startswith # error: [unresolved-attribute]
|
||||||
|
17 | Foo.Bar().y.startswith # error: [unresolved-attribute]
|
||||||
|
|
|
||||||
|
help: Objects with type `Any` have an `aaaaooooooo` attribute, but the symbol `typing.Any` does not itself inhabit the type `Any`
|
||||||
|
help: This error may indicate that `X` was defined as `X = typing.Any` when `X: typing.Any` was intended
|
||||||
|
info: rule `unresolved-attribute` is enabled by default
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
error[unresolved-attribute]: Special form `typing.LiteralString` has no attribute `startswith`
|
||||||
|
--> src/mdtest_snippet.py:16:1
|
||||||
|
|
|
||||||
|
14 | X.foo # error: [unresolved-attribute]
|
||||||
|
15 | X.aaaaooooooo # error: [unresolved-attribute]
|
||||||
|
16 | Foo.X.startswith # error: [unresolved-attribute]
|
||||||
|
| ^^^^^^^^^^^^^^^^
|
||||||
|
17 | Foo.Bar().y.startswith # error: [unresolved-attribute]
|
||||||
|
|
|
||||||
|
help: Objects with type `LiteralString` have a `startswith` attribute, but the symbol `typing.LiteralString` does not itself inhabit the type `LiteralString`
|
||||||
|
help: This error may indicate that `Foo.X` was defined as `Foo.X = typing.LiteralString` when `Foo.X: typing.LiteralString` was intended
|
||||||
|
info: rule `unresolved-attribute` is enabled by default
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
error[unresolved-attribute]: Special form `typing.LiteralString` has no attribute `startswith`
|
||||||
|
--> src/mdtest_snippet.py:17:1
|
||||||
|
|
|
||||||
|
15 | X.aaaaooooooo # error: [unresolved-attribute]
|
||||||
|
16 | Foo.X.startswith # error: [unresolved-attribute]
|
||||||
|
17 | Foo.Bar().y.startswith # error: [unresolved-attribute]
|
||||||
|
| ^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
18 |
|
||||||
|
19 | # TODO: false positive (just testing the diagnostic in the meantime)
|
||||||
|
|
|
||||||
|
help: Objects with type `LiteralString` have a `startswith` attribute, but the symbol `typing.LiteralString` does not itself inhabit the type `LiteralString`
|
||||||
|
info: rule `unresolved-attribute` is enabled by default
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
error[unresolved-attribute]: Special form `typing.Self` has no attribute `a`
|
||||||
|
--> src/mdtest_snippet.py:20:1
|
||||||
|
|
|
||||||
|
19 | # TODO: false positive (just testing the diagnostic in the meantime)
|
||||||
|
20 | Foo().b.a # error: [unresolved-attribute]
|
||||||
|
| ^^^^^^^^^
|
||||||
|
|
|
||||||
|
info: rule `unresolved-attribute` is enabled by default
|
||||||
|
|
||||||
|
```
|
||||||
|
|
@ -166,7 +166,7 @@ impl<'db> DunderAllNamesCollector<'db> {
|
||||||
) -> Option<&'db FxHashSet<Name>> {
|
) -> Option<&'db FxHashSet<Name>> {
|
||||||
let module_name =
|
let module_name =
|
||||||
ModuleName::from_import_statement(self.db, self.file, import_from).ok()?;
|
ModuleName::from_import_statement(self.db, self.file, import_from).ok()?;
|
||||||
let module = resolve_module(self.db, &module_name)?;
|
let module = resolve_module(self.db, self.file, &module_name)?;
|
||||||
dunder_all_names(self.db, module.file(self.db)?)
|
dunder_all_names(self.db, module.file(self.db)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -13,8 +13,8 @@ pub use diagnostic::add_inferred_python_version_hint_to_diagnostic;
|
||||||
pub use module_name::{ModuleName, ModuleNameResolutionError};
|
pub use module_name::{ModuleName, ModuleNameResolutionError};
|
||||||
pub use module_resolver::{
|
pub use module_resolver::{
|
||||||
KnownModule, Module, SearchPath, SearchPathValidationError, SearchPaths, all_modules,
|
KnownModule, Module, SearchPath, SearchPathValidationError, SearchPaths, all_modules,
|
||||||
list_modules, resolve_module, resolve_real_module, resolve_real_shadowable_module,
|
list_modules, resolve_module, resolve_module_confident, resolve_real_module,
|
||||||
system_module_search_paths,
|
resolve_real_module_confident, resolve_real_shadowable_module, system_module_search_paths,
|
||||||
};
|
};
|
||||||
pub use program::{
|
pub use program::{
|
||||||
Program, ProgramSettings, PythonVersionFileSource, PythonVersionSource,
|
Program, ProgramSettings, PythonVersionFileSource, PythonVersionSource,
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,10 @@ pub use module::Module;
|
||||||
pub use path::{SearchPath, SearchPathValidationError};
|
pub use path::{SearchPath, SearchPathValidationError};
|
||||||
pub use resolver::SearchPaths;
|
pub use resolver::SearchPaths;
|
||||||
pub(crate) use resolver::file_to_module;
|
pub(crate) use resolver::file_to_module;
|
||||||
pub use resolver::{resolve_module, resolve_real_module, resolve_real_shadowable_module};
|
pub use resolver::{
|
||||||
|
resolve_module, resolve_module_confident, resolve_real_module, resolve_real_module_confident,
|
||||||
|
resolve_real_shadowable_module,
|
||||||
|
};
|
||||||
use ruff_db::system::SystemPath;
|
use ruff_db::system::SystemPath;
|
||||||
|
|
||||||
use crate::Db;
|
use crate::Db;
|
||||||
|
|
|
||||||
|
|
@ -594,7 +594,7 @@ impl SearchPath {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn is_first_party(&self) -> bool {
|
pub fn is_first_party(&self) -> bool {
|
||||||
matches!(&*self.0, SearchPathInner::FirstParty(_))
|
matches!(&*self.0, SearchPathInner::FirstParty(_))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -608,6 +608,18 @@ impl SearchPath {
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub(crate) fn relativize_system_path(&self, path: &SystemPath) -> Option<ModulePath> {
|
pub(crate) fn relativize_system_path(&self, path: &SystemPath) -> Option<ModulePath> {
|
||||||
|
self.relativize_system_path_only(path)
|
||||||
|
.map(|relative_path| ModulePath {
|
||||||
|
search_path: self.clone(),
|
||||||
|
relative_path: relative_path.as_utf8_path().to_path_buf(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub(crate) fn relativize_system_path_only<'a>(
|
||||||
|
&self,
|
||||||
|
path: &'a SystemPath,
|
||||||
|
) -> Option<&'a SystemPath> {
|
||||||
if path
|
if path
|
||||||
.extension()
|
.extension()
|
||||||
.is_some_and(|extension| !self.is_valid_extension(extension))
|
.is_some_and(|extension| !self.is_valid_extension(extension))
|
||||||
|
|
@ -621,14 +633,7 @@ impl SearchPath {
|
||||||
| SearchPathInner::StandardLibraryCustom(search_path)
|
| SearchPathInner::StandardLibraryCustom(search_path)
|
||||||
| SearchPathInner::StandardLibraryReal(search_path)
|
| SearchPathInner::StandardLibraryReal(search_path)
|
||||||
| SearchPathInner::SitePackages(search_path)
|
| SearchPathInner::SitePackages(search_path)
|
||||||
| SearchPathInner::Editable(search_path) => {
|
| SearchPathInner::Editable(search_path) => path.strip_prefix(search_path).ok(),
|
||||||
path.strip_prefix(search_path)
|
|
||||||
.ok()
|
|
||||||
.map(|relative_path| ModulePath {
|
|
||||||
search_path: self.clone(),
|
|
||||||
relative_path: relative_path.as_utf8_path().to_path_buf(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
SearchPathInner::StandardLibraryVendored(_) => None,
|
SearchPathInner::StandardLibraryVendored(_) => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -783,7 +788,7 @@ impl fmt::Display for SearchPath {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone, Copy)]
|
||||||
pub(super) enum SystemOrVendoredPathRef<'db> {
|
pub(super) enum SystemOrVendoredPathRef<'db> {
|
||||||
System(&'db SystemPath),
|
System(&'db SystemPath),
|
||||||
Vendored(&'db VendoredPath),
|
Vendored(&'db VendoredPath),
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,31 @@
|
||||||
/*!
|
/*!
|
||||||
This module principally provides two routines for resolving a particular module
|
This module principally provides several routines for resolving a particular module
|
||||||
name to a `Module`: [`resolve_module`] and [`resolve_real_module`]. You'll
|
name to a `Module`:
|
||||||
usually want the former, unless you're certain you want to forbid stubs, in
|
|
||||||
which case, use the latter.
|
* [`file_to_module`][]: resolves the module `.<self>` (often as the first step in resolving `.`)
|
||||||
|
* [`resolve_module`][]: resolves an absolute module name
|
||||||
|
|
||||||
|
You may notice that we actually provide `resolve_(real)_(shadowable)_module_(confident)`.
|
||||||
|
You almost certainly just want [`resolve_module`][]. The other variations represent
|
||||||
|
restrictions to answer specific kinds of questions, usually to empower IDE features.
|
||||||
|
|
||||||
|
* The `real` variation disallows all stub files, including the vendored typeshed.
|
||||||
|
This enables the goto-definition ("real") vs goto-declaration ("stub or real") distinction.
|
||||||
|
|
||||||
|
* The `confident` variation disallows "desperate resolution", which is a fallback
|
||||||
|
mode where we start trying to use ancestor directories of the importing file
|
||||||
|
as search-paths, but only if we failed to resolve it with the normal search-paths.
|
||||||
|
This is mostly just a convenience for cases where we don't want to have to specify
|
||||||
|
the importing file (resolving a `KnownModule` and tests).
|
||||||
|
|
||||||
|
* The `shadowable` variation disables some guards that prevent third-party code
|
||||||
|
from shadowing any vendored non-stdlib `KnownModule`. In particular `typing_extensions`,
|
||||||
|
which we vendor and heavily assume the contents of (and so don't ever want to shadow).
|
||||||
|
This enables checking if the user *actually* has `typing_extensions` installed,
|
||||||
|
in which case it's ok to suggest it in features like auto-imports.
|
||||||
|
|
||||||
|
There is some awkwardness to the structure of the code to specifically enable caching
|
||||||
|
of queries, as module resolution happens a lot and involves a lot of disk access.
|
||||||
|
|
||||||
For implementors, see `import-resolution-diagram.svg` for a flow diagram that
|
For implementors, see `import-resolution-diagram.svg` for a flow diagram that
|
||||||
specifies ty's implementation of Python's import resolution algorithm.
|
specifies ty's implementation of Python's import resolution algorithm.
|
||||||
|
|
@ -33,14 +56,51 @@ use super::module::{Module, ModuleKind};
|
||||||
use super::path::{ModulePath, SearchPath, SearchPathValidationError, SystemOrVendoredPathRef};
|
use super::path::{ModulePath, SearchPath, SearchPathValidationError, SystemOrVendoredPathRef};
|
||||||
|
|
||||||
/// Resolves a module name to a module.
|
/// Resolves a module name to a module.
|
||||||
pub fn resolve_module<'db>(db: &'db dyn Db, module_name: &ModuleName) -> Option<Module<'db>> {
|
pub fn resolve_module<'db>(
|
||||||
|
db: &'db dyn Db,
|
||||||
|
importing_file: File,
|
||||||
|
module_name: &ModuleName,
|
||||||
|
) -> Option<Module<'db>> {
|
||||||
|
let interned_name = ModuleNameIngredient::new(db, module_name, ModuleResolveMode::StubsAllowed);
|
||||||
|
|
||||||
|
resolve_module_query(db, interned_name)
|
||||||
|
.or_else(|| desperately_resolve_module(db, importing_file, interned_name))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Resolves a module name to a module, without desperate resolution available.
|
||||||
|
///
|
||||||
|
/// This is appropriate for resolving a `KnownModule`, or cases where for whatever reason
|
||||||
|
/// we don't have a well-defined importing file.
|
||||||
|
pub fn resolve_module_confident<'db>(
|
||||||
|
db: &'db dyn Db,
|
||||||
|
module_name: &ModuleName,
|
||||||
|
) -> Option<Module<'db>> {
|
||||||
let interned_name = ModuleNameIngredient::new(db, module_name, ModuleResolveMode::StubsAllowed);
|
let interned_name = ModuleNameIngredient::new(db, module_name, ModuleResolveMode::StubsAllowed);
|
||||||
|
|
||||||
resolve_module_query(db, interned_name)
|
resolve_module_query(db, interned_name)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Resolves a module name to a module (stubs not allowed).
|
/// Resolves a module name to a module (stubs not allowed).
|
||||||
pub fn resolve_real_module<'db>(db: &'db dyn Db, module_name: &ModuleName) -> Option<Module<'db>> {
|
pub fn resolve_real_module<'db>(
|
||||||
|
db: &'db dyn Db,
|
||||||
|
importing_file: File,
|
||||||
|
module_name: &ModuleName,
|
||||||
|
) -> Option<Module<'db>> {
|
||||||
|
let interned_name =
|
||||||
|
ModuleNameIngredient::new(db, module_name, ModuleResolveMode::StubsNotAllowed);
|
||||||
|
|
||||||
|
resolve_module_query(db, interned_name)
|
||||||
|
.or_else(|| desperately_resolve_module(db, importing_file, interned_name))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Resolves a module name to a module, without desperate resolution available (stubs not allowed).
|
||||||
|
///
|
||||||
|
/// This is appropriate for resolving a `KnownModule`, or cases where for whatever reason
|
||||||
|
/// we don't have a well-defined importing file.
|
||||||
|
pub fn resolve_real_module_confident<'db>(
|
||||||
|
db: &'db dyn Db,
|
||||||
|
module_name: &ModuleName,
|
||||||
|
) -> Option<Module<'db>> {
|
||||||
let interned_name =
|
let interned_name =
|
||||||
ModuleNameIngredient::new(db, module_name, ModuleResolveMode::StubsNotAllowed);
|
ModuleNameIngredient::new(db, module_name, ModuleResolveMode::StubsNotAllowed);
|
||||||
|
|
||||||
|
|
@ -60,6 +120,7 @@ pub fn resolve_real_module<'db>(db: &'db dyn Db, module_name: &ModuleName) -> Op
|
||||||
/// are involved in an import cycle with `builtins`.
|
/// are involved in an import cycle with `builtins`.
|
||||||
pub fn resolve_real_shadowable_module<'db>(
|
pub fn resolve_real_shadowable_module<'db>(
|
||||||
db: &'db dyn Db,
|
db: &'db dyn Db,
|
||||||
|
importing_file: File,
|
||||||
module_name: &ModuleName,
|
module_name: &ModuleName,
|
||||||
) -> Option<Module<'db>> {
|
) -> Option<Module<'db>> {
|
||||||
let interned_name = ModuleNameIngredient::new(
|
let interned_name = ModuleNameIngredient::new(
|
||||||
|
|
@ -69,6 +130,7 @@ pub fn resolve_real_shadowable_module<'db>(
|
||||||
);
|
);
|
||||||
|
|
||||||
resolve_module_query(db, interned_name)
|
resolve_module_query(db, interned_name)
|
||||||
|
.or_else(|| desperately_resolve_module(db, importing_file, interned_name))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Which files should be visible when doing a module query
|
/// Which files should be visible when doing a module query
|
||||||
|
|
@ -181,6 +243,55 @@ fn resolve_module_query<'db>(
|
||||||
Some(module)
|
Some(module)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Like `resolve_module_query` but for cases where it failed to resolve the module
|
||||||
|
/// and we are now Getting Desperate and willing to try the ancestor directories of
|
||||||
|
/// the `importing_file` as potential temporary search paths that are private
|
||||||
|
/// to this import.
|
||||||
|
///
|
||||||
|
/// The reason this is split out is because in 99.9% of cases `resolve_module_query`
|
||||||
|
/// will find the right answer (or no valid answer exists), and we want it to be
|
||||||
|
/// aggressively cached. Including the `importing_file` as part of that query would
|
||||||
|
/// trash the caching of import resolution between files.
|
||||||
|
///
|
||||||
|
/// TODO: should (some) of this also be cached? If an entire directory of python files
|
||||||
|
/// is misunderstood we'll end up in here a lot.
|
||||||
|
fn desperately_resolve_module<'db>(
|
||||||
|
db: &'db dyn Db,
|
||||||
|
importing_file: File,
|
||||||
|
module_name: ModuleNameIngredient<'db>,
|
||||||
|
) -> Option<Module<'db>> {
|
||||||
|
let name = module_name.name(db);
|
||||||
|
let mode = module_name.mode(db);
|
||||||
|
let _span = tracing::trace_span!("desperately_resolve_module", %name).entered();
|
||||||
|
|
||||||
|
let Some(resolved) = desperately_resolve_name(db, importing_file, name, mode) else {
|
||||||
|
tracing::debug!("Module `{name}` not found while looking in parent dirs");
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
|
||||||
|
let module = match resolved {
|
||||||
|
ResolvedName::FileModule(module) => {
|
||||||
|
tracing::trace!(
|
||||||
|
"Resolved module `{name}` to `{path}`",
|
||||||
|
path = module.file.path(db)
|
||||||
|
);
|
||||||
|
Module::file_module(
|
||||||
|
db,
|
||||||
|
name.clone(),
|
||||||
|
module.kind,
|
||||||
|
module.search_path,
|
||||||
|
module.file,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
ResolvedName::NamespacePackage => {
|
||||||
|
tracing::trace!("Module `{name}` is a namespace package");
|
||||||
|
Module::namespace_package(db, name.clone())
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Some(module)
|
||||||
|
}
|
||||||
|
|
||||||
/// Resolves the module for the given path.
|
/// Resolves the module for the given path.
|
||||||
///
|
///
|
||||||
/// Returns `None` if the path is not a module locatable via any of the known search paths.
|
/// Returns `None` if the path is not a module locatable via any of the known search paths.
|
||||||
|
|
@@ -201,13 +312,33 @@ pub(crate) fn path_to_module<'db>(db: &'db dyn Db, path: &FilePath) -> Option<Mo
 /// Resolves the module for the file with the given id.
 ///
 /// Returns `None` if the file is not a module locatable via any of the known search paths.
+///
+/// This function can be understood as essentially resolving `import .<self>` in the file itself,
+/// and indeed, one of its primary jobs is resolving `.<self>` to derive the module name of `.`.
+/// This intuition is particularly useful for understanding why it's correct that we pass
+/// the file itself as `importing_file` to various subroutines.
 #[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]
 pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module<'_>> {
     let _span = tracing::trace_span!("file_to_module", ?file).entered();

     let path = SystemOrVendoredPathRef::try_from_file(db, file)?;

-    let module_name = search_paths(db, ModuleResolveMode::StubsAllowed).find_map(|candidate| {
+    file_to_module_impl(
+        db,
+        file,
+        path,
+        search_paths(db, ModuleResolveMode::StubsAllowed),
+    )
+    .or_else(|| file_to_module_impl(db, file, path, desperate_search_paths(db, file).iter()))
+}
+
+fn file_to_module_impl<'db, 'a>(
+    db: &'db dyn Db,
+    file: File,
+    path: SystemOrVendoredPathRef<'a>,
+    mut search_paths: impl Iterator<Item = &'a SearchPath>,
+) -> Option<Module<'db>> {
+    let module_name = search_paths.find_map(|candidate: &SearchPath| {
         let relative_path = match path {
             SystemOrVendoredPathRef::System(path) => candidate.relativize_system_path(path),
             SystemOrVendoredPathRef::Vendored(path) => candidate.relativize_vendored_path(path),
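The fallback above only fires when the regular search paths fail: `or_else` is lazy, so the desperate ancestor-directory lookup is never computed on the happy path. A minimal sketch of that shape, with illustrative names rather than ty's actual types:

    // Hypothetical stand-ins: `Resolved` and both closures are illustrative only.
    fn resolve_with_fallback<Resolved>(
        primary: impl FnOnce() -> Option<Resolved>,
        desperate: impl FnOnce() -> Option<Resolved>,
    ) -> Option<Resolved> {
        // The desperate lookup runs only if the primary lookup returned `None`,
        // mirroring `file_to_module_impl(..).or_else(|| file_to_module_impl(.., desperate))`.
        primary().or_else(desperate)
    }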
@@ -219,7 +350,7 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module<'_>> {
     // If it doesn't, then that means that multiple modules have the same name in different
     // root paths, but that the module corresponding to `path` is in a lower priority search path,
     // in which case we ignore it.
-    let module = resolve_module(db, &module_name)?;
+    let module = resolve_module(db, file, &module_name)?;
     let module_file = module.file(db)?;

     if file.path(db) == module_file.path(db) {

@@ -230,7 +361,7 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module<'_>> {
     // If a .py and .pyi are both defined, the .pyi will be the one returned by `resolve_module().file`,
     // which would make us erroneously believe the `.py` is *not* also this module (breaking things
     // like relative imports). So here we try `resolve_real_module().file` to cover both cases.
-    let module = resolve_real_module(db, &module_name)?;
+    let module = resolve_real_module(db, file, &module_name)?;
     let module_file = module.file(db)?;
     if file.path(db) == module_file.path(db) {
         return Some(module);
@@ -250,6 +381,58 @@ pub(crate) fn search_paths(db: &dyn Db, resolve_mode: ModuleResolveMode) -> Sear
     Program::get(db).search_paths(db).iter(db, resolve_mode)
 }

+/// Get the search-paths that should be used for desperate resolution of imports in this file
+///
+/// Currently this is "the closest ancestor dir that contains a pyproject.toml", which is
+/// a completely arbitrary decision. We could potentially change this to return an iterator
+/// of every ancestor with a pyproject.toml or every ancestor.
+///
+/// For now this works well in common cases where we have some larger workspace that contains
+/// one or more python projects in sub-directories, and those python projects assume that
+/// absolute imports resolve relative to the pyproject.toml they live under.
+///
+/// Being so strict minimizes concerns about this going off a lot and doing random
+/// chaotic things. In particular, all files under a given pyproject.toml will currently
+/// agree on this being their desperate search-path, which is really nice.
+#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]
+fn desperate_search_paths(db: &dyn Db, importing_file: File) -> Option<SearchPath> {
+    let system = db.system();
+    let importing_path = importing_file.path(db).as_system_path()?;
+
+    // Only allow this if the importing_file is under the first-party search path
+    let (base_path, rel_path) =
+        search_paths(db, ModuleResolveMode::StubsAllowed).find_map(|search_path| {
+            if !search_path.is_first_party() {
+                return None;
+            }
+            Some((
+                search_path.as_system_path()?,
+                search_path.relativize_system_path_only(importing_path)?,
+            ))
+        })?;
+
+    // Read the revision on the corresponding file root to
+    // register an explicit dependency on this directory. When
+    // the revision gets bumped, the cache that Salsa creates
+    // for this routine will be invalidated.
+    //
+    // (This is conditional because ruff uses this code too and doesn't set roots)
+    if let Some(root) = db.files().root(db, base_path) {
+        let _ = root.revision(db);
+    }
+
+    // Only allow searching up to the first-party path's root
+    for rel_dir in rel_path.ancestors() {
+        let candidate_path = base_path.join(rel_dir);
+        if system.path_exists(&candidate_path.join("pyproject.toml"))
+            || system.path_exists(&candidate_path.join("ty.toml"))
+        {
+            let search_path = SearchPath::first_party(system, candidate_path).ok()?;
+            return Some(search_path);
+        }
+    }
+    None
+}
+
 #[derive(Clone, Debug, PartialEq, Eq, get_size2::GetSize)]
 pub struct SearchPaths {
     /// Search paths that have been statically determined purely from reading
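In plain `std::path` terms, the heuristic in the doc-comment amounts to walking up from the importing file and stopping at the first directory that carries a `pyproject.toml` or `ty.toml`, never climbing above the first-party root. A rough sketch of just that walk, assuming ordinary filesystem access rather than ty's `System`/`SearchPath` abstractions:

    use std::path::{Path, PathBuf};

    // Illustrative only: the real implementation goes through `db.system()` and
    // relativized search paths, not `std::fs`.
    fn closest_project_root(importing_file: &Path, first_party_root: &Path) -> Option<PathBuf> {
        let mut dir = importing_file.parent()?;
        loop {
            if dir.join("pyproject.toml").exists() || dir.join("ty.toml").exists() {
                return Some(dir.to_path_buf());
            }
            if dir == first_party_root {
                // Don't walk above the first-party search path's root.
                return None;
            }
            dir = dir.parent()?;
        }
    }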
@@ -756,6 +939,30 @@ struct ModuleNameIngredient<'db> {
 /// Given a module name and a list of search paths in which to lookup modules,
 /// attempt to resolve the module name
 fn resolve_name(db: &dyn Db, name: &ModuleName, mode: ModuleResolveMode) -> Option<ResolvedName> {
+    let search_paths = search_paths(db, mode);
+    resolve_name_impl(db, name, mode, search_paths)
+}
+
+/// Like `resolve_name` but for cases where it failed to resolve the module
+/// and we are now Getting Desperate and willing to try the ancestor directories of
+/// the `importing_file` as potential temporary search paths that are private
+/// to this import.
+fn desperately_resolve_name(
+    db: &dyn Db,
+    importing_file: File,
+    name: &ModuleName,
+    mode: ModuleResolveMode,
+) -> Option<ResolvedName> {
+    let search_paths = desperate_search_paths(db, importing_file);
+    resolve_name_impl(db, name, mode, search_paths.iter())
+}
+
+fn resolve_name_impl<'a>(
+    db: &dyn Db,
+    name: &ModuleName,
+    mode: ModuleResolveMode,
+    search_paths: impl Iterator<Item = &'a SearchPath>,
+) -> Option<ResolvedName> {
     let program = Program::get(db);
     let python_version = program.python_version(db);
     let resolver_state = ResolverContext::new(db, python_version, mode);
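Both entry points now funnel into one body that is generic over any iterator of search paths: the normal resolver passes the program's configured roots, while the desperate resolver passes at most one ancestor root. A toy sketch of that shape, with `SearchRoot` and the other names standing in for the real resolver types:

    // All names here are hypothetical stand-ins, not the actual ty API.
    struct SearchRoot(&'static str);

    fn resolve(roots: &[SearchRoot], name: &str) -> Option<&'static str> {
        resolve_impl(roots.iter(), name)
    }

    fn resolve_desperately(ancestor_root: Option<&SearchRoot>, name: &str) -> Option<&'static str> {
        // `Option` iterates over zero or one items, just like
        // `desperate_search_paths(db, importing_file).iter()` above.
        resolve_impl(ancestor_root.into_iter(), name)
    }

    fn resolve_impl<'a>(
        mut roots: impl Iterator<Item = &'a SearchRoot>,
        name: &str,
    ) -> Option<&'static str> {
        // Return the first root that "contains" the requested module.
        roots.find_map(|root| (name == "example_pkg").then_some(root.0))
    }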
@@ -765,7 +972,7 @@ fn resolve_name(db: &dyn Db, name: &ModuleName, mode: ModuleResolveMode) -> Opti
     let stub_name = name.to_stub_package();
     let mut is_namespace_package = false;

-    for search_path in search_paths(db, mode) {
+    for search_path in search_paths {
         // When a builtin module is imported, standard module resolution is bypassed:
         // the module name always resolves to the stdlib module,
         // even if there's a module of the same name in the first-party root
@@ -1409,11 +1616,11 @@ mod tests {
            .build();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
-       let foo_module = resolve_module(&db, &foo_module_name).unwrap();
+       let foo_module = resolve_module_confident(&db, &foo_module_name).unwrap();

        assert_eq!(
            Some(&foo_module),
-           resolve_module(&db, &foo_module_name).as_ref()
+           resolve_module_confident(&db, &foo_module_name).as_ref()
        );

        assert_eq!("foo", foo_module.name(&db));

@@ -1435,11 +1642,11 @@ mod tests {
            .build();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
-       let foo_module = resolve_module(&db, &foo_module_name).unwrap();
+       let foo_module = resolve_module_confident(&db, &foo_module_name).unwrap();

        assert_eq!(
            Some(&foo_module),
-           resolve_module(&db, &foo_module_name).as_ref()
+           resolve_module_confident(&db, &foo_module_name).as_ref()
        );

        assert_eq!("foo", foo_module.name(&db));

@@ -1467,11 +1674,11 @@ mod tests {
            .build();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
-       let foo_module = resolve_module(&db, &foo_module_name).unwrap();
+       let foo_module = resolve_module_confident(&db, &foo_module_name).unwrap();

        assert_eq!(
            Some(&foo_module),
-           resolve_module(&db, &foo_module_name).as_ref()
+           resolve_module_confident(&db, &foo_module_name).as_ref()
        );

        assert_eq!("foo", foo_module.name(&db));

@@ -1494,7 +1701,8 @@ mod tests {
            .build();

        let builtins_module_name = ModuleName::new_static("builtins").unwrap();
-       let builtins = resolve_module(&db, &builtins_module_name).expect("builtins to resolve");
+       let builtins =
+           resolve_module_confident(&db, &builtins_module_name).expect("builtins to resolve");

        assert_eq!(
            builtins.file(&db).unwrap().path(&db),

@@ -1518,7 +1726,8 @@ mod tests {
            .build();

        let builtins_module_name = ModuleName::new_static("builtins").unwrap();
-       let builtins = resolve_module(&db, &builtins_module_name).expect("builtins to resolve");
+       let builtins =
+           resolve_module_confident(&db, &builtins_module_name).expect("builtins to resolve");

        assert_eq!(
            builtins.file(&db).unwrap().path(&db),

@@ -1539,11 +1748,11 @@ mod tests {
            .build();

        let functools_module_name = ModuleName::new_static("functools").unwrap();
-       let functools_module = resolve_module(&db, &functools_module_name).unwrap();
+       let functools_module = resolve_module_confident(&db, &functools_module_name).unwrap();

        assert_eq!(
            Some(&functools_module),
-           resolve_module(&db, &functools_module_name).as_ref()
+           resolve_module_confident(&db, &functools_module_name).as_ref()
        );

        assert_eq!(&stdlib, functools_module.search_path(&db).unwrap());

@@ -1596,7 +1805,8 @@ mod tests {

        let existing_modules = create_module_names(&["asyncio", "functools", "xml.etree"]);
        for module_name in existing_modules {
-           let resolved_module = resolve_module(&db, &module_name).unwrap_or_else(|| {
+           let resolved_module =
+               resolve_module_confident(&db, &module_name).unwrap_or_else(|| {
                panic!("Expected module {module_name} to exist in the mock stdlib")
            });
            let search_path = resolved_module.search_path(&db).unwrap();

@@ -1649,7 +1859,7 @@ mod tests {

        for module_name in nonexisting_modules {
            assert!(
-               resolve_module(&db, &module_name).is_none(),
+               resolve_module_confident(&db, &module_name).is_none(),
                "Unexpectedly resolved a module for {module_name}"
            );
        }

@@ -1692,7 +1902,8 @@ mod tests {
        ]);

        for module_name in existing_modules {
-           let resolved_module = resolve_module(&db, &module_name).unwrap_or_else(|| {
+           let resolved_module =
+               resolve_module_confident(&db, &module_name).unwrap_or_else(|| {
                panic!("Expected module {module_name} to exist in the mock stdlib")
            });
            let search_path = resolved_module.search_path(&db).unwrap();

@@ -1728,7 +1939,7 @@ mod tests {
        let nonexisting_modules = create_module_names(&["importlib", "xml", "xml.etree"]);
        for module_name in nonexisting_modules {
            assert!(
-               resolve_module(&db, &module_name).is_none(),
+               resolve_module_confident(&db, &module_name).is_none(),
                "Unexpectedly resolved a module for {module_name}"
            );
        }

@@ -1750,11 +1961,11 @@ mod tests {
            .build();

        let functools_module_name = ModuleName::new_static("functools").unwrap();
-       let functools_module = resolve_module(&db, &functools_module_name).unwrap();
+       let functools_module = resolve_module_confident(&db, &functools_module_name).unwrap();

        assert_eq!(
            Some(&functools_module),
-           resolve_module(&db, &functools_module_name).as_ref()
+           resolve_module_confident(&db, &functools_module_name).as_ref()
        );
        assert_eq!(&src, functools_module.search_path(&db).unwrap());
        assert_eq!(ModuleKind::Module, functools_module.kind(&db));

@@ -1777,7 +1988,7 @@ mod tests {
            .build();

        let pydoc_data_topics_name = ModuleName::new_static("pydoc_data.topics").unwrap();
-       let pydoc_data_topics = resolve_module(&db, &pydoc_data_topics_name).unwrap();
+       let pydoc_data_topics = resolve_module_confident(&db, &pydoc_data_topics_name).unwrap();

        assert_eq!("pydoc_data.topics", pydoc_data_topics.name(&db));
        assert_eq!(pydoc_data_topics.search_path(&db).unwrap(), &stdlib);

@@ -1794,7 +2005,8 @@ mod tests {
            .build();

        let foo_path = src.join("foo/__init__.py");
-       let foo_module = resolve_module(&db, &ModuleName::new_static("foo").unwrap()).unwrap();
+       let foo_module =
+           resolve_module_confident(&db, &ModuleName::new_static("foo").unwrap()).unwrap();

        assert_eq!("foo", foo_module.name(&db));
        assert_eq!(&src, foo_module.search_path(&db).unwrap());

@@ -1821,7 +2033,8 @@ mod tests {

        let TestCase { db, src, .. } = TestCaseBuilder::new().with_src_files(SRC).build();

-       let foo_module = resolve_module(&db, &ModuleName::new_static("foo").unwrap()).unwrap();
+       let foo_module =
+           resolve_module_confident(&db, &ModuleName::new_static("foo").unwrap()).unwrap();
        let foo_init_path = src.join("foo/__init__.py");

        assert_eq!(&src, foo_module.search_path(&db).unwrap());

@@ -1844,8 +2057,9 @@ mod tests {

        let TestCase { db, src, .. } = TestCaseBuilder::new().with_src_files(SRC).build();

-       let foo = resolve_module(&db, &ModuleName::new_static("foo").unwrap()).unwrap();
-       let foo_real = resolve_real_module(&db, &ModuleName::new_static("foo").unwrap()).unwrap();
+       let foo = resolve_module_confident(&db, &ModuleName::new_static("foo").unwrap()).unwrap();
+       let foo_real =
+           resolve_real_module_confident(&db, &ModuleName::new_static("foo").unwrap()).unwrap();
        let foo_stub = src.join("foo.pyi");

        assert_eq!(&src, foo.search_path(&db).unwrap());

@@ -1870,7 +2084,7 @@ mod tests {
        let TestCase { db, src, .. } = TestCaseBuilder::new().with_src_files(SRC).build();

        let baz_module =
-           resolve_module(&db, &ModuleName::new_static("foo.bar.baz").unwrap()).unwrap();
+           resolve_module_confident(&db, &ModuleName::new_static("foo.bar.baz").unwrap()).unwrap();
        let baz_path = src.join("foo/bar/baz.py");

        assert_eq!(&src, baz_module.search_path(&db).unwrap());

@@ -1894,7 +2108,8 @@ mod tests {
            .with_site_packages_files(&[("foo.py", "")])
            .build();

-       let foo_module = resolve_module(&db, &ModuleName::new_static("foo").unwrap()).unwrap();
+       let foo_module =
+           resolve_module_confident(&db, &ModuleName::new_static("foo").unwrap()).unwrap();
        let foo_src_path = src.join("foo.py");

        assert_eq!(&src, foo_module.search_path(&db).unwrap());

@@ -1965,8 +2180,10 @@ mod tests {
            },
        );

-       let foo_module = resolve_module(&db, &ModuleName::new_static("foo").unwrap()).unwrap();
-       let bar_module = resolve_module(&db, &ModuleName::new_static("bar").unwrap()).unwrap();
+       let foo_module =
+           resolve_module_confident(&db, &ModuleName::new_static("foo").unwrap()).unwrap();
+       let bar_module =
+           resolve_module_confident(&db, &ModuleName::new_static("bar").unwrap()).unwrap();

        assert_ne!(foo_module, bar_module);

@@ -2001,7 +2218,7 @@ mod tests {
            .build();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
-       let foo_module = resolve_module(&db, &foo_module_name).unwrap();
+       let foo_module = resolve_module_confident(&db, &foo_module_name).unwrap();
        let foo_pieces = (
            foo_module.name(&db).clone(),
            foo_module.file(&db),

@@ -2022,7 +2239,7 @@ mod tests {
        // Re-query the foo module. The foo module should still be cached
        // because `bar.py` isn't relevant for resolving `foo`.

-       let foo_module2 = resolve_module(&db, &foo_module_name);
+       let foo_module2 = resolve_module_confident(&db, &foo_module_name);
        let foo_pieces2 = foo_module2.map(|foo_module2| {
            (
                foo_module2.name(&db).clone(),

@@ -2049,14 +2266,15 @@ mod tests {
        let foo_path = src.join("foo.py");

        let foo_module_name = ModuleName::new_static("foo").unwrap();
-       assert_eq!(resolve_module(&db, &foo_module_name), None);
+       assert_eq!(resolve_module_confident(&db, &foo_module_name), None);

        // Now write the foo file
        db.write_file(&foo_path, "x = 1")?;

        let foo_file = system_path_to_file(&db, &foo_path).expect("foo.py to exist");

-       let foo_module = resolve_module(&db, &foo_module_name).expect("Foo module to resolve");
+       let foo_module =
+           resolve_module_confident(&db, &foo_module_name).expect("Foo module to resolve");
        assert_eq!(foo_file, foo_module.file(&db).unwrap());

        Ok(())

@@ -2070,7 +2288,8 @@ mod tests {
        let TestCase { mut db, src, .. } = TestCaseBuilder::new().with_src_files(SRC).build();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
-       let foo_module = resolve_module(&db, &foo_module_name).expect("foo module to exist");
+       let foo_module =
+           resolve_module_confident(&db, &foo_module_name).expect("foo module to exist");
        let foo_init_path = src.join("foo/__init__.py");

        assert_eq!(&foo_init_path, foo_module.file(&db).unwrap().path(&db));

@@ -2082,7 +2301,8 @@ mod tests {
        File::sync_path(&mut db, &foo_init_path);
        File::sync_path(&mut db, foo_init_path.parent().unwrap());

-       let foo_module = resolve_module(&db, &foo_module_name).expect("Foo module to resolve");
+       let foo_module =
+           resolve_module_confident(&db, &foo_module_name).expect("Foo module to resolve");
        assert_eq!(&src.join("foo.py"), foo_module.file(&db).unwrap().path(&db));

        Ok(())

@@ -2108,7 +2328,7 @@ mod tests {
        let functools_module_name = ModuleName::new_static("functools").unwrap();
        let stdlib_functools_path = stdlib.join("functools.pyi");

-       let functools_module = resolve_module(&db, &functools_module_name).unwrap();
+       let functools_module = resolve_module_confident(&db, &functools_module_name).unwrap();
        assert_eq!(functools_module.search_path(&db).unwrap(), &stdlib);
        assert_eq!(
            Ok(functools_module.file(&db).unwrap()),

@@ -2121,7 +2341,7 @@ mod tests {
        let site_packages_functools_path = site_packages.join("functools.py");
        db.write_file(&site_packages_functools_path, "f: int")
            .unwrap();
-       let functools_module = resolve_module(&db, &functools_module_name).unwrap();
+       let functools_module = resolve_module_confident(&db, &functools_module_name).unwrap();
        let functools_file = functools_module.file(&db).unwrap();
        let functools_search_path = functools_module.search_path(&db).unwrap().clone();
        let events = db.take_salsa_events();

@@ -2156,7 +2376,7 @@ mod tests {
            .build();

        let functools_module_name = ModuleName::new_static("functools").unwrap();
-       let functools_module = resolve_module(&db, &functools_module_name).unwrap();
+       let functools_module = resolve_module_confident(&db, &functools_module_name).unwrap();
        assert_eq!(functools_module.search_path(&db).unwrap(), &stdlib);
        assert_eq!(
            Ok(functools_module.file(&db).unwrap()),

@@ -2167,7 +2387,7 @@ mod tests {
        // since first-party files take higher priority in module resolution:
        let src_functools_path = src.join("functools.py");
        db.write_file(&src_functools_path, "FOO: int").unwrap();
-       let functools_module = resolve_module(&db, &functools_module_name).unwrap();
+       let functools_module = resolve_module_confident(&db, &functools_module_name).unwrap();
        assert_eq!(functools_module.search_path(&db).unwrap(), &src);
        assert_eq!(
            Ok(functools_module.file(&db).unwrap()),

@@ -2198,7 +2418,7 @@ mod tests {
        let functools_module_name = ModuleName::new_static("functools").unwrap();
        let src_functools_path = src.join("functools.py");

-       let functools_module = resolve_module(&db, &functools_module_name).unwrap();
+       let functools_module = resolve_module_confident(&db, &functools_module_name).unwrap();
        assert_eq!(functools_module.search_path(&db).unwrap(), &src);
        assert_eq!(
            Ok(functools_module.file(&db).unwrap()),

@@ -2211,7 +2431,7 @@ mod tests {
            .remove_file(&src_functools_path)
            .unwrap();
        File::sync_path(&mut db, &src_functools_path);
-       let functools_module = resolve_module(&db, &functools_module_name).unwrap();
+       let functools_module = resolve_module_confident(&db, &functools_module_name).unwrap();
        assert_eq!(functools_module.search_path(&db).unwrap(), &stdlib);
        assert_eq!(
            Ok(functools_module.file(&db).unwrap()),

@@ -2233,8 +2453,8 @@ mod tests {
        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_bar_module_name = ModuleName::new_static("foo.bar").unwrap();

-       let foo_module = resolve_module(&db, &foo_module_name).unwrap();
-       let foo_bar_module = resolve_module(&db, &foo_bar_module_name).unwrap();
+       let foo_module = resolve_module_confident(&db, &foo_module_name).unwrap();
+       let foo_bar_module = resolve_module_confident(&db, &foo_bar_module_name).unwrap();

        assert_eq!(
            foo_module.file(&db).unwrap().path(&db),

@@ -2262,11 +2482,11 @@ mod tests {

        // Lines with leading whitespace in `.pth` files do not parse:
        let foo_module_name = ModuleName::new_static("foo").unwrap();
-       assert_eq!(resolve_module(&db, &foo_module_name), None);
+       assert_eq!(resolve_module_confident(&db, &foo_module_name), None);

        // Lines with trailing whitespace in `.pth` files do:
        let bar_module_name = ModuleName::new_static("bar").unwrap();
-       let bar_module = resolve_module(&db, &bar_module_name).unwrap();
+       let bar_module = resolve_module_confident(&db, &bar_module_name).unwrap();
        assert_eq!(
            bar_module.file(&db).unwrap().path(&db),
            &FilePath::system("/y/src/bar.py")

@@ -2285,7 +2505,7 @@ mod tests {
            .build();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
-       let foo_module = resolve_module(&db, &foo_module_name).unwrap();
+       let foo_module = resolve_module_confident(&db, &foo_module_name).unwrap();

        assert_eq!(
            foo_module.file(&db).unwrap().path(&db),

@@ -2333,10 +2553,10 @@ not_a_directory
        let b_module_name = ModuleName::new_static("b").unwrap();
        let spam_module_name = ModuleName::new_static("spam").unwrap();

-       let foo_module = resolve_module(&db, &foo_module_name).unwrap();
-       let a_module = resolve_module(&db, &a_module_name).unwrap();
-       let b_module = resolve_module(&db, &b_module_name).unwrap();
-       let spam_module = resolve_module(&db, &spam_module_name).unwrap();
+       let foo_module = resolve_module_confident(&db, &foo_module_name).unwrap();
+       let a_module = resolve_module_confident(&db, &a_module_name).unwrap();
+       let b_module = resolve_module_confident(&db, &b_module_name).unwrap();
+       let spam_module = resolve_module_confident(&db, &spam_module_name).unwrap();

        assert_eq!(
            foo_module.file(&db).unwrap().path(&db),

@@ -2370,14 +2590,14 @@ not_a_directory
        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let bar_module_name = ModuleName::new_static("bar").unwrap();

-       let foo_module = resolve_module(&db, &foo_module_name).unwrap();
+       let foo_module = resolve_module_confident(&db, &foo_module_name).unwrap();
        assert_eq!(
            foo_module.file(&db).unwrap().path(&db),
            &FilePath::system("/x/src/foo.py")
        );

        db.clear_salsa_events();
-       let bar_module = resolve_module(&db, &bar_module_name).unwrap();
+       let bar_module = resolve_module_confident(&db, &bar_module_name).unwrap();
        assert_eq!(
            bar_module.file(&db).unwrap().path(&db),
            &FilePath::system("/y/src/bar.py")

@@ -2407,7 +2627,7 @@ not_a_directory
        db.write_files(x_directory).unwrap();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
-       let foo_module = resolve_module(&db, &foo_module_name).unwrap();
+       let foo_module = resolve_module_confident(&db, &foo_module_name).unwrap();
        assert_eq!(
            foo_module.file(&db).unwrap().path(&db),
            &FilePath::system("/x/src/foo.py")

@@ -2419,7 +2639,7 @@ not_a_directory

        File::sync_path(&mut db, &site_packages.join("_foo.pth"));

-       assert_eq!(resolve_module(&db, &foo_module_name), None);
+       assert_eq!(resolve_module_confident(&db, &foo_module_name), None);
    }

    #[test]

@@ -2434,7 +2654,7 @@ not_a_directory
        db.write_files(x_directory).unwrap();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
-       let foo_module = resolve_module(&db, &foo_module_name).unwrap();
+       let foo_module = resolve_module_confident(&db, &foo_module_name).unwrap();
        let src_path = SystemPathBuf::from("/x/src");
        assert_eq!(
            foo_module.file(&db).unwrap().path(&db),

@@ -2447,7 +2667,7 @@ not_a_directory
        db.memory_file_system().remove_directory(&src_path).unwrap();
        File::sync_path(&mut db, &src_path.join("foo.py"));
        File::sync_path(&mut db, &src_path);
-       assert_eq!(resolve_module(&db, &foo_module_name), None);
+       assert_eq!(resolve_module_confident(&db, &foo_module_name), None);
    }

    #[test]

@@ -2507,7 +2727,7 @@ not_a_directory
        // The editable installs discovered from the `.pth` file in the first `site-packages` directory
        // take precedence over the second `site-packages` directory...
        let a_module_name = ModuleName::new_static("a").unwrap();
-       let a_module = resolve_module(&db, &a_module_name).unwrap();
+       let a_module = resolve_module_confident(&db, &a_module_name).unwrap();
        assert_eq!(
            a_module.file(&db).unwrap().path(&db),
            &editable_install_location

@@ -2521,7 +2741,7 @@ not_a_directory
        // ...But now that the `.pth` file in the first `site-packages` directory has been deleted,
        // the editable install no longer exists, so the module now resolves to the file in the
        // second `site-packages` directory
-       let a_module = resolve_module(&db, &a_module_name).unwrap();
+       let a_module = resolve_module_confident(&db, &a_module_name).unwrap();
        assert_eq!(
            a_module.file(&db).unwrap().path(&db),
            &system_site_packages_location

@@ -2579,12 +2799,12 @@ not_a_directory

        // Now try to resolve the module `A` (note the capital `A` instead of `a`).
        let a_module_name = ModuleName::new_static("A").unwrap();
-       assert_eq!(resolve_module(&db, &a_module_name), None);
+       assert_eq!(resolve_module_confident(&db, &a_module_name), None);

        // Now lookup the same module using the lowercase `a` and it should
        // resolve to the file in the system site-packages
        let a_module_name = ModuleName::new_static("a").unwrap();
-       let a_module = resolve_module(&db, &a_module_name).expect("a.py to resolve");
+       let a_module = resolve_module_confident(&db, &a_module_name).expect("a.py to resolve");
        assert!(
            a_module
                .file(&db)
@@ -1,7 +1,7 @@
 use ruff_db::files::File;

 use crate::dunder_all::dunder_all_names;
-use crate::module_resolver::{KnownModule, file_to_module};
+use crate::module_resolver::{KnownModule, file_to_module, resolve_module_confident};
 use crate::semantic_index::definition::{Definition, DefinitionState};
 use crate::semantic_index::place::{PlaceExprRef, ScopedPlaceId};
 use crate::semantic_index::scope::ScopeId;

@@ -14,7 +14,7 @@ use crate::types::{
     Truthiness, Type, TypeAndQualifiers, TypeQualifiers, UnionBuilder, UnionType, binding_type,
     declaration_type, todo_type,
 };
-use crate::{Db, FxOrderSet, Program, resolve_module};
+use crate::{Db, FxOrderSet, Program};

 pub(crate) use implicit_globals::{
     module_type_implicit_global_declaration, module_type_implicit_global_symbol,

@@ -379,7 +379,7 @@ pub(crate) fn imported_symbol<'db>(
 /// and should not be used when a symbol is being explicitly imported from the `builtins` module
 /// (e.g. `from builtins import int`).
 pub(crate) fn builtins_symbol<'db>(db: &'db dyn Db, symbol: &str) -> PlaceAndQualifiers<'db> {
-    resolve_module(db, &KnownModule::Builtins.name())
+    resolve_module_confident(db, &KnownModule::Builtins.name())
         .and_then(|module| {
             let file = module.file(db)?;
             Some(

@@ -409,7 +409,7 @@ pub(crate) fn known_module_symbol<'db>(
     known_module: KnownModule,
     symbol: &str,
 ) -> PlaceAndQualifiers<'db> {
-    resolve_module(db, &known_module.name())
+    resolve_module_confident(db, &known_module.name())
         .and_then(|module| {
             let file = module.file(db)?;
             Some(imported_symbol(db, file, symbol, None))

@@ -448,7 +448,7 @@ pub(crate) fn builtins_module_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
 ///
 /// Can return `None` if a custom typeshed is used that is missing the core module in question.
 fn core_module_scope(db: &dyn Db, core_module: KnownModule) -> Option<ScopeId<'_>> {
-    let module = resolve_module(db, &core_module.name())?;
+    let module = resolve_module_confident(db, &core_module.name())?;
     Some(global_scope(db, module.file(db)?))
 }
@@ -1582,7 +1582,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {
                     continue;
                 };

-                let Some(module) = resolve_module(self.db, &module_name) else {
+                let Some(module) = resolve_module(self.db, self.file, &module_name) else {
                     continue;
                 };


@@ -1616,9 +1616,12 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> {

                         let star_import_predicate = self.add_predicate(star_import.into());

+                        let associated_member_ids = self.place_tables[self.current_scope()]
+                            .associated_place_ids(ScopedPlaceId::Symbol(symbol_id));
+
                         let pre_definition = self
                             .current_use_def_map()
-                            .single_symbol_place_snapshot(symbol_id);
+                            .single_symbol_snapshot(symbol_id, associated_member_ids);

                         let pre_definition_reachability =
                             self.current_use_def_map().reachability;
@@ -250,7 +250,9 @@ impl<'db> Visitor<'db> for ExportFinder<'db> {
                 for export in
                     ModuleName::from_import_statement(self.db, self.file, node)
                         .ok()
-                        .and_then(|module_name| resolve_module(self.db, &module_name))
+                        .and_then(|module_name| {
+                            resolve_module(self.db, self.file, &module_name)
+                        })
                         .iter()
                         .flat_map(|module| {
                             module
@@ -801,6 +801,13 @@ pub(super) struct FlowSnapshot {
     reachability: ScopedReachabilityConstraintId,
 }

+/// A snapshot of the state of a single symbol (e.g. `obj`) and all of its associated members
+/// (e.g. `obj.attr`, `obj["key"]`).
+pub(super) struct SingleSymbolSnapshot {
+    symbol_state: PlaceState,
+    associated_member_states: FxHashMap<ScopedMemberId, PlaceState>,
+}
+
 #[derive(Debug)]
 pub(super) struct UseDefMapBuilder<'db> {
     /// Append-only array of [`DefinitionState`].

@@ -991,13 +998,26 @@ impl<'db> UseDefMapBuilder<'db> {
         }
     }

-    /// Snapshot the state of a single place at the current point in control flow.
+    /// Snapshot the state of a single symbol and all of its associated members, at the current
+    /// point in control flow.
     ///
     /// This is only used for `*`-import reachability constraints, which are handled differently
     /// to most other reachability constraints. See the doc-comment for
     /// [`Self::record_and_negate_star_import_reachability_constraint`] for more details.
-    pub(super) fn single_symbol_place_snapshot(&self, symbol: ScopedSymbolId) -> PlaceState {
-        self.symbol_states[symbol].clone()
+    pub(super) fn single_symbol_snapshot(
+        &self,
+        symbol: ScopedSymbolId,
+        associated_member_ids: &[ScopedMemberId],
+    ) -> SingleSymbolSnapshot {
+        let symbol_state = self.symbol_states[symbol].clone();
+        let mut associated_member_states = FxHashMap::default();
+        for &member_id in associated_member_ids {
+            associated_member_states.insert(member_id, self.member_states[member_id].clone());
+        }
+        SingleSymbolSnapshot {
+            symbol_state,
+            associated_member_states,
+        }
     }

     /// This method exists solely for handling `*`-import reachability constraints.

@@ -1033,14 +1053,14 @@ impl<'db> UseDefMapBuilder<'db> {
         &mut self,
         reachability_id: ScopedReachabilityConstraintId,
         symbol: ScopedSymbolId,
-        pre_definition_state: PlaceState,
+        pre_definition: SingleSymbolSnapshot,
     ) {
         let negated_reachability_id = self
             .reachability_constraints
             .add_not_constraint(reachability_id);

         let mut post_definition_state =
-            std::mem::replace(&mut self.symbol_states[symbol], pre_definition_state);
+            std::mem::replace(&mut self.symbol_states[symbol], pre_definition.symbol_state);

         post_definition_state
             .record_reachability_constraint(&mut self.reachability_constraints, reachability_id);

@@ -1055,6 +1075,30 @@ impl<'db> UseDefMapBuilder<'db> {
             &mut self.narrowing_constraints,
             &mut self.reachability_constraints,
         );
+
+        // And similarly for all associated members:
+        for (member_id, pre_definition_member_state) in pre_definition.associated_member_states {
+            let mut post_definition_state = std::mem::replace(
+                &mut self.member_states[member_id],
+                pre_definition_member_state,
+            );
+
+            post_definition_state.record_reachability_constraint(
+                &mut self.reachability_constraints,
+                reachability_id,
+            );
+
+            self.member_states[member_id].record_reachability_constraint(
+                &mut self.reachability_constraints,
+                negated_reachability_id,
+            );
+
+            self.member_states[member_id].merge(
+                post_definition_state,
+                &mut self.narrowing_constraints,
+                &mut self.reachability_constraints,
+            );
+        }
     }

     pub(super) fn record_reachability_constraint(
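The snapshot now has to cover a symbol plus whatever member states (`obj.attr`-style places) hang off it, so that a `*`-import can restore and re-merge all of them under the same pair of reachability constraints. A toy sketch of the capture step, using plain standard-library types instead of the builder's real state machinery:

    use std::collections::HashMap;

    // Hypothetical stand-ins: `State` and `MemberId` are illustrative only.
    #[derive(Clone)]
    struct State(&'static str);
    type MemberId = usize;

    struct Snapshot {
        symbol_state: State,
        associated_member_states: HashMap<MemberId, State>,
    }

    fn snapshot(
        symbol_state: &State,
        member_states: &HashMap<MemberId, State>,
        associated_member_ids: &[MemberId],
    ) -> Snapshot {
        Snapshot {
            symbol_state: symbol_state.clone(),
            // Capture only the members associated with this symbol,
            // mirroring `single_symbol_snapshot` above.
            associated_member_states: associated_member_ids
                .iter()
                .filter_map(|id| member_states.get(id).map(|state| (*id, state.clone())))
                .collect(),
        }
    }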
@@ -100,14 +100,14 @@ impl<'db> SemanticModel<'db> {
     pub fn resolve_module(&self, module: Option<&str>, level: u32) -> Option<Module<'db>> {
         let module_name =
             ModuleName::from_identifier_parts(self.db, self.file, module, level).ok()?;
-        resolve_module(self.db, &module_name)
+        resolve_module(self.db, self.file, &module_name)
     }

     /// Returns completions for symbols available in a `import <CURSOR>` context.
     pub fn import_completions(&self) -> Vec<Completion<'db>> {
         let typing_extensions = ModuleName::new("typing_extensions").unwrap();
         let is_typing_extensions_available = self.file.is_stub(self.db)
-            || resolve_real_shadowable_module(self.db, &typing_extensions).is_some();
+            || resolve_real_shadowable_module(self.db, self.file, &typing_extensions).is_some();
         list_modules(self.db)
             .into_iter()
             .filter(|module| {

@@ -146,7 +146,7 @@ impl<'db> SemanticModel<'db> {
         &self,
         module_name: &ModuleName,
     ) -> Vec<Completion<'db>> {
-        let Some(module) = resolve_module(self.db, module_name) else {
+        let Some(module) = resolve_module(self.db, self.file, module_name) else {
             tracing::debug!("Could not resolve module from `{module_name:?}`");
             return vec![];
         };

@@ -156,7 +156,7 @@ impl<'db> SemanticModel<'db> {
     /// Returns completions for symbols available in the given module as if
     /// it were imported by this model's `File`.
     fn module_completions(&self, module_name: &ModuleName) -> Vec<Completion<'db>> {
-        let Some(module) = resolve_module(self.db, module_name) else {
+        let Some(module) = resolve_module(self.db, self.file, module_name) else {
             tracing::debug!("Could not resolve module from `{module_name:?}`");
             return vec![];
         };
@@ -413,9 +413,15 @@ pub fn create_suppression_fix(db: &dyn Db, file: File, id: LintId, range: TextRa
     }

     // Always insert a new suppression at the end of the range to avoid having to deal with multiline strings
-    // etc.
+    // etc. Also make sure to not pass a sub-token range to `Tokens::after`.
     let parsed = parsed_module(db, file).load(db);
-    let tokens_after = parsed.tokens().after(range.end());
+    let tokens = parsed.tokens().at_offset(range.end());
+    let token_range = match tokens {
+        ruff_python_ast::token::TokenAt::None => range,
+        ruff_python_ast::token::TokenAt::Single(token) => token.range(),
+        ruff_python_ast::token::TokenAt::Between(..) => range,
+    };
+    let tokens_after = parsed.tokens().after(token_range.end());

     // Same as for `line_end` when building up the `suppressions`: Ignore newlines
     // in multiline-strings, inside f-strings, or after a line continuation because we can't
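The change above widens a diagnostic offset that lands inside a token out to that token's own range before asking for the tokens that follow, since the end of a sub-token range is not a valid token boundary. A small sketch of the same clamping idea, with a hypothetical stand-in token type rather than the real ruff types:

    // Illustrative only: `Tok` and `snap_to_token_boundary` are hypothetical.
    #[derive(Clone, Copy)]
    struct Tok {
        start: u32,
        end: u32,
    }

    fn snap_to_token_boundary(offset: u32, tokens: &[Tok]) -> u32 {
        tokens
            .iter()
            // An offset strictly inside a token is not a valid boundary;
            // widen it to the end of the token that contains it.
            .find(|tok| tok.start < offset && offset < tok.end)
            .map(|tok| tok.end)
            .unwrap_or(offset)
    }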
@ -44,6 +44,7 @@ use crate::semantic_index::scope::ScopeId;
|
||||||
use crate::semantic_index::{imported_modules, place_table, semantic_index};
|
use crate::semantic_index::{imported_modules, place_table, semantic_index};
|
||||||
use crate::suppression::check_suppressions;
|
use crate::suppression::check_suppressions;
|
||||||
use crate::types::bound_super::BoundSuperType;
|
use crate::types::bound_super::BoundSuperType;
|
||||||
|
use crate::types::builder::RecursivelyDefined;
|
||||||
use crate::types::call::{Binding, Bindings, CallArguments, CallableBinding};
|
use crate::types::call::{Binding, Bindings, CallArguments, CallableBinding};
|
||||||
pub(crate) use crate::types::class_base::ClassBase;
|
pub(crate) use crate::types::class_base::ClassBase;
|
||||||
use crate::types::constraints::{
|
use crate::types::constraints::{
|
||||||
|
|
@ -67,7 +68,7 @@ pub(crate) use crate::types::narrow::infer_narrowing_constraint;
|
||||||
use crate::types::newtype::NewType;
|
use crate::types::newtype::NewType;
|
||||||
pub(crate) use crate::types::signatures::{Parameter, Parameters};
|
pub(crate) use crate::types::signatures::{Parameter, Parameters};
|
||||||
use crate::types::signatures::{ParameterForm, walk_signature};
|
use crate::types::signatures::{ParameterForm, walk_signature};
|
||||||
use crate::types::tuple::{TupleSpec, TupleSpecBuilder};
|
use crate::types::tuple::{Tuple, TupleSpec, TupleSpecBuilder};
|
||||||
pub(crate) use crate::types::typed_dict::{TypedDictParams, TypedDictType, walk_typed_dict_type};
|
pub(crate) use crate::types::typed_dict::{TypedDictParams, TypedDictType, walk_typed_dict_type};
|
||||||
pub use crate::types::variance::TypeVarVariance;
|
pub use crate::types::variance::TypeVarVariance;
|
||||||
use crate::types::variance::VarianceInferable;
|
use crate::types::variance::VarianceInferable;
|
||||||
|
|
@ -5472,9 +5473,9 @@ impl<'db> Type<'db> {
|
||||||
Some(TypeVarBoundOrConstraints::UpperBound(bound)) => {
|
Some(TypeVarBoundOrConstraints::UpperBound(bound)) => {
|
||||||
bound.try_bool_impl(db, allow_short_circuit, visitor)?
|
bound.try_bool_impl(db, allow_short_circuit, visitor)?
|
||||||
}
|
}
|
||||||
Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
|
Some(TypeVarBoundOrConstraints::Constraints(constraints)) => constraints
|
||||||
try_union(constraints)?
|
.as_type(db)
|
||||||
}
|
.try_bool_impl(db, allow_short_circuit, visitor)?,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@@ -6524,7 +6525,7 @@ impl<'db> Type<'db> {
                 TypeVarBoundOrConstraints::UpperBound(bound) => {
                     non_async_special_case(db, bound)
                 }
-                TypeVarBoundOrConstraints::Constraints(union) => non_async_special_case(db, Type::Union(union)),
+                TypeVarBoundOrConstraints::Constraints(constraints) => non_async_special_case(db, constraints.as_type(db)),
             },
             Type::Union(union) => {
                 let elements = union.elements(db);
@@ -7403,7 +7404,7 @@ impl<'db> Type<'db> {
             | SpecialFormType::Union
             | SpecialFormType::Intersection => Err(InvalidTypeExpressionError {
                 invalid_expressions: smallvec::smallvec_inline![
-                    InvalidTypeExpression::RequiresArguments(*self)
+                    InvalidTypeExpression::RequiresArguments(*special_form)
                 ],
                 fallback_type: Type::unknown(),
             }),
@@ -7429,7 +7430,7 @@ impl<'db> Type<'db> {
             | SpecialFormType::Unpack
             | SpecialFormType::CallableTypeOf => Err(InvalidTypeExpressionError {
                 invalid_expressions: smallvec::smallvec_inline![
-                    InvalidTypeExpression::RequiresOneArgument(*self)
+                    InvalidTypeExpression::RequiresOneArgument(*special_form)
                 ],
                 fallback_type: Type::unknown(),
             }),
@@ -7437,7 +7438,7 @@ impl<'db> Type<'db> {
             SpecialFormType::Annotated | SpecialFormType::Concatenate => {
                 Err(InvalidTypeExpressionError {
                     invalid_expressions: smallvec::smallvec_inline![
-                        InvalidTypeExpression::RequiresTwoArguments(*self)
+                        InvalidTypeExpression::RequiresTwoArguments(*special_form)
                     ],
                     fallback_type: Type::unknown(),
                 })
@@ -8606,12 +8607,9 @@ impl<'db> TypeMapping<'_, 'db> {
             | TypeMapping::Materialize(_)
             | TypeMapping::ReplaceParameterDefaults
             | TypeMapping::EagerExpansion => context,
-            TypeMapping::BindSelf { .. } => GenericContext::from_typevar_instances(
-                db,
-                context
-                    .variables(db)
-                    .filter(|var| !var.typevar(db).is_self(db)),
-            ),
+            TypeMapping::BindSelf {
+                binding_context, ..
+            } => context.remove_self(db, *binding_context),
             TypeMapping::ReplaceSelf { new_upper_bound } => GenericContext::from_typevar_instances(
                 db,
                 context.variables(db).map(|typevar| {
@@ -9159,11 +9157,11 @@ impl<'db> InvalidTypeExpressionError<'db> {
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
 enum InvalidTypeExpression<'db> {
     /// Some types always require exactly one argument when used in a type expression
-    RequiresOneArgument(Type<'db>),
+    RequiresOneArgument(SpecialFormType),
     /// Some types always require at least one argument when used in a type expression
-    RequiresArguments(Type<'db>),
+    RequiresArguments(SpecialFormType),
     /// Some types always require at least two arguments when used in a type expression
-    RequiresTwoArguments(Type<'db>),
+    RequiresTwoArguments(SpecialFormType),
     /// The `Protocol` class is invalid in type expressions
     Protocol,
     /// Same for `Generic`
@@ -9203,20 +9201,17 @@ impl<'db> InvalidTypeExpression<'db> {
         impl std::fmt::Display for Display<'_> {
             fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                 match self.error {
-                    InvalidTypeExpression::RequiresOneArgument(ty) => write!(
+                    InvalidTypeExpression::RequiresOneArgument(special_form) => write!(
                         f,
-                        "`{ty}` requires exactly one argument when used in a type expression",
-                        ty = ty.display(self.db)
+                        "`{special_form}` requires exactly one argument when used in a type expression",
                     ),
-                    InvalidTypeExpression::RequiresArguments(ty) => write!(
+                    InvalidTypeExpression::RequiresArguments(special_form) => write!(
                         f,
-                        "`{ty}` requires at least one argument when used in a type expression",
-                        ty = ty.display(self.db)
+                        "`{special_form}` requires at least one argument when used in a type expression",
                     ),
-                    InvalidTypeExpression::RequiresTwoArguments(ty) => write!(
+                    InvalidTypeExpression::RequiresTwoArguments(special_form) => write!(
                         f,
-                        "`{ty}` requires at least two arguments when used in a type expression",
-                        ty = ty.display(self.db)
+                        "`{special_form}` requires at least two arguments when used in a type expression",
                     ),
                     InvalidTypeExpression::Protocol => {
                         f.write_str("`typing.Protocol` is not allowed in type expressions")
@@ -9668,7 +9663,7 @@ impl<'db> TypeVarInstance<'db> {
                 TypeVarBoundOrConstraints::UpperBound(upper_bound.to_instance(db)?)
             }
             TypeVarBoundOrConstraints::Constraints(constraints) => {
-                TypeVarBoundOrConstraints::Constraints(constraints.to_instance(db)?.as_union()?)
+                TypeVarBoundOrConstraints::Constraints(constraints.to_instance(db)?)
             }
         };
         let identity = TypeVarIdentity::new(
@@ -9703,6 +9698,7 @@ impl<'db> TypeVarInstance<'db> {
     }

     #[salsa::tracked(
+        cycle_fn=lazy_bound_or_constraints_cycle_recover,
         cycle_initial=lazy_bound_or_constraints_cycle_initial,
         heap_size=ruff_memory_usage::heap_size
     )]
@@ -9732,28 +9728,37 @@ impl<'db> TypeVarInstance<'db> {
     }

     #[salsa::tracked(
+        cycle_fn=lazy_bound_or_constraints_cycle_recover,
         cycle_initial=lazy_bound_or_constraints_cycle_initial,
         heap_size=ruff_memory_usage::heap_size
     )]
     fn lazy_constraints(self, db: &'db dyn Db) -> Option<TypeVarBoundOrConstraints<'db>> {
         let definition = self.definition(db)?;
         let module = parsed_module(db, definition.file(db)).load(db);
-        let ty = match definition.kind(db) {
+        let constraints = match definition.kind(db) {
             // PEP 695 typevar
             DefinitionKind::TypeVar(typevar) => {
                 let typevar_node = typevar.node(&module);
-                definition_expression_type(db, definition, typevar_node.bound.as_ref()?)
-                    .as_union()?
+                let bound =
+                    definition_expression_type(db, definition, typevar_node.bound.as_ref()?);
+                let constraints = if let Some(tuple) = bound
+                    .as_nominal_instance()
+                    .and_then(|instance| instance.tuple_spec(db))
+                {
+                    if let Tuple::Fixed(tuple) = tuple.into_owned() {
+                        tuple.owned_elements()
+                    } else {
+                        vec![Type::unknown()].into_boxed_slice()
+                    }
+                } else {
+                    vec![Type::unknown()].into_boxed_slice()
+                };
+                TypeVarConstraints::new(db, constraints)
             }
             // legacy typevar
             DefinitionKind::Assignment(assignment) => {
                 let call_expr = assignment.value(&module).as_call_expr()?;
-                // We don't use `UnionType::from_elements` or `UnionBuilder` here,
-                // because we don't want to simplify the list of constraints as we would with
-                // an actual union type.
-                // TODO: We probably shouldn't use `UnionType` to store these at all? TypeVar
-                // constraints are not a union.
-                UnionType::new(
+                TypeVarConstraints::new(
                     db,
                     call_expr
                         .arguments
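Note (illustrative, not part of this commit): the hunk above replaces the old `UnionType`-based storage for TypeVar constraints with a plain ordered list of types. A minimal standalone Rust sketch of that idea, using hypothetical `Type`/`Constraints` stand-ins rather than the real ty types:

    // Illustrative sketch only: constraints keep their declared order and are
    // never simplified, unlike the elements of a real union type.
    #[derive(Debug, Clone, PartialEq)]
    enum Type {
        Int,
        Str,
    }

    // Hypothetical stand-in for the interned `TypeVarConstraints` in the diff.
    struct Constraints {
        elements: Box<[Type]>,
    }

    impl Constraints {
        fn new(elements: Vec<Type>) -> Self {
            Self {
                elements: elements.into_boxed_slice(),
            }
        }

        // Union-style conversion is where deduplication would happen; the
        // stored constraint list itself stays untouched.
        fn as_union_like(&self) -> Vec<Type> {
            let mut out = Vec::new();
            for ty in self.elements.iter() {
                if !out.contains(ty) {
                    out.push(ty.clone());
                }
            }
            out
        }
    }

    fn main() {
        // Mirrors `TypeVar("T", int, str)`: two constraints, kept distinct.
        let t = Constraints::new(vec![Type::Int, Type::Str]);
        assert_eq!(t.elements.len(), 2);
        assert_eq!(t.as_union_like(), vec![Type::Int, Type::Str]);
    }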
@@ -9767,7 +9772,7 @@ impl<'db> TypeVarInstance<'db> {
             _ => return None,
         };

-        if ty
+        if constraints
             .elements(db)
             .iter()
             .any(|ty| self.type_is_self_referential(db, *ty))
@@ -9775,7 +9780,7 @@ impl<'db> TypeVarInstance<'db> {
             return None;
         }

-        Some(TypeVarBoundOrConstraints::Constraints(ty))
+        Some(TypeVarBoundOrConstraints::Constraints(constraints))
     }

     #[salsa::tracked(cycle_fn=lazy_default_cycle_recover, cycle_initial=lazy_default_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
@@ -9833,7 +9838,23 @@ fn lazy_bound_or_constraints_cycle_initial<'db>(
     None
 }

-#[allow(clippy::ref_option)]
+#[expect(clippy::ref_option)]
+fn lazy_bound_or_constraints_cycle_recover<'db>(
+    db: &'db dyn Db,
+    cycle: &salsa::Cycle,
+    previous: &Option<TypeVarBoundOrConstraints<'db>>,
+    current: Option<TypeVarBoundOrConstraints<'db>>,
+    _typevar: TypeVarInstance<'db>,
+) -> Option<TypeVarBoundOrConstraints<'db>> {
+    // Normalize the bounds/constraints to ensure cycle convergence.
+    match (previous, current) {
+        (Some(prev), Some(current)) => Some(current.cycle_normalized(db, *prev, cycle)),
+        (None, Some(current)) => Some(current.recursive_type_normalized(db, cycle)),
+        (_, None) => None,
+    }
+}
+
+#[expect(clippy::ref_option)]
 fn lazy_default_cycle_recover<'db>(
     db: &'db dyn Db,
     cycle: &salsa::Cycle,
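Note (illustrative, not part of this commit): the new `lazy_bound_or_constraints_cycle_recover` hook feeds each fixpoint iteration's result through normalization against the previous value so that salsa's cycle iteration can converge. A toy sketch of that fixpoint-with-normalization pattern, with made-up names and no salsa involved:

    // Illustrative sketch only (not the salsa API): a fixpoint loop that
    // normalizes each new value against the previous one until it stops changing.
    fn fixpoint<T: PartialEq + Clone>(
        initial: T,
        step: impl Fn(&T) -> T,
        normalize: impl Fn(&T, T) -> T,
        max_iterations: usize,
    ) -> T {
        let mut previous = initial;
        for _ in 0..max_iterations {
            let next = normalize(&previous, step(&previous));
            if next == previous {
                return next; // converged
            }
            previous = next;
        }
        previous
    }

    fn main() {
        // Toy "type": a capped counter. Normalization clamps growth, which is
        // loosely analogous to replacing divergent recursive parts of a type.
        let result = fixpoint(0u32, |n| n + 3, |_prev, next| next.min(10), 100);
        assert_eq!(result, 10);
    }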
@@ -9841,6 +9862,7 @@ fn lazy_default_cycle_recover<'db>(
     default: Option<Type<'db>>,
     _typevar: TypeVarInstance<'db>,
 ) -> Option<Type<'db>> {
+    // Normalize the default to ensure cycle convergence.
     match (previous_default, default) {
         (Some(prev), Some(default)) => Some(default.cycle_normalized(db, *prev, cycle)),
         (None, Some(default)) => Some(default.recursive_type_normalized(db, cycle)),
@ -10168,10 +10190,133 @@ impl<'db> From<TypeVarBoundOrConstraints<'db>> for TypeVarBoundOrConstraintsEval
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Type variable constraints (e.g. `T: (int, str)`).
|
||||||
|
/// This is structurally identical to [`UnionType`], except that it does not perform simplification and preserves the element types.
|
||||||
|
#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)]
|
||||||
|
pub struct TypeVarConstraints<'db> {
|
||||||
|
#[returns(ref)]
|
||||||
|
elements: Box<[Type<'db>]>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl get_size2::GetSize for TypeVarConstraints<'_> {}
|
||||||
|
|
||||||
|
fn walk_type_var_constraints<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
|
||||||
|
db: &'db dyn Db,
|
||||||
|
constraints: TypeVarConstraints<'db>,
|
||||||
|
visitor: &V,
|
||||||
|
) {
|
||||||
|
for ty in constraints.elements(db) {
|
||||||
|
visitor.visit_type(db, *ty);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'db> TypeVarConstraints<'db> {
|
||||||
|
fn as_type(self, db: &'db dyn Db) -> Type<'db> {
|
||||||
|
let mut builder = UnionBuilder::new(db);
|
||||||
|
for ty in self.elements(db) {
|
||||||
|
builder = builder.add(*ty);
|
||||||
|
}
|
||||||
|
builder.build()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn to_instance(self, db: &'db dyn Db) -> Option<TypeVarConstraints<'db>> {
|
||||||
|
let mut instance_elements = Vec::new();
|
||||||
|
for ty in self.elements(db) {
|
||||||
|
instance_elements.push(ty.to_instance(db)?);
|
||||||
|
}
|
||||||
|
Some(TypeVarConstraints::new(
|
||||||
|
db,
|
||||||
|
instance_elements.into_boxed_slice(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn map(self, db: &'db dyn Db, transform_fn: impl FnMut(&Type<'db>) -> Type<'db>) -> Self {
|
||||||
|
let mapped = self
|
||||||
|
.elements(db)
|
||||||
|
.iter()
|
||||||
|
.map(transform_fn)
|
||||||
|
.collect::<Box<_>>();
|
||||||
|
TypeVarConstraints::new(db, mapped)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn map_with_boundness_and_qualifiers(
|
||||||
|
self,
|
||||||
|
db: &'db dyn Db,
|
||||||
|
mut transform_fn: impl FnMut(&Type<'db>) -> PlaceAndQualifiers<'db>,
|
||||||
|
) -> PlaceAndQualifiers<'db> {
|
||||||
|
let mut builder = UnionBuilder::new(db);
|
||||||
|
let mut qualifiers = TypeQualifiers::empty();
|
||||||
|
|
||||||
|
let mut all_unbound = true;
|
||||||
|
let mut possibly_unbound = false;
|
||||||
|
let mut origin = TypeOrigin::Declared;
|
||||||
|
for ty in self.elements(db) {
|
||||||
|
let PlaceAndQualifiers {
|
||||||
|
place: ty_member,
|
||||||
|
qualifiers: new_qualifiers,
|
||||||
|
} = transform_fn(ty);
|
||||||
|
qualifiers |= new_qualifiers;
|
||||||
|
match ty_member {
|
||||||
|
Place::Undefined => {
|
||||||
|
possibly_unbound = true;
|
||||||
|
}
|
||||||
|
Place::Defined(ty_member, member_origin, member_boundness) => {
|
||||||
|
origin = origin.merge(member_origin);
|
||||||
|
if member_boundness == Definedness::PossiblyUndefined {
|
||||||
|
possibly_unbound = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
all_unbound = false;
|
||||||
|
builder = builder.add(ty_member);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
PlaceAndQualifiers {
|
||||||
|
place: if all_unbound {
|
||||||
|
Place::Undefined
|
||||||
|
} else {
|
||||||
|
Place::Defined(
|
||||||
|
builder.build(),
|
||||||
|
origin,
|
||||||
|
if possibly_unbound {
|
||||||
|
Definedness::PossiblyUndefined
|
||||||
|
} else {
|
||||||
|
Definedness::AlwaysDefined
|
||||||
|
},
|
||||||
|
)
|
||||||
|
},
|
||||||
|
qualifiers,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self {
|
||||||
|
let normalized = self
|
||||||
|
.elements(db)
|
||||||
|
.iter()
|
||||||
|
.map(|ty| ty.normalized_impl(db, visitor))
|
||||||
|
.collect::<Box<_>>();
|
||||||
|
TypeVarConstraints::new(db, normalized)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn materialize_impl(
|
||||||
|
self,
|
||||||
|
db: &'db dyn Db,
|
||||||
|
materialization_kind: MaterializationKind,
|
||||||
|
visitor: &ApplyTypeMappingVisitor<'db>,
|
||||||
|
) -> Self {
|
||||||
|
let materialized = self
|
||||||
|
.elements(db)
|
||||||
|
.iter()
|
||||||
|
.map(|ty| ty.materialize(db, materialization_kind, visitor))
|
||||||
|
.collect::<Box<_>>();
|
||||||
|
TypeVarConstraints::new(db, materialized)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, salsa::Update, get_size2::GetSize)]
|
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, salsa::Update, get_size2::GetSize)]
|
||||||
pub enum TypeVarBoundOrConstraints<'db> {
|
pub enum TypeVarBoundOrConstraints<'db> {
|
||||||
UpperBound(Type<'db>),
|
UpperBound(Type<'db>),
|
||||||
Constraints(UnionType<'db>),
|
Constraints(TypeVarConstraints<'db>),
|
||||||
}
|
}
|
||||||
|
|
||||||
fn walk_type_var_bounds<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
|
fn walk_type_var_bounds<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
|
||||||
|
|
@ -10182,7 +10327,7 @@ fn walk_type_var_bounds<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
|
||||||
match bounds {
|
match bounds {
|
||||||
TypeVarBoundOrConstraints::UpperBound(bound) => visitor.visit_type(db, bound),
|
TypeVarBoundOrConstraints::UpperBound(bound) => visitor.visit_type(db, bound),
|
||||||
TypeVarBoundOrConstraints::Constraints(constraints) => {
|
TypeVarBoundOrConstraints::Constraints(constraints) => {
|
||||||
visitor.visit_union_type(db, constraints);
|
walk_type_var_constraints(db, constraints, visitor);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -10194,18 +10339,61 @@ impl<'db> TypeVarBoundOrConstraints<'db> {
|
||||||
TypeVarBoundOrConstraints::UpperBound(bound.normalized_impl(db, visitor))
|
TypeVarBoundOrConstraints::UpperBound(bound.normalized_impl(db, visitor))
|
||||||
}
|
}
|
||||||
TypeVarBoundOrConstraints::Constraints(constraints) => {
|
TypeVarBoundOrConstraints::Constraints(constraints) => {
|
||||||
// Constraints are a non-normalized union by design (it's not really a union at
|
TypeVarBoundOrConstraints::Constraints(constraints.normalized_impl(db, visitor))
|
||||||
// all, we are just using a union to store the types). Normalize the types but not
|
}
|
||||||
// the containing union.
|
}
|
||||||
TypeVarBoundOrConstraints::Constraints(UnionType::new(
|
}
|
||||||
|
|
||||||
|
/// Normalize for cycle recovery by combining with the previous value and
|
||||||
|
/// removing divergent types introduced by the cycle.
|
||||||
|
///
|
||||||
|
/// See [`Type::cycle_normalized`] for more details on how this works.
|
||||||
|
fn cycle_normalized(self, db: &'db dyn Db, previous: Self, cycle: &salsa::Cycle) -> Self {
|
||||||
|
match (self, previous) {
|
||||||
|
(
|
||||||
|
TypeVarBoundOrConstraints::UpperBound(bound),
|
||||||
|
TypeVarBoundOrConstraints::UpperBound(prev_bound),
|
||||||
|
) => {
|
||||||
|
TypeVarBoundOrConstraints::UpperBound(bound.cycle_normalized(db, prev_bound, cycle))
|
||||||
|
}
|
||||||
|
(
|
||||||
|
TypeVarBoundOrConstraints::Constraints(constraints),
|
||||||
|
TypeVarBoundOrConstraints::Constraints(prev_constraints),
|
||||||
|
) => {
|
||||||
|
// Normalize each constraint with its corresponding previous constraint
|
||||||
|
let current_elements = constraints.elements(db);
|
||||||
|
let prev_elements = prev_constraints.elements(db);
|
||||||
|
TypeVarBoundOrConstraints::Constraints(TypeVarConstraints::new(
|
||||||
db,
|
db,
|
||||||
constraints
|
current_elements
|
||||||
.elements(db)
|
|
||||||
.iter()
|
.iter()
|
||||||
.map(|ty| ty.normalized_impl(db, visitor))
|
.zip(prev_elements.iter())
|
||||||
|
.map(|(ty, prev_ty)| ty.cycle_normalized(db, *prev_ty, cycle))
|
||||||
.collect::<Box<_>>(),
|
.collect::<Box<_>>(),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
// The choice of whether it's an upper bound or constraints is purely syntactic and
|
||||||
|
// thus can never change in a cycle: `parsed_module` does not participate in cycles,
|
||||||
|
// the AST will never change from one iteration to the next.
|
||||||
|
_ => unreachable!(
|
||||||
|
"TypeVar switched from bound to constraints (or vice versa) in fixpoint iteration"
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Normalize recursive types for cycle recovery when there's no previous value.
|
||||||
|
///
|
||||||
|
/// See [`Type::recursive_type_normalized`] for more details.
|
||||||
|
fn recursive_type_normalized(self, db: &'db dyn Db, cycle: &salsa::Cycle) -> Self {
|
||||||
|
match self {
|
||||||
|
TypeVarBoundOrConstraints::UpperBound(bound) => {
|
||||||
|
TypeVarBoundOrConstraints::UpperBound(bound.recursive_type_normalized(db, cycle))
|
||||||
|
}
|
||||||
|
TypeVarBoundOrConstraints::Constraints(constraints) => {
|
||||||
|
TypeVarBoundOrConstraints::Constraints(
|
||||||
|
constraints.map(db, |ty| ty.recursive_type_normalized(db, cycle)),
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -10220,13 +10408,10 @@ impl<'db> TypeVarBoundOrConstraints<'db> {
|
||||||
bound.materialize(db, materialization_kind, visitor),
|
bound.materialize(db, materialization_kind, visitor),
|
||||||
),
|
),
|
||||||
TypeVarBoundOrConstraints::Constraints(constraints) => {
|
TypeVarBoundOrConstraints::Constraints(constraints) => {
|
||||||
TypeVarBoundOrConstraints::Constraints(UnionType::new(
|
TypeVarBoundOrConstraints::Constraints(constraints.materialize_impl(
|
||||||
db,
|
db,
|
||||||
constraints
|
materialization_kind,
|
||||||
.elements(db)
|
visitor,
|
||||||
.iter()
|
|
||||||
.map(|ty| ty.materialize(db, materialization_kind, visitor))
|
|
||||||
.collect::<Box<_>>(),
|
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@@ -12797,7 +12982,7 @@ impl<'db> ModuleLiteralType<'db> {
         let relative_submodule_name = ModuleName::new(name)?;
         let mut absolute_submodule_name = self.module(db).name(db).clone();
         absolute_submodule_name.extend(&relative_submodule_name);
-        let submodule = resolve_module(db, &absolute_submodule_name)?;
+        let submodule = resolve_module(db, importing_file, &absolute_submodule_name)?;
         Some(Type::module_literal(db, importing_file, submodule))
     }

@@ -13181,6 +13366,9 @@ pub struct UnionType<'db> {
     /// The union type includes values in any of these types.
     #[returns(deref)]
     pub elements: Box<[Type<'db>]>,
+    /// Whether the value pointed to by this type is recursively defined.
+    /// If `Yes`, union literal widening is performed early.
+    recursively_defined: RecursivelyDefined,
 }

 pub(crate) fn walk_union<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
@ -13265,7 +13453,14 @@ impl<'db> UnionType<'db> {
|
||||||
db: &'db dyn Db,
|
db: &'db dyn Db,
|
||||||
transform_fn: impl FnMut(&Type<'db>) -> Type<'db>,
|
transform_fn: impl FnMut(&Type<'db>) -> Type<'db>,
|
||||||
) -> Type<'db> {
|
) -> Type<'db> {
|
||||||
Self::from_elements(db, self.elements(db).iter().map(transform_fn))
|
self.elements(db)
|
||||||
|
.iter()
|
||||||
|
.map(transform_fn)
|
||||||
|
.fold(UnionBuilder::new(db), |builder, element| {
|
||||||
|
builder.add(element)
|
||||||
|
})
|
||||||
|
.recursively_defined(self.recursively_defined(db))
|
||||||
|
.build()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A fallible version of [`UnionType::map`].
|
/// A fallible version of [`UnionType::map`].
|
||||||
|
|
@ -13280,7 +13475,12 @@ impl<'db> UnionType<'db> {
|
||||||
db: &'db dyn Db,
|
db: &'db dyn Db,
|
||||||
transform_fn: impl FnMut(&Type<'db>) -> Option<Type<'db>>,
|
transform_fn: impl FnMut(&Type<'db>) -> Option<Type<'db>>,
|
||||||
) -> Option<Type<'db>> {
|
) -> Option<Type<'db>> {
|
||||||
Self::try_from_elements(db, self.elements(db).iter().map(transform_fn))
|
let mut builder = UnionBuilder::new(db);
|
||||||
|
for element in self.elements(db).iter().map(transform_fn) {
|
||||||
|
builder = builder.add(element?);
|
||||||
|
}
|
||||||
|
builder = builder.recursively_defined(self.recursively_defined(db));
|
||||||
|
Some(builder.build())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn to_instance(self, db: &'db dyn Db) -> Option<Type<'db>> {
|
pub(crate) fn to_instance(self, db: &'db dyn Db) -> Option<Type<'db>> {
|
||||||
|
|
@ -13292,7 +13492,14 @@ impl<'db> UnionType<'db> {
|
||||||
db: &'db dyn Db,
|
db: &'db dyn Db,
|
||||||
mut f: impl FnMut(&Type<'db>) -> bool,
|
mut f: impl FnMut(&Type<'db>) -> bool,
|
||||||
) -> Type<'db> {
|
) -> Type<'db> {
|
||||||
Self::from_elements(db, self.elements(db).iter().filter(|ty| f(ty)))
|
self.elements(db)
|
||||||
|
.iter()
|
||||||
|
.filter(|ty| f(ty))
|
||||||
|
.fold(UnionBuilder::new(db), |builder, element| {
|
||||||
|
builder.add(*element)
|
||||||
|
})
|
||||||
|
.recursively_defined(self.recursively_defined(db))
|
||||||
|
.build()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn map_with_boundness(
|
pub(crate) fn map_with_boundness(
|
||||||
|
|
@ -13327,7 +13534,9 @@ impl<'db> UnionType<'db> {
|
||||||
Place::Undefined
|
Place::Undefined
|
||||||
} else {
|
} else {
|
||||||
Place::Defined(
|
Place::Defined(
|
||||||
builder.build(),
|
builder
|
||||||
|
.recursively_defined(self.recursively_defined(db))
|
||||||
|
.build(),
|
||||||
origin,
|
origin,
|
||||||
if possibly_unbound {
|
if possibly_unbound {
|
||||||
Definedness::PossiblyUndefined
|
Definedness::PossiblyUndefined
|
||||||
|
|
@ -13375,7 +13584,9 @@ impl<'db> UnionType<'db> {
|
||||||
Place::Undefined
|
Place::Undefined
|
||||||
} else {
|
} else {
|
||||||
Place::Defined(
|
Place::Defined(
|
||||||
builder.build(),
|
builder
|
||||||
|
.recursively_defined(self.recursively_defined(db))
|
||||||
|
.build(),
|
||||||
origin,
|
origin,
|
||||||
if possibly_unbound {
|
if possibly_unbound {
|
||||||
Definedness::PossiblyUndefined
|
Definedness::PossiblyUndefined
|
||||||
|
|
@ -13410,6 +13621,7 @@ impl<'db> UnionType<'db> {
|
||||||
.unpack_aliases(true),
|
.unpack_aliases(true),
|
||||||
UnionBuilder::add,
|
UnionBuilder::add,
|
||||||
)
|
)
|
||||||
|
.recursively_defined(self.recursively_defined(db))
|
||||||
.build()
|
.build()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -13422,7 +13634,8 @@ impl<'db> UnionType<'db> {
|
||||||
let mut builder = UnionBuilder::new(db)
|
let mut builder = UnionBuilder::new(db)
|
||||||
.order_elements(false)
|
.order_elements(false)
|
||||||
.unpack_aliases(false)
|
.unpack_aliases(false)
|
||||||
.cycle_recovery(true);
|
.cycle_recovery(true)
|
||||||
|
.recursively_defined(self.recursively_defined(db));
|
||||||
let mut empty = true;
|
let mut empty = true;
|
||||||
for ty in self.elements(db) {
|
for ty in self.elements(db) {
|
||||||
if nested {
|
if nested {
|
||||||
|
|
@ -13437,6 +13650,7 @@ impl<'db> UnionType<'db> {
|
||||||
// `Divergent` in a union type does not mean true divergence, so we skip it if not nested.
|
// `Divergent` in a union type does not mean true divergence, so we skip it if not nested.
|
||||||
// e.g. T | Divergent == T | (T | (T | (T | ...))) == T
|
// e.g. T | Divergent == T | (T | (T | (T | ...))) == T
|
||||||
if ty == &div {
|
if ty == &div {
|
||||||
|
builder = builder.recursively_defined(RecursivelyDefined::Yes);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
builder = builder.add(
|
builder = builder.add(
|
||||||
|
|
|
||||||
|
|
@ -157,7 +157,7 @@ impl<'db> BoundSuperError<'db> {
|
||||||
.map(|c| c.display(db))
|
.map(|c| c.display(db))
|
||||||
.join(", ")
|
.join(", ")
|
||||||
));
|
));
|
||||||
Type::Union(constraints)
|
constraints.as_type(db)
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
diagnostic.info(format_args!(
|
diagnostic.info(format_args!(
|
||||||
|
|
@ -374,7 +374,7 @@ impl<'db> BoundSuperType<'db> {
|
||||||
delegate_with_error_mapped(bound, Some(type_var))
|
delegate_with_error_mapped(bound, Some(type_var))
|
||||||
}
|
}
|
||||||
Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
|
Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
|
||||||
delegate_with_error_mapped(Type::Union(constraints), Some(type_var))
|
delegate_with_error_mapped(constraints.as_type(db), Some(type_var))
|
||||||
}
|
}
|
||||||
None => delegate_with_error_mapped(Type::object(), Some(type_var)),
|
None => delegate_with_error_mapped(Type::object(), Some(type_var)),
|
||||||
};
|
};
|
||||||
|
|
|
||||||
|
|
@@ -202,12 +202,30 @@ enum ReduceResult<'db> {
     Type(Type<'db>),
 }

-// TODO increase this once we extend `UnionElement` throughout all union/intersection
-// representations, so that we can make large unions of literals fast in all operations.
-//
-// For now (until we solve https://github.com/astral-sh/ty/issues/957), keep this number
-// below 200, which is the salsa fixpoint iteration limit.
-const MAX_UNION_LITERALS: usize = 190;
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, get_size2::GetSize)]
+pub enum RecursivelyDefined {
+    Yes,
+    No,
+}
+
+impl RecursivelyDefined {
+    const fn is_yes(self) -> bool {
+        matches!(self, RecursivelyDefined::Yes)
+    }
+
+    const fn or(self, other: RecursivelyDefined) -> RecursivelyDefined {
+        match (self, other) {
+            (RecursivelyDefined::Yes, _) | (_, RecursivelyDefined::Yes) => RecursivelyDefined::Yes,
+            _ => RecursivelyDefined::No,
+        }
+    }
+}
+
+/// If the value is defined recursively, widening is performed from fewer literal elements, resulting in faster convergence of the fixed-point iteration.
+const MAX_RECURSIVE_UNION_LITERALS: usize = 10;
+/// If the value is defined non-recursively, the fixed-point iteration will converge in one go,
+/// so in principle we can have as many literal elements as we want, but to avoid unintended huge computational loads, we limit it to 256.
+const MAX_NON_RECURSIVE_UNION_LITERALS: usize = 256;

 pub(crate) struct UnionBuilder<'db> {
     elements: Vec<UnionElement<'db>>,
|
||||||
// This is enabled when joining types in a `cycle_recovery` function.
|
// This is enabled when joining types in a `cycle_recovery` function.
|
||||||
// Since a cycle cannot be created within a `cycle_recovery` function, execution of `is_redundant_with` is skipped.
|
// Since a cycle cannot be created within a `cycle_recovery` function, execution of `is_redundant_with` is skipped.
|
||||||
cycle_recovery: bool,
|
cycle_recovery: bool,
|
||||||
|
recursively_defined: RecursivelyDefined,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'db> UnionBuilder<'db> {
|
impl<'db> UnionBuilder<'db> {
|
||||||
|
|
@ -227,6 +246,7 @@ impl<'db> UnionBuilder<'db> {
|
||||||
unpack_aliases: true,
|
unpack_aliases: true,
|
||||||
order_elements: false,
|
order_elements: false,
|
||||||
cycle_recovery: false,
|
cycle_recovery: false,
|
||||||
|
recursively_defined: RecursivelyDefined::No,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -248,6 +268,11 @@ impl<'db> UnionBuilder<'db> {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn recursively_defined(mut self, val: RecursivelyDefined) -> Self {
|
||||||
|
self.recursively_defined = val;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn is_empty(&self) -> bool {
|
pub(crate) fn is_empty(&self) -> bool {
|
||||||
self.elements.is_empty()
|
self.elements.is_empty()
|
||||||
}
|
}
|
||||||
|
|
@@ -258,6 +283,27 @@ impl<'db> UnionBuilder<'db> {
         self.elements.push(UnionElement::Type(Type::object()));
     }

+    fn widen_literal_types(&mut self, seen_aliases: &mut Vec<Type<'db>>) {
+        let mut replace_with = vec![];
+        for elem in &self.elements {
+            match elem {
+                UnionElement::IntLiterals(_) => {
+                    replace_with.push(KnownClass::Int.to_instance(self.db));
+                }
+                UnionElement::StringLiterals(_) => {
+                    replace_with.push(KnownClass::Str.to_instance(self.db));
+                }
+                UnionElement::BytesLiterals(_) => {
+                    replace_with.push(KnownClass::Bytes.to_instance(self.db));
+                }
+                UnionElement::Type(_) => {}
+            }
+        }
+        for ty in replace_with {
+            self.add_in_place_impl(ty, seen_aliases);
+        }
+    }
+
     /// Adds a type to this union.
     pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
         self.add_in_place(ty);
@@ -270,6 +316,15 @@ impl<'db> UnionBuilder<'db> {
     }

     pub(crate) fn add_in_place_impl(&mut self, ty: Type<'db>, seen_aliases: &mut Vec<Type<'db>>) {
+        let cycle_recovery = self.cycle_recovery;
+        let should_widen = |literals, recursively_defined: RecursivelyDefined| {
+            if recursively_defined.is_yes() && cycle_recovery {
+                literals >= MAX_RECURSIVE_UNION_LITERALS
+            } else {
+                literals >= MAX_NON_RECURSIVE_UNION_LITERALS
+            }
+        };
+
         match ty {
             Type::Union(union) => {
                 let new_elements = union.elements(self.db);
@ -277,6 +332,20 @@ impl<'db> UnionBuilder<'db> {
|
||||||
for element in new_elements {
|
for element in new_elements {
|
||||||
self.add_in_place_impl(*element, seen_aliases);
|
self.add_in_place_impl(*element, seen_aliases);
|
||||||
}
|
}
|
||||||
|
self.recursively_defined = self
|
||||||
|
.recursively_defined
|
||||||
|
.or(union.recursively_defined(self.db));
|
||||||
|
if self.cycle_recovery && self.recursively_defined.is_yes() {
|
||||||
|
let literals = self.elements.iter().fold(0, |acc, elem| match elem {
|
||||||
|
UnionElement::IntLiterals(literals) => acc + literals.len(),
|
||||||
|
UnionElement::StringLiterals(literals) => acc + literals.len(),
|
||||||
|
UnionElement::BytesLiterals(literals) => acc + literals.len(),
|
||||||
|
UnionElement::Type(_) => acc,
|
||||||
|
});
|
||||||
|
if should_widen(literals, self.recursively_defined) {
|
||||||
|
self.widen_literal_types(seen_aliases);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
// Adding `Never` to a union is a no-op.
|
// Adding `Never` to a union is a no-op.
|
||||||
Type::Never => {}
|
Type::Never => {}
|
||||||
|
|
@ -300,7 +369,7 @@ impl<'db> UnionBuilder<'db> {
|
||||||
for (index, element) in self.elements.iter_mut().enumerate() {
|
for (index, element) in self.elements.iter_mut().enumerate() {
|
||||||
match element {
|
match element {
|
||||||
UnionElement::StringLiterals(literals) => {
|
UnionElement::StringLiterals(literals) => {
|
||||||
if literals.len() >= MAX_UNION_LITERALS {
|
if should_widen(literals.len(), self.recursively_defined) {
|
||||||
let replace_with = KnownClass::Str.to_instance(self.db);
|
let replace_with = KnownClass::Str.to_instance(self.db);
|
||||||
self.add_in_place_impl(replace_with, seen_aliases);
|
self.add_in_place_impl(replace_with, seen_aliases);
|
||||||
return;
|
return;
|
||||||
|
|
@ -345,7 +414,7 @@ impl<'db> UnionBuilder<'db> {
|
||||||
for (index, element) in self.elements.iter_mut().enumerate() {
|
for (index, element) in self.elements.iter_mut().enumerate() {
|
||||||
match element {
|
match element {
|
||||||
UnionElement::BytesLiterals(literals) => {
|
UnionElement::BytesLiterals(literals) => {
|
||||||
if literals.len() >= MAX_UNION_LITERALS {
|
if should_widen(literals.len(), self.recursively_defined) {
|
||||||
let replace_with = KnownClass::Bytes.to_instance(self.db);
|
let replace_with = KnownClass::Bytes.to_instance(self.db);
|
||||||
self.add_in_place_impl(replace_with, seen_aliases);
|
self.add_in_place_impl(replace_with, seen_aliases);
|
||||||
return;
|
return;
|
||||||
|
|
@ -390,7 +459,7 @@ impl<'db> UnionBuilder<'db> {
|
||||||
for (index, element) in self.elements.iter_mut().enumerate() {
|
for (index, element) in self.elements.iter_mut().enumerate() {
|
||||||
match element {
|
match element {
|
||||||
UnionElement::IntLiterals(literals) => {
|
UnionElement::IntLiterals(literals) => {
|
||||||
if literals.len() >= MAX_UNION_LITERALS {
|
if should_widen(literals.len(), self.recursively_defined) {
|
||||||
let replace_with = KnownClass::Int.to_instance(self.db);
|
let replace_with = KnownClass::Int.to_instance(self.db);
|
||||||
self.add_in_place_impl(replace_with, seen_aliases);
|
self.add_in_place_impl(replace_with, seen_aliases);
|
||||||
return;
|
return;
|
||||||
|
|
@ -585,6 +654,7 @@ impl<'db> UnionBuilder<'db> {
|
||||||
_ => Some(Type::Union(UnionType::new(
|
_ => Some(Type::Union(UnionType::new(
|
||||||
self.db,
|
self.db,
|
||||||
types.into_boxed_slice(),
|
types.into_boxed_slice(),
|
||||||
|
self.recursively_defined,
|
||||||
))),
|
))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -696,6 +766,7 @@ impl<'db> IntersectionBuilder<'db> {
|
||||||
enum_member_literals(db, instance.class_literal(db), None)
|
enum_member_literals(db, instance.class_literal(db), None)
|
||||||
.expect("Calling `enum_member_literals` on an enum class")
|
.expect("Calling `enum_member_literals` on an enum class")
|
||||||
.collect::<Box<[_]>>(),
|
.collect::<Box<[_]>>(),
|
||||||
|
RecursivelyDefined::No,
|
||||||
)),
|
)),
|
||||||
seen_aliases,
|
seen_aliases,
|
||||||
)
|
)
|
||||||
|
|
@ -1184,7 +1255,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||||
speculative = speculative.add_positive(bound);
|
speculative = speculative.add_positive(bound);
|
||||||
}
|
}
|
||||||
Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
|
Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
|
||||||
speculative = speculative.add_positive(Type::Union(constraints));
|
speculative = speculative.add_positive(constraints.as_type(db));
|
||||||
}
|
}
|
||||||
// TypeVars without a bound or constraint implicitly have `object` as their
|
// TypeVars without a bound or constraint implicitly have `object` as their
|
||||||
// upper bound, and it is always a no-op to add `object` to an intersection.
|
// upper bound, and it is always a no-op to add `object` to an intersection.
|
||||||
|
|
|
||||||
|
|
@ -32,7 +32,9 @@ use crate::types::function::{
|
||||||
use crate::types::generics::{
|
use crate::types::generics::{
|
||||||
InferableTypeVars, Specialization, SpecializationBuilder, SpecializationError,
|
InferableTypeVars, Specialization, SpecializationBuilder, SpecializationError,
|
||||||
};
|
};
|
||||||
use crate::types::signatures::{Parameter, ParameterForm, ParameterKind, Parameters};
|
use crate::types::signatures::{
|
||||||
|
CallableSignature, Parameter, ParameterForm, ParameterKind, Parameters,
|
||||||
|
};
|
||||||
use crate::types::tuple::{TupleLength, TupleType};
|
use crate::types::tuple::{TupleLength, TupleType};
|
||||||
use crate::types::{
|
use crate::types::{
|
||||||
BoundMethodType, BoundTypeVarIdentity, ClassLiteral, DATACLASS_FLAGS, DataclassFlags,
|
BoundMethodType, BoundTypeVarIdentity, ClassLiteral, DATACLASS_FLAGS, DataclassFlags,
|
||||||
|
|
@ -788,51 +790,67 @@ impl<'db> Bindings<'db> {
|
||||||
))
|
))
|
||||||
};
|
};
|
||||||
|
|
||||||
let function_generic_context = |function: FunctionType<'db>| {
|
let signature_generic_context =
|
||||||
let union = UnionType::from_elements(
|
|signature: &CallableSignature<'db>| {
|
||||||
|
UnionType::try_from_elements(
|
||||||
db,
|
db,
|
||||||
function
|
signature.overloads.iter().map(|signature| {
|
||||||
.signature(db)
|
signature.generic_context.map(wrap_generic_context)
|
||||||
.overloads
|
}),
|
||||||
.iter()
|
)
|
||||||
.filter_map(|signature| signature.generic_context)
|
|
||||||
.map(wrap_generic_context),
|
|
||||||
);
|
|
||||||
if union.is_never() {
|
|
||||||
Type::none(db)
|
|
||||||
} else {
|
|
||||||
union
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// TODO: Handle generic functions, and unions/intersections of
|
let generic_context_for_simple_type = |ty: Type<'db>| match ty {
|
||||||
// generic types
|
Type::ClassLiteral(class) => {
|
||||||
overload.set_return_type(match ty {
|
class.generic_context(db).map(wrap_generic_context)
|
||||||
Type::ClassLiteral(class) => class
|
|
||||||
.generic_context(db)
|
|
||||||
.map(wrap_generic_context)
|
|
||||||
.unwrap_or_else(|| Type::none(db)),
|
|
||||||
|
|
||||||
Type::FunctionLiteral(function) => {
|
|
||||||
function_generic_context(*function)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Type::BoundMethod(bound_method) => {
|
Type::FunctionLiteral(function) => {
|
||||||
function_generic_context(bound_method.function(db))
|
signature_generic_context(function.signature(db))
|
||||||
|
}
|
||||||
|
|
||||||
|
Type::BoundMethod(bound_method) => signature_generic_context(
|
||||||
|
bound_method.function(db).signature(db),
|
||||||
|
),
|
||||||
|
|
||||||
|
Type::Callable(callable) => {
|
||||||
|
signature_generic_context(callable.signatures(db))
|
||||||
}
|
}
|
||||||
|
|
||||||
Type::KnownInstance(KnownInstanceType::TypeAliasType(
|
Type::KnownInstance(KnownInstanceType::TypeAliasType(
|
||||||
TypeAliasType::PEP695(alias),
|
TypeAliasType::PEP695(alias),
|
||||||
)) => alias
|
)) => alias.generic_context(db).map(wrap_generic_context),
|
||||||
.generic_context(db)
|
|
||||||
.map(wrap_generic_context)
|
|
||||||
.unwrap_or_else(|| Type::none(db)),
|
|
||||||
|
|
||||||
_ => Type::none(db),
|
_ => None,
|
||||||
});
|
};
|
||||||
|
|
||||||
|
let generic_context = match ty {
|
||||||
|
Type::Union(union_type) => UnionType::try_from_elements(
|
||||||
|
db,
|
||||||
|
union_type
|
||||||
|
.elements(db)
|
||||||
|
.iter()
|
||||||
|
.map(|ty| generic_context_for_simple_type(*ty)),
|
||||||
|
),
|
||||||
|
_ => generic_context_for_simple_type(*ty),
|
||||||
|
};
|
||||||
|
|
||||||
|
overload.set_return_type(
|
||||||
|
generic_context.unwrap_or_else(|| Type::none(db)),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Some(KnownFunction::IntoCallable) => {
|
||||||
|
let [Some(ty)] = overload.parameter_types() else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
let Some(callables) = ty.try_upcast_to_callable(db) else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
overload.set_return_type(callables.into_type(db));
|
||||||
|
}
|
||||||
|
|
||||||
Some(KnownFunction::DunderAllNames) => {
|
Some(KnownFunction::DunderAllNames) => {
|
||||||
if let [Some(ty)] = overload.parameter_types() {
|
if let [Some(ty)] = overload.parameter_types() {
|
||||||
overload.set_return_type(match ty {
|
overload.set_return_type(match ty {
|
||||||
|
|
|
||||||
|
|
@ -340,9 +340,18 @@ impl<'db> From<GenericAlias<'db>> for Type<'db> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn variance_of_cycle_initial<'db>(
|
||||||
|
_db: &'db dyn Db,
|
||||||
|
_id: salsa::Id,
|
||||||
|
_self: GenericAlias<'db>,
|
||||||
|
_typevar: BoundTypeVarInstance<'db>,
|
||||||
|
) -> TypeVarVariance {
|
||||||
|
TypeVarVariance::Bivariant
|
||||||
|
}
|
||||||
|
|
||||||
#[salsa::tracked]
|
#[salsa::tracked]
|
||||||
impl<'db> VarianceInferable<'db> for GenericAlias<'db> {
|
impl<'db> VarianceInferable<'db> for GenericAlias<'db> {
|
||||||
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]
|
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size, cycle_initial=variance_of_cycle_initial)]
|
||||||
fn variance_of(self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarVariance {
|
fn variance_of(self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarVariance {
|
||||||
let origin = self.origin(db);
|
let origin = self.origin(db);
|
||||||
|
|
||||||
|
|
@ -1126,8 +1135,12 @@ impl<'db> ClassType<'db> {
|
||||||
/// constructor signature of this class.
|
/// constructor signature of this class.
|
||||||
#[salsa::tracked(cycle_initial=into_callable_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
|
#[salsa::tracked(cycle_initial=into_callable_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
|
||||||
pub(super) fn into_callable(self, db: &'db dyn Db) -> CallableTypes<'db> {
|
pub(super) fn into_callable(self, db: &'db dyn Db) -> CallableTypes<'db> {
|
||||||
|
// TODO: This mimics a lot of the logic in Type::try_call_from_constructor. Can we
|
||||||
|
// consolidate the two? Can we invoke a class by upcasting the class into a Callable, and
|
||||||
|
// then relying on the call binding machinery to Just Work™?
|
||||||
|
|
||||||
let (class_literal, _) = self.class_literal(db);
|
let (class_literal, _) = self.class_literal(db);
|
||||||
let generic_context = class_literal.generic_context(db);
|
let class_generic_context = class_literal.generic_context(db);
|
||||||
|
|
||||||
let self_ty = Type::from(self);
|
let self_ty = Type::from(self);
|
||||||
let metaclass_dunder_call_function_symbol = self_ty
|
let metaclass_dunder_call_function_symbol = self_ty
|
||||||
|
|
@ -1225,6 +1238,11 @@ impl<'db> ClassType<'db> {
|
||||||
});
|
});
|
||||||
let return_type = self_annotation.unwrap_or(correct_return_type);
|
let return_type = self_annotation.unwrap_or(correct_return_type);
|
||||||
let instance_ty = self_annotation.unwrap_or_else(|| Type::instance(db, self));
|
let instance_ty = self_annotation.unwrap_or_else(|| Type::instance(db, self));
|
||||||
|
let generic_context = GenericContext::merge_optional(
|
||||||
|
db,
|
||||||
|
class_generic_context,
|
||||||
|
signature.generic_context,
|
||||||
|
);
|
||||||
Signature::new_generic(
|
Signature::new_generic(
|
||||||
generic_context,
|
generic_context,
|
||||||
signature.parameters().clone(),
|
signature.parameters().clone(),
|
||||||
|
|
@ -1271,9 +1289,13 @@ impl<'db> ClassType<'db> {
|
||||||
)
|
)
|
||||||
.place;
|
.place;
|
||||||
|
|
||||||
if let Place::Defined(Type::FunctionLiteral(new_function), _, _) =
|
if let Place::Defined(Type::FunctionLiteral(mut new_function), _, _) =
|
||||||
new_function_symbol
|
new_function_symbol
|
||||||
{
|
{
|
||||||
|
if let Some(class_generic_context) = class_generic_context {
|
||||||
|
new_function =
|
||||||
|
new_function.with_inherited_generic_context(db, class_generic_context);
|
||||||
|
}
|
||||||
CallableTypes::one(
|
CallableTypes::one(
|
||||||
new_function
|
new_function
|
||||||
.into_bound_method_type(db, correct_return_type)
|
.into_bound_method_type(db, correct_return_type)
|
||||||
|
|
@ -1283,7 +1305,11 @@ impl<'db> ClassType<'db> {
|
||||||
// Fallback if no `object.__new__` is found.
|
// Fallback if no `object.__new__` is found.
|
||||||
CallableTypes::one(CallableType::single(
|
CallableTypes::one(CallableType::single(
|
||||||
db,
|
db,
|
||||||
Signature::new(Parameters::empty(), Some(correct_return_type)),
|
Signature::new_generic(
|
||||||
|
class_generic_context,
|
||||||
|
Parameters::empty(),
|
||||||
|
Some(correct_return_type),
|
||||||
|
),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -5860,7 +5886,7 @@ impl SlotsKind {
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::db::tests::setup_db;
|
use crate::db::tests::setup_db;
|
||||||
use crate::module_resolver::resolve_module;
|
use crate::module_resolver::resolve_module_confident;
|
||||||
use crate::{PythonVersionSource, PythonVersionWithSource};
|
use crate::{PythonVersionSource, PythonVersionWithSource};
|
||||||
use salsa::Setter;
|
use salsa::Setter;
|
||||||
use strum::IntoEnumIterator;
|
use strum::IntoEnumIterator;
|
||||||
|
|
@ -5876,7 +5902,8 @@ mod tests {
|
||||||
});
|
});
|
||||||
for class in KnownClass::iter() {
|
for class in KnownClass::iter() {
|
||||||
let class_name = class.name(&db);
|
let class_name = class.name(&db);
|
||||||
let class_module = resolve_module(&db, &class.canonical_module(&db).name()).unwrap();
|
let class_module =
|
||||||
|
resolve_module_confident(&db, &class.canonical_module(&db).name()).unwrap();
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
KnownClass::try_from_file_and_name(
|
KnownClass::try_from_file_and_name(
|
||||||
|
|
|
||||||
|
|
@ -2998,11 +2998,10 @@ pub(crate) fn report_invalid_arguments_to_annotated(
|
||||||
let Some(builder) = context.report_lint(&INVALID_TYPE_FORM, subscript) else {
|
let Some(builder) = context.report_lint(&INVALID_TYPE_FORM, subscript) else {
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
builder.into_diagnostic(format_args!(
|
builder.into_diagnostic(
|
||||||
"Special form `{}` expected at least 2 arguments \
|
"Special form `typing.Annotated` expected at least 2 arguments \
|
||||||
(one type and at least one metadata element)",
|
(one type and at least one metadata element)",
|
||||||
SpecialFormType::Annotated
|
);
|
||||||
));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn report_invalid_argument_number_to_special_form(
|
pub(crate) fn report_invalid_argument_number_to_special_form(
|
||||||
|
|
@ -3103,8 +3102,7 @@ pub(crate) fn report_invalid_arguments_to_callable(
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
builder.into_diagnostic(format_args!(
|
builder.into_diagnostic(format_args!(
|
||||||
"Special form `{}` expected exactly two arguments (parameter types and return type)",
|
"Special form `typing.Callable` expected exactly two arguments (parameter types and return type)",
|
||||||
SpecialFormType::Callable
|
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -696,7 +696,8 @@ impl<'db> FmtDetailed<'db> for DisplayRepresentation<'db> {
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
Type::SpecialForm(special_form) => {
|
Type::SpecialForm(special_form) => {
|
||||||
write!(f.with_type(self.ty), "{special_form}")
|
f.set_invalid_syntax();
|
||||||
|
write!(f.with_type(self.ty), "<special form '{special_form}'>")
|
||||||
}
|
}
|
||||||
Type::KnownInstance(known_instance) => known_instance
|
Type::KnownInstance(known_instance) => known_instance
|
||||||
.display_with(self.db, self.settings.clone())
|
.display_with(self.db, self.settings.clone())
|
||||||
|
|
@ -2173,16 +2174,24 @@ impl<'db> FmtDetailed<'db> for DisplayKnownInstanceRepr<'db> {
|
||||||
let ty = Type::KnownInstance(self.known_instance);
|
let ty = Type::KnownInstance(self.known_instance);
|
||||||
match self.known_instance {
|
match self.known_instance {
|
||||||
KnownInstanceType::SubscriptedProtocol(generic_context) => {
|
KnownInstanceType::SubscriptedProtocol(generic_context) => {
|
||||||
|
f.set_invalid_syntax();
|
||||||
|
f.write_str("<special form '")?;
|
||||||
f.with_type(ty).write_str("typing.Protocol")?;
|
f.with_type(ty).write_str("typing.Protocol")?;
|
||||||
f.write_str(&generic_context.display(self.db).to_string())
|
f.write_str(&generic_context.display(self.db).to_string())?;
|
||||||
|
f.write_str("'>")
|
||||||
}
|
}
|
||||||
KnownInstanceType::SubscriptedGeneric(generic_context) => {
|
KnownInstanceType::SubscriptedGeneric(generic_context) => {
|
||||||
|
f.set_invalid_syntax();
|
||||||
|
f.write_str("<special form '")?;
|
||||||
f.with_type(ty).write_str("typing.Generic")?;
|
f.with_type(ty).write_str("typing.Generic")?;
|
||||||
f.write_str(&generic_context.display(self.db).to_string())
|
f.write_str(&generic_context.display(self.db).to_string())?;
|
||||||
|
f.write_str("'>")
|
||||||
}
|
}
|
||||||
KnownInstanceType::TypeAliasType(alias) => {
|
KnownInstanceType::TypeAliasType(alias) => {
|
||||||
if let Some(specialization) = alias.specialization(self.db) {
|
if let Some(specialization) = alias.specialization(self.db) {
|
||||||
f.write_str(alias.name(self.db))?;
|
f.set_invalid_syntax();
|
||||||
|
f.write_str("<type alias '")?;
|
||||||
|
f.with_type(ty).write_str(alias.name(self.db))?;
|
||||||
f.write_str(
|
f.write_str(
|
||||||
&specialization
|
&specialization
|
||||||
.display_short(
|
.display_short(
|
||||||
|
|
@ -2191,7 +2200,8 @@ impl<'db> FmtDetailed<'db> for DisplayKnownInstanceRepr<'db> {
|
||||||
DisplaySettings::default(),
|
DisplaySettings::default(),
|
||||||
)
|
)
|
||||||
.to_string(),
|
.to_string(),
|
||||||
)
|
)?;
|
||||||
|
f.write_str("'>")
|
||||||
} else {
|
} else {
|
||||||
f.with_type(ty).write_str("typing.TypeAliasType")
|
f.with_type(ty).write_str("typing.TypeAliasType")
|
||||||
}
|
}
|
||||||
|
|
@ -2201,9 +2211,9 @@ impl<'db> FmtDetailed<'db> for DisplayKnownInstanceRepr<'db> {
|
 // have a `Type::TypeVar(_)`, which is rendered as the typevar's name.
 KnownInstanceType::TypeVar(typevar_instance) => {
 if typevar_instance.kind(self.db).is_paramspec() {
-f.write_str("typing.ParamSpec")
+f.with_type(ty).write_str("typing.ParamSpec")
 } else {
-f.write_str("typing.TypeVar")
+f.with_type(ty).write_str("typing.TypeVar")
 }
 }
 KnownInstanceType::Deprecated(_) => f.write_str("warnings.deprecated"),
@@ -2226,22 +2236,56 @@ impl<'db> FmtDetailed<'db> for DisplayKnownInstanceRepr<'db> {
 f.with_type(ty).write_str("ty_extensions.Specialization")?;
 write!(f, "{}", specialization.display_full(self.db))
 }
-KnownInstanceType::UnionType(_) => f.with_type(ty).write_str("types.UnionType"),
-KnownInstanceType::Literal(_) => {
+KnownInstanceType::UnionType(union) => {
 f.set_invalid_syntax();
-f.write_str("<typing.Literal special form>")
+f.write_char('<')?;
+f.with_type(ty).write_str("types.UnionType")?;
+f.write_str(" special form")?;
+if let Ok(ty) = union.union_type(self.db) {
+write!(f, " '{}'", ty.display(self.db))?;
 }
-KnownInstanceType::Annotated(_) => {
+f.write_char('>')
+}
+KnownInstanceType::Literal(inner) => {
 f.set_invalid_syntax();
-f.write_str("<typing.Annotated special form>")
+write!(
+f,
+"<special form '{}'>",
+inner.inner(self.db).display(self.db)
+)
 }
-KnownInstanceType::TypeGenericAlias(_) | KnownInstanceType::Callable(_) => {
-f.with_type(ty).write_str("GenericAlias")
+KnownInstanceType::Annotated(inner) => {
+f.set_invalid_syntax();
+f.write_str("<special form '")?;
+f.with_type(ty).write_str("typing.Annotated")?;
+write!(
+f,
+"[{}, <metadata>]'>",
+inner.inner(self.db).display(self.db)
+)
+}
+KnownInstanceType::Callable(callable) => {
+f.set_invalid_syntax();
+f.write_char('<')?;
+f.with_type(ty).write_str("typing.Callable")?;
+write!(f, " special form '{}'>", callable.display(self.db))
+}
+KnownInstanceType::TypeGenericAlias(inner) => {
+f.set_invalid_syntax();
+f.write_str("<special form '")?;
+write!(
+f.with_type(ty),
+"type[{}]",
+inner.inner(self.db).display(self.db)
+)?;
+f.write_str("'>")
 }
 KnownInstanceType::LiteralStringAlias(_) => f.write_str("str"),
 KnownInstanceType::NewType(declaration) => {
 f.set_invalid_syntax();
-write!(f, "<NewType pseudo-class '{}'>", declaration.name(self.db))
+f.write_str("<NewType pseudo-class '")?;
+f.with_type(ty).write_str(declaration.name(self.db))?;
+f.write_str("'>")
 }
 }
 }
@@ -1339,6 +1339,8 @@ pub enum KnownFunction {
 IsSingleValued,
 /// `ty_extensions.generic_context`
 GenericContext,
+/// `ty_extensions.into_callable`
+IntoCallable,
 /// `ty_extensions.dunder_all_names`
 DunderAllNames,
 /// `ty_extensions.enum_members`
@@ -1411,6 +1413,7 @@ impl KnownFunction {
 | Self::IsSingleton
 | Self::IsSubtypeOf
 | Self::GenericContext
+| Self::IntoCallable
 | Self::DunderAllNames
 | Self::EnumMembers
 | Self::StaticAssert
@@ -1483,17 +1486,22 @@ impl KnownFunction {

 diagnostic.annotate(
 Annotation::secondary(context.span(&call_expression.arguments.args[0]))
-.message(format_args!(
-"Inferred type of argument is `{}`",
-actual_ty.display(db),
-)),
+.message(format_args!("Inferred type is `{}`", actual_ty.display(db),)),
 );

+if actual_ty.is_subtype_of(db, *asserted_ty) {
+diagnostic.info(format_args!(
+"`{inferred_type}` is a subtype of `{asserted_type}`, but they are not equivalent",
+asserted_type = asserted_ty.display(db),
+inferred_type = actual_ty.display(db),
+));
+} else {
 diagnostic.info(format_args!(
 "`{asserted_type}` and `{inferred_type}` are not equivalent types",
 asserted_type = asserted_ty.display(db),
 inferred_type = actual_ty.display(db),
 ));
+}

 diagnostic.set_concise_message(format_args!(
 "Type `{}` does not match asserted type `{}`",
@@ -1882,7 +1890,7 @@ impl KnownFunction {
 let Some(module_name) = ModuleName::new(module_name) else {
 return;
 };
-let Some(module) = resolve_module(db, &module_name) else {
+let Some(module) = resolve_module(db, file, &module_name) else {
 return;
 };

@@ -1941,6 +1949,7 @@ pub(crate) mod tests {
 KnownFunction::IsSingleton
 | KnownFunction::IsSubtypeOf
 | KnownFunction::GenericContext
+| KnownFunction::IntoCallable
 | KnownFunction::DunderAllNames
 | KnownFunction::EnumMembers
 | KnownFunction::StaticAssert
@@ -19,11 +19,12 @@ use crate::types::visitor::{
 TypeCollector, TypeVisitor, any_over_type, walk_type_with_recursion_guard,
 };
 use crate::types::{
-ApplyTypeMappingVisitor, BoundTypeVarIdentity, BoundTypeVarInstance, ClassLiteral,
-FindLegacyTypeVarsVisitor, HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor,
-KnownClass, KnownInstanceType, MaterializationKind, NormalizedVisitor, Type, TypeContext,
-TypeMapping, TypeRelation, TypeVarBoundOrConstraints, TypeVarIdentity, TypeVarInstance,
-TypeVarKind, TypeVarVariance, UnionType, declaration_type, walk_bound_type_var_type,
+ApplyTypeMappingVisitor, BindingContext, BoundTypeVarIdentity, BoundTypeVarInstance,
+ClassLiteral, FindLegacyTypeVarsVisitor, HasRelationToVisitor, IsDisjointVisitor,
+IsEquivalentVisitor, KnownClass, KnownInstanceType, MaterializationKind, NormalizedVisitor,
+Type, TypeContext, TypeMapping, TypeRelation, TypeVarBoundOrConstraints, TypeVarIdentity,
+TypeVarInstance, TypeVarKind, TypeVarVariance, UnionType, declaration_type,
+walk_bound_type_var_type,
 };
 use crate::{Db, FxOrderMap, FxOrderSet};

@@ -263,6 +264,34 @@ impl<'db> GenericContext<'db> {
 )
 }

+pub(crate) fn merge_optional(
+db: &'db dyn Db,
+left: Option<Self>,
+right: Option<Self>,
+) -> Option<Self> {
+match (left, right) {
+(None, None) => None,
+(Some(one), None) | (None, Some(one)) => Some(one),
+(Some(left), Some(right)) => Some(left.merge(db, right)),
+}
+}
+
+pub(crate) fn remove_self(
+self,
+db: &'db dyn Db,
+binding_context: Option<BindingContext<'db>>,
+) -> Self {
+Self::from_typevar_instances(
+db,
+self.variables(db).filter(|bound_typevar| {
+!(bound_typevar.typevar(db).is_self(db)
+&& binding_context.is_none_or(|binding_context| {
+bound_typevar.binding_context(db) == binding_context
+}))
+}),
+)
+}
+
 pub(crate) fn inferable_typevars(self, db: &'db dyn Db) -> InferableTypeVars<'db, 'db> {
 #[derive(Default)]
 struct CollectTypeVars<'db> {
@@ -938,7 +938,7 @@ mod resolve_definition {
 };

 // Resolve the module to its file
-let Some(resolved_module) = resolve_module(db, &module_name) else {
+let Some(resolved_module) = resolve_module(db, file, &module_name) else {
 return Vec::new(); // Module not found, return empty list
 };

@@ -1025,7 +1025,7 @@ mod resolve_definition {
 else {
 return Vec::new();
 };
-let Some(resolved_module) = resolve_module(db, &module_name) else {
+let Some(resolved_module) = resolve_module(db, file, &module_name) else {
 return Vec::new();
 };
 resolved_module.file(db)
@@ -1134,7 +1134,12 @@ mod resolve_definition {
 // It's definitely a stub, so now rerun module resolution but with stubs disabled.
 let stub_module = file_to_module(db, stub_file_for_module_lookup)?;
 trace!("Found stub module: {}", stub_module.name(db));
-let real_module = resolve_real_module(db, stub_module.name(db))?;
+// We need to pass an importing file to `resolve_real_module` which is a bit odd
+// here because there isn't really an importing file. However this `resolve_real_module`
+// can be understood as essentially `import .`, which is also what `file_to_module` is,
+// so this is in fact exactly the file we want to consider the importer.
+let real_module =
+resolve_real_module(db, stub_file_for_module_lookup, stub_module.name(db))?;
 trace!("Found real module: {}", real_module.name(db));
 let real_file = real_module.file(db)?;
 trace!("Found real file: {}", real_file.path(db));
@@ -4,6 +4,7 @@ use itertools::{Either, EitherOrBoth, Itertools};
 use ruff_db::diagnostic::{Annotation, Diagnostic, DiagnosticId, Severity, Span};
 use ruff_db::files::File;
 use ruff_db::parsed::{ParsedModuleRef, parsed_module};
+use ruff_db::source::source_text;
 use ruff_python_ast::visitor::{Visitor, walk_expr};
 use ruff_python_ast::{
 self as ast, AnyNodeRef, ExprContext, HasNodeIndex, NodeIndex, PythonVersion,
@@ -3272,18 +3273,14 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 std::mem::replace(&mut self.deferred_state, DeferredExpressionState::Deferred);
 match bound.as_deref() {
 Some(expr @ ast::Expr::Tuple(ast::ExprTuple { elts, .. })) => {
-// We don't use UnionType::from_elements or UnionBuilder here, because we don't
-// want to simplify the list of constraints like we do with the elements of an
-// actual union type.
-// TODO: Consider using a new `OneOfType` connective here instead, since that
-// more accurately represents the actual semantics of typevar constraints.
-let ty = Type::Union(UnionType::new(
+// Here, we interpret `bound` as a heterogeneous tuple and convert it to `TypeVarConstraints` in `TypeVarInstance::lazy_constraints`.
+let tuple_ty = Type::heterogeneous_tuple(
 self.db(),
 elts.iter()
 .map(|expr| self.infer_type_expression(expr))
 .collect::<Box<[_]>>(),
-));
-self.store_expression_type(expr, ty);
+);
+self.store_expression_type(expr, tuple_ty);
 }
 Some(expr) => {
 self.infer_type_expression(expr);
@@ -5935,7 +5932,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 ) else {
 return false;
 };
-resolve_module(self.db(), &module_name).is_some()
+resolve_module(self.db(), self.file(), &module_name).is_some()
 }) {
 diagnostic
 .help("The module can be resolved if the number of leading dots is reduced");
@@ -6172,7 +6169,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 }
 };

-if resolve_module(self.db(), &module_name).is_none() {
+if resolve_module(self.db(), self.file(), &module_name).is_none() {
 self.report_unresolved_import(import_from.into(), module_ref.range(), *level, module);
 }
 }
@@ -6190,7 +6187,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 return;
 };

-let Some(module) = resolve_module(self.db(), &module_name) else {
+let Some(module) = resolve_module(self.db(), self.file(), &module_name) else {
 self.add_unknown_declaration_with_binding(alias.into(), definition);
 return;
 };
@@ -6375,7 +6372,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 self.add_binding(import_from.into(), definition, |_, _| Type::unknown());
 return;
 };
-let Some(module) = resolve_module(self.db(), &thispackage_name) else {
+let Some(module) = resolve_module(self.db(), self.file(), &thispackage_name) else {
 self.add_binding(import_from.into(), definition, |_, _| Type::unknown());
 return;
 };
@@ -6606,7 +6603,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 }

 fn module_type_from_name(&self, module_name: &ModuleName) -> Option<Type<'db>> {
-resolve_module(self.db(), module_name)
+resolve_module(self.db(), self.file(), module_name)
 .map(|module| Type::module_literal(self.db(), self.file(), module))
 }

@@ -7105,10 +7102,13 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {

 #[track_caller]
 fn store_expression_type(&mut self, expression: &ast::Expr, ty: Type<'db>) {
-if self.deferred_state.in_string_annotation() {
+if self.deferred_state.in_string_annotation()
+|| self.inner_expression_inference_state.is_get()
+{
 // Avoid storing the type of expressions that are part of a string annotation because
 // the expression ids don't exists in the semantic index. Instead, we'll store the type
 // on the string expression itself that represents the annotation.
+// Also, if `inner_expression_inference_state` is `Get`, the expression type has already been stored.
 return;
 }

@@ -9111,6 +9111,14 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {

 /// Infer the type of a [`ast::ExprAttribute`] expression, assuming a load context.
 fn infer_attribute_load(&mut self, attribute: &ast::ExprAttribute) -> Type<'db> {
+fn is_dotted_name(attribute: &ast::Expr) -> bool {
+match attribute {
+ast::Expr::Name(_) => true,
+ast::Expr::Attribute(ast::ExprAttribute { value, .. }) => is_dotted_name(value),
+_ => false,
+}
+}
+
 let ast::ExprAttribute { value, attr, .. } = attribute;

 let value_type = self.infer_maybe_standalone_expression(value, TypeContext::default());
@@ -9186,7 +9194,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 {
 let mut maybe_submodule_name = module_name.clone();
 maybe_submodule_name.extend(&relative_submodule);
-if resolve_module(db, &maybe_submodule_name).is_some() {
+if resolve_module(db, self.file(), &maybe_submodule_name).is_some() {
 if let Some(builder) = self
 .context
 .report_lint(&POSSIBLY_MISSING_ATTRIBUTE, attribute)
@@ -9204,6 +9212,42 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 }
 }

+if let Type::SpecialForm(special_form) = value_type {
+if let Some(builder) =
+self.context.report_lint(&UNRESOLVED_ATTRIBUTE, attribute)
+{
+let mut diag = builder.into_diagnostic(format_args!(
+"Special form `{special_form}` has no attribute `{attr_name}`",
+));
+if let Ok(defined_type) = value_type.in_type_expression(
+db,
+self.scope(),
+self.typevar_binding_context,
+) && !defined_type.member(db, attr_name).place.is_undefined()
+{
+diag.help(format_args!(
+"Objects with type `{ty}` have a{maybe_n} `{attr_name}` attribute, but the symbol \
+`{special_form}` does not itself inhabit the type `{ty}`",
+maybe_n = if attr_name.starts_with(['a', 'e', 'i', 'o', 'u']) {
+"n"
+} else {
+""
+},
+ty = defined_type.display(self.db())
+));
+if is_dotted_name(value) {
+let source = &source_text(self.db(), self.file())[value.range()];
+diag.help(format_args!(
+"This error may indicate that `{source}` was defined as \
+`{source} = {special_form}` when `{source}: {special_form}` \
+was intended"
+));
+}
+}
+}
+return fallback();
+}
+
 let Some(builder) = self.context.report_lint(&UNRESOLVED_ATTRIBUTE, attribute)
 else {
 return fallback();
@@ -11436,6 +11480,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 if typevar.default_type(db).is_some() {
 typevar_with_defaults += 1;
 }
+// TODO consider just accepting the given specialization without checking
+// against bounds/constraints, but recording the expression for deferred
+// checking at end of scope. This would avoid a lot of cycles caused by eagerly
+// doing assignment checks here.
 match typevar.typevar(db).bound_or_constraints(db) {
 Some(TypeVarBoundOrConstraints::UpperBound(bound)) => {
 if provided_type
@@ -11460,10 +11508,14 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 }
 }
 Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
+// TODO: this is wrong, the given specialization needs to be assignable
+// to _at least one_ of the individual constraints, not to the union of
+// all of them. `int | str` is not a valid specialization of a typevar
+// constrained to `(int, str)`.
 if provided_type
 .when_assignable_to(
 db,
-Type::Union(constraints),
+constraints.as_type(db),
 InferableTypeVars::None,
 )
 .is_never_satisfied(db)
@@ -198,7 +198,7 @@ impl ClassInfoConstraintFunction {
 self.generate_constraint(db, bound)
 }
 TypeVarBoundOrConstraints::Constraints(constraints) => {
-self.generate_constraint(db, Type::Union(constraints))
+self.generate_constraint(db, constraints.as_type(db))
 }
 }
 }
@@ -667,10 +667,11 @@ impl<'db> Signature<'db> {

 let mut parameters = Parameters::new(db, parameters);
 let mut return_ty = self.return_ty;
+let binding_context = self.definition.map(BindingContext::Definition);
 if let Some(self_type) = self_type {
 let self_mapping = TypeMapping::BindSelf {
 self_type,
-binding_context: self.definition.map(BindingContext::Definition),
+binding_context,
 };
 parameters = parameters.apply_type_mapping_impl(
 db,
@@ -682,7 +683,9 @@ impl<'db> Signature<'db> {
 .map(|ty| ty.apply_type_mapping(db, &self_mapping, TypeContext::default()));
 }
 Self {
-generic_context: self.generic_context,
+generic_context: self
+.generic_context
+.map(|generic_context| generic_context.remove_self(db, binding_context)),
 definition: self.definition,
 parameters,
 return_ty,
@@ -3,8 +3,7 @@

 use super::{ClassType, Type, class::KnownClass};
 use crate::db::Db;
-use crate::module_resolver::{KnownModule, file_to_module};
-use crate::resolve_module;
+use crate::module_resolver::{KnownModule, file_to_module, resolve_module_confident};
 use crate::semantic_index::place::ScopedPlaceId;
 use crate::semantic_index::{FileScopeId, place_table, use_def_map};
 use crate::types::TypeDefinition;
@@ -544,7 +543,7 @@ impl SpecialFormType {
 self.definition_modules()
 .iter()
 .find_map(|module| {
-let file = resolve_module(db, &module.name())?.file(db)?;
+let file = resolve_module_confident(db, &module.name())?.file(db)?;
 let scope = FileScopeId::global().to_scope_id(db, file);
 let symbol_id = place_table(db, scope).symbol_id(self.name())?;

@@ -8,7 +8,7 @@ use crate::types::{
 ApplyTypeMappingVisitor, BoundTypeVarInstance, ClassType, DynamicType,
 FindLegacyTypeVarsVisitor, HasRelationToVisitor, IsDisjointVisitor, KnownClass,
 MaterializationKind, MemberLookupPolicy, NormalizedVisitor, SpecialFormType, Type, TypeContext,
-TypeMapping, TypeRelation, TypeVarBoundOrConstraints, UnionType, todo_type,
+TypeMapping, TypeRelation, TypeVarBoundOrConstraints, todo_type,
 };
 use crate::{Db, FxOrderSet};

@@ -190,7 +190,9 @@ impl<'db> SubclassOfType<'db> {
 match bound_typevar.typevar(db).bound_or_constraints(db) {
 None => unreachable!(),
 Some(TypeVarBoundOrConstraints::UpperBound(bound)) => bound,
-Some(TypeVarBoundOrConstraints::Constraints(union)) => Type::Union(union),
+Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
+constraints.as_type(db)
+}
 }
 }
 };
@@ -351,7 +353,7 @@ impl<'db> SubclassOfInner<'db> {
 .and_then(|subclass_of| subclass_of.into_class(db))
 }
 Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
-match constraints.elements(db) {
+match &**constraints.elements(db) {
 [bound] => Self::try_from_instance(db, *bound)
 .and_then(|subclass_of| subclass_of.into_class(db)),
 _ => Some(ClassType::object(db)),
@@ -416,16 +418,10 @@ impl<'db> SubclassOfInner<'db> {
 )
 }
 Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
-let constraints = constraints
-.elements(db)
-.iter()
-.map(|constraint| {
+TypeVarBoundOrConstraints::Constraints(constraints.map(db, |constraint| {
 SubclassOfType::try_from_instance(db, *constraint)
 .unwrap_or(SubclassOfType::subclass_of_unknown())
-})
-.collect::<Box<_>>();
-
-TypeVarBoundOrConstraints::Constraints(UnionType::new(db, constraints))
+}))
 }
 })
 });
@@ -23,6 +23,7 @@ use itertools::{Either, EitherOrBoth, Itertools};

 use crate::semantic_index::definition::Definition;
 use crate::subscript::{Nth, OutOfBoundsError, PyIndex, PySlice, StepSizeZeroError};
+use crate::types::builder::RecursivelyDefined;
 use crate::types::class::{ClassType, KnownClass};
 use crate::types::constraints::{ConstraintSet, IteratorConstraintsExtension};
 use crate::types::generics::InferableTypeVars;
@@ -348,6 +349,10 @@ impl<T> FixedLengthTuple<T> {
 &self.0
 }

+pub(crate) fn owned_elements(self) -> Box<[T]> {
+self.0
+}
+
 pub(crate) fn elements(&self) -> impl DoubleEndedIterator<Item = &T> + ExactSizeIterator + '_ {
 self.0.iter()
 }
@@ -1458,7 +1463,7 @@ impl<'db> Tuple<Type<'db>> {
 // those techniques ensure that union elements are deduplicated and unions are eagerly simplified
 // into other types where necessary. Here, however, we know that there are no duplicates
 // in this union, so it's probably more efficient to use `UnionType::new()` directly.
-Type::Union(UnionType::new(db, elements))
+Type::Union(UnionType::new(db, elements, RecursivelyDefined::No))
 };

 TupleSpec::heterogeneous([
@@ -79,8 +79,7 @@ fn run_corpus_tests(pattern: &str) -> anyhow::Result<()> {
 let root = SystemPathBuf::from("/src");

 let mut db = CorpusDb::new();
-db.memory_file_system()
-.create_directory_all(root.as_ref())?;
+db.memory_file_system().create_directory_all(&root)?;

 let workspace_root = get_cargo_workspace_root()?;
 let workspace_root = workspace_root.to_string();
@@ -1,5 +1,5 @@
 use lsp_types::{
-ClientCapabilities, CodeActionKind, CodeActionOptions, CompletionOptions,
+self as types, ClientCapabilities, CodeActionKind, CodeActionOptions, CompletionOptions,
 DeclarationCapability, DiagnosticOptions, DiagnosticServerCapabilities,
 HoverProviderCapability, InlayHintOptions, InlayHintServerCapabilities, MarkupKind,
 NotebookCellSelector, NotebookSelector, OneOf, RenameOptions, SelectionRangeProviderCapability,
@@ -8,11 +8,9 @@ use lsp_types::{
 TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
 TypeDefinitionProviderCapability, WorkDoneProgressOptions,
 };
+use std::str::FromStr;

 use crate::PositionEncoding;
-use crate::session::GlobalSettings;
-use lsp_types as types;
-use std::str::FromStr;

 bitflags::bitflags! {
 /// Represents the resolved client capabilities for the language server.
@@ -349,7 +347,6 @@ impl ResolvedClientCapabilities {
 pub(crate) fn server_capabilities(
 position_encoding: PositionEncoding,
 resolved_client_capabilities: ResolvedClientCapabilities,
-global_settings: &GlobalSettings,
 ) -> ServerCapabilities {
 let diagnostic_provider =
 if resolved_client_capabilities.supports_diagnostic_dynamic_registration() {
@@ -368,11 +365,9 @@ pub(crate) fn server_capabilities(
 // dynamically based on the `ty.experimental.rename` setting.
 None
 } else {
-// Otherwise, we check whether user has enabled rename support via the resolved settings
-// from initialization options.
-global_settings
-.is_rename_enabled()
-.then(|| OneOf::Right(server_rename_options()))
+// Otherwise, we always register the rename provider and bail out in `prepareRename` if
+// the feature is disabled.
+Some(OneOf::Right(server_rename_options()))
 };

 ServerCapabilities {
@@ -72,15 +72,8 @@ impl Server {
 tracing::debug!("Resolved client capabilities: {resolved_client_capabilities}");

 let position_encoding = Self::find_best_position_encoding(&client_capabilities);
-let server_capabilities = server_capabilities(
-position_encoding,
-resolved_client_capabilities,
-&initialization_options
-.options
-.global
-.clone()
-.into_settings(),
-);
+let server_capabilities =
+server_capabilities(position_encoding, resolved_client_capabilities);

 let version = ruff_db::program_version().unwrap_or("Unknown");
 tracing::info!("Version: {version}");